From commits-noreply at bitbucket.org Tue Mar 1 01:23:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 01:23:01 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: Fix these tests on Windows Message-ID: <20110301002301.201452A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42354:708c8e2cc10c Date: 2011-03-01 01:21 +0100 http://bitbucket.org/pypy/pypy/changeset/708c8e2cc10c/ Log: Fix these tests on Windows diff --git a/pypy/rlib/test/test_libffi.py b/pypy/rlib/test/test_libffi.py --- a/pypy/rlib/test/test_libffi.py +++ b/pypy/rlib/test/test_libffi.py @@ -77,6 +77,7 @@ c_file = udir.ensure("test_libffi", dir=1).join("foolib.c") # automatically collect the C source from the docstrings of the tests snippets = [] + exports = [] for name in dir(cls): if name.startswith('test_'): meth = getattr(cls, name) @@ -84,9 +85,12 @@ # improved: so far we just check that there is a '{' :-) if meth.__doc__ is not None and '{' in meth.__doc__: snippets.append(meth.__doc__) + import re + for match in re.finditer(" ([a-z_]+)\(", meth.__doc__): + exports.append(match.group(1)) # c_file.write(py.code.Source('\n'.join(snippets))) - eci = ExternalCompilationInfo(export_symbols=[]) + eci = ExternalCompilationInfo(export_symbols=exports) cls.libfoo_name = str(platform.compile([c_file], eci, 'x', standalone=False)) From commits-noreply at bitbucket.org Tue Mar 1 01:23:02 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 01:23:02 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: Fix more tests Message-ID: <20110301002302.0E9552A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42355:f3d6a0cabcca Date: 2011-03-01 01:22 +0100 http://bitbucket.org/pypy/pypy/changeset/f3d6a0cabcca/ Log: Fix more tests diff --git a/pypy/translator/c/node.py b/pypy/translator/c/node.py --- a/pypy/translator/c/node.py +++ b/pypy/translator/c/node.py @@ -337,8 +337,6 @@ self.varlength = varlength 
self.dependencies = {} contained_type = ARRAY.OF - if ARRAY._hints.get("render_as_void"): - contained_type = Void self.itemtypename = db.gettype(contained_type, who_asks=self) self.fulltypename = self.itemtypename.replace('@', '(@)[%d]' % (self.varlength,)) @@ -683,6 +681,8 @@ def getptrname(self): if barebonearray(self.getTYPE()): + if self.getTYPE()._hints.get("render_as_void"): + return '(void *)%s' % self.name return self.name return ContainerNode.getptrname(self) diff --git a/pypy/rlib/libffi.py b/pypy/rlib/libffi.py --- a/pypy/rlib/libffi.py +++ b/pypy/rlib/libffi.py @@ -6,6 +6,7 @@ from pypy.rlib.clibffi import get_libc_name, FUNCFLAG_CDECL, AbstractFuncPtr, \ push_arg_as_ffiptr, c_ffi_call from pypy.rlib.rdynload import dlopen, dlclose, dlsym, dlsym_byordinal +from pypy.rlib.rdynload import DLLHANDLE class types(object): """ @@ -286,7 +287,7 @@ class CDLL(object): def __init__(self, libname): """Load the library, or raises DLOpenError.""" - self.lib = lltype.nullptr(rffi.CCHARP.TO) + self.lib = rffi.cast(DLLHANDLE, 0) ll_libname = rffi.str2charp(libname) try: self.lib = dlopen(ll_libname) @@ -296,7 +297,7 @@ def __del__(self): if self.lib: dlclose(self.lib) - self.lib = lltype.nullptr(rffi.CCHARP.TO) + self.lib = rffi.cast(DLLHANDLE, 0) def getpointer(self, name, argtypes, restype, flags=FUNCFLAG_CDECL): return Func(name, argtypes, restype, dlsym(self.lib, name), From commits-noreply at bitbucket.org Tue Mar 1 05:03:37 2011 From: commits-noreply at bitbucket.org (vincentlegoll) Date: Tue, 1 Mar 2011 05:03:37 +0100 (CET) Subject: [pypy-svn] pypy default: Add lineno & col_offset assertions to tests Message-ID: <20110301040337.3B5252A2031@codespeak.net> Author: Vincent Legoll Branch: Changeset: r42356:d3fb4ef0c74a Date: 2011-03-01 01:08 +0100 http://bitbucket.org/pypy/pypy/changeset/d3fb4ef0c74a/ Log: Add lineno & col_offset assertions to tests diff --git a/pypy/interpreter/pyparser/test/test_futureautomaton.py 
b/pypy/interpreter/pyparser/test/test_futureautomaton.py --- a/pypy/interpreter/pyparser/test/test_futureautomaton.py +++ b/pypy/interpreter/pyparser/test/test_futureautomaton.py @@ -15,11 +15,15 @@ f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_DIVISION + assert f.lineno == 2 + assert f.col_offset == 0 def test_comment(): s = '# A comment about nothing ;\n' f = run(s) assert f.pos == len(s) + assert f.lineno == -1 + assert f.col_offset == 0 def test_tripledocstring(): s = '''""" This is a @@ -28,6 +32,8 @@ ''' f = run(s) assert f.pos == len(s) + assert f.lineno == -1 + assert f.col_offset == 0 def test_escapedquote_in_tripledocstring(): s = '''""" This is a @@ -36,19 +42,23 @@ ''' f = run(s) assert f.pos == len(s) - - + assert f.lineno == -1 + assert f.col_offset == 0 def test_empty_line(): s = ' \t \f \n \n' f = run(s) assert f.pos == len(s) + assert f.lineno == -1 + assert f.col_offset == 0 def test_from(): s = 'from __future__ import division\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_DIVISION + assert f.lineno == 1 + assert f.col_offset == 0 def test_froms(): s = 'from __future__ import division, generators, with_statement\n' @@ -57,12 +67,16 @@ assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) + assert f.lineno == 1 + assert f.col_offset == 0 def test_from_as(): s = 'from __future__ import division as b\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_DIVISION + assert f.lineno == 1 + assert f.col_offset == 0 def test_froms_as(): s = 'from __future__ import division as b, generators as c\n' @@ -70,12 +84,16 @@ assert f.pos == len(s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) + assert f.lineno == 1 + assert f.col_offset == 0 def test_from_paren(): s = 'from __future__ import (division)\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_DIVISION + assert f.lineno == 1 + assert f.col_offset == 0 def 
test_froms_paren(): s = 'from __future__ import (division, generators)\n' @@ -83,6 +101,8 @@ assert f.pos == len(s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) + assert f.lineno == 1 + assert f.col_offset == 0 def test_froms_paren_as(): s = 'from __future__ import (division as b, generators,)\n' @@ -90,6 +110,8 @@ assert f.pos == len(s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) + assert f.lineno == 1 + assert f.col_offset == 0 def test_multiline(): s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,)\nfrom __future__ import with_statement\n' @@ -98,6 +120,8 @@ assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) + assert f.lineno == 4 + assert f.col_offset == 0 def test_windows_style_lineendings(): s = '"abc" #def\r\n #ghi\r\nfrom __future__ import (division as b, generators,)\r\nfrom __future__ import with_statement\r\n' @@ -106,6 +130,8 @@ assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) + assert f.lineno == 4 + assert f.col_offset == 0 def test_mac_style_lineendings(): s = '"abc" #def\r #ghi\rfrom __future__ import (division as b, generators,)\rfrom __future__ import with_statement\r' @@ -121,6 +147,8 @@ assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) + assert f.lineno == 3 + assert f.col_offset == 55 def test_full_chain(): s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,); from __future__ import with_statement\n' @@ -136,34 +164,42 @@ assert flags & fut.CO_FUTURE_WITH_STATEMENT == 0 assert pos == (1, 0) - def test_nonexisting(): s = 'from __future__ import non_existing_feature\n' f = run(s) assert f.pos == len(s) assert f.flags == 0 + assert f.lineno == 1 + assert f.col_offset == 0 def test_from_import_abs_import(): s = 'from __future__ import absolute_import\n' f = run(s) assert f.pos == len(s) assert f.flags 
== fut.CO_FUTURE_ABSOLUTE_IMPORT - + assert f.lineno == 1 + assert f.col_offset == 0 def test_raw_doc(): s = 'r"Doc"\nfrom __future__ import with_statement\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT + assert f.lineno == 2 + assert f.col_offset == 0 def test_unicode_doc(): s = 'u"Doc"\nfrom __future__ import with_statement\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT + assert f.lineno == 2 + assert f.col_offset == 0 def test_raw_unicode_doc(): s = 'ru"Doc"\nfrom __future__ import with_statement\n' f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT + assert f.lineno == 2 + assert f.col_offset == 0 From commits-noreply at bitbucket.org Tue Mar 1 05:03:37 2011 From: commits-noreply at bitbucket.org (vincentlegoll) Date: Tue, 1 Mar 2011 05:03:37 +0100 (CET) Subject: [pypy-svn] pypy default: Add lineno & col_offset assertion to test_mac_style_lineendings() Message-ID: <20110301040337.BFC952A2031@codespeak.net> Author: Vincent Legoll Branch: Changeset: r42357:be4acb6f78ce Date: 2011-03-01 01:10 +0100 http://bitbucket.org/pypy/pypy/changeset/be4acb6f78ce/ Log: Add lineno & col_offset assertion to test_mac_style_lineendings() This test now fails as line numbers are not correctly handled. 
diff --git a/pypy/interpreter/pyparser/test/test_futureautomaton.py b/pypy/interpreter/pyparser/test/test_futureautomaton.py --- a/pypy/interpreter/pyparser/test/test_futureautomaton.py +++ b/pypy/interpreter/pyparser/test/test_futureautomaton.py @@ -140,6 +140,9 @@ assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) + assert f.lineno == 4 + assert f.col_offset == 0 + def test_semicolon(): s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,); from __future__ import with_statement\n' f = run(s) From commits-noreply at bitbucket.org Tue Mar 1 05:03:38 2011 From: commits-noreply at bitbucket.org (vincentlegoll) Date: Tue, 1 Mar 2011 05:03:38 +0100 (CET) Subject: [pypy-svn] pypy default: Fix test_mac_style_lineendings() failure by correctly counting Message-ID: <20110301040338.47C812A2031@codespeak.net> Author: Vincent Legoll Branch: Changeset: r42358:1190dfc1638f Date: 2011-03-01 01:12 +0100 http://bitbucket.org/pypy/pypy/changeset/1190dfc1638f/ Log: Fix test_mac_style_lineendings() failure by correctly counting macos-style lines diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -168,7 +168,7 @@ if c == '\r': if self.getc() == '\n': self.pos += 1 - self.atbol() + self.atbol() else: self.atbol() self.start() From commits-noreply at bitbucket.org Tue Mar 1 05:03:40 2011 From: commits-noreply at bitbucket.org (vincentlegoll) Date: Tue, 1 Mar 2011 05:03:40 +0100 (CET) Subject: [pypy-svn] pypy default: Add tests for continuation lines being acceptable before __future__ Message-ID: <20110301040340.2C8C62A2031@codespeak.net> Author: Vincent Legoll Branch: Changeset: r42359:63ae44cc20d2 Date: 2011-03-01 01:14 +0100 http://bitbucket.org/pypy/pypy/changeset/63ae44cc20d2/ Log: Add tests for continuation lines being acceptable before __future__ imports. CPython 2.7 allows them. 
One test is disabled as it's a cpython behaviour we probably don't want to emulate. Those tests currently fail. diff --git a/pypy/interpreter/pyparser/test/test_futureautomaton.py b/pypy/interpreter/pyparser/test/test_futureautomaton.py --- a/pypy/interpreter/pyparser/test/test_futureautomaton.py +++ b/pypy/interpreter/pyparser/test/test_futureautomaton.py @@ -204,5 +204,37 @@ f = run(s) assert f.pos == len(s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT + +def test_continuation_line(): + s = "\\\nfrom __future__ import with_statement\n" + f = run(s) + assert f.pos == len(s) + assert f.flags == fut.CO_FUTURE_WITH_STATEMENT assert f.lineno == 2 assert f.col_offset == 0 + +def test_continuation_lines(): + s = "\\\n \t\\\nfrom __future__ import with_statement\n" + f = run(s) + assert f.pos == len(s) + assert f.flags == fut.CO_FUTURE_WITH_STATEMENT + assert f.lineno == 3 + assert f.col_offset == 0 + +# This looks like a bug in cpython parser +# and would require extensive modifications +# to future.py in order to emulate the same behaviour +def __test_continuation_lines_raise(): + s = " \\\n \t\\\nfrom __future__ import with_statement\n" + try: + f = run(s) + except IndentationError, e: + assert e.args == 'unexpected indent' + assert f.pos == len(s) + assert f.flags == 0 + assert f.lineno == -1 + assert f.col_offset == 0 + else: + raise AssertionError('IndentationError not raised') + assert f.lineno == 2 + assert f.col_offset == 0 From commits-noreply at bitbucket.org Tue Mar 1 05:03:41 2011 From: commits-noreply at bitbucket.org (vincentlegoll) Date: Tue, 1 Mar 2011 05:03:41 +0100 (CET) Subject: [pypy-svn] pypy default: Fix continuation lines handling. The tests all pass. Message-ID: <20110301040341.0FAC72A2031@codespeak.net> Author: Vincent Legoll Branch: Changeset: r42360:2564812a55ee Date: 2011-03-01 01:16 +0100 http://bitbucket.org/pypy/pypy/changeset/2564812a55ee/ Log: Fix continuation lines handling. The tests all pass. 
diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -85,7 +85,7 @@ c = self.getc() if c in ("'", '"', "r", "u") and not self.docstring_consumed: self.consume_docstring() - elif c in whitespace_or_newline: + elif c == '\\' or c in whitespace_or_newline: self.consume_empty_line() elif c == '#': self.consume_comment() @@ -149,6 +149,12 @@ # Syntax error return + def consume_continuation(self): + c = self.getc() + if c in '\n\r': + self.pos += 1 + self.atbol() + def consume_empty_line(self): """ Called when the remainder of the line can only contain whitespace @@ -162,6 +168,10 @@ self.pos += 1 self.consume_whitespace() self.start() + elif self.getc() in '\\': + self.pos += 1 + self.consume_continuation() + self.start() elif self.getc() in '\r\n': c = self.getc() self.pos += 1 From commits-noreply at bitbucket.org Tue Mar 1 05:03:41 2011 From: commits-noreply at bitbucket.org (gutworth) Date: Tue, 1 Mar 2011 05:03:41 +0100 (CET) Subject: [pypy-svn] pypy default: merge with future fixes Message-ID: <20110301040341.644D52A2079@codespeak.net> Author: Benjamin Peterson Branch: Changeset: r42361:50a0062831ab Date: 2011-02-28 21:55 -0600 http://bitbucket.org/pypy/pypy/changeset/50a0062831ab/ Log: merge with future fixes From commits-noreply at bitbucket.org Tue Mar 1 05:03:42 2011 From: commits-noreply at bitbucket.org (gutworth) Date: Tue, 1 Mar 2011 05:03:42 +0100 (CET) Subject: [pypy-svn] pypy default: kill trailing whitespace Message-ID: <20110301040342.0AFDC2A2031@codespeak.net> Author: Benjamin Peterson Branch: Changeset: r42362:bd2a7d3e7fe8 Date: 2011-02-28 21:56 -0600 http://bitbucket.org/pypy/pypy/changeset/bd2a7d3e7fe8/ Log: kill trailing whitespace diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -26,7 +26,7 @@ from 
pypy.interpreter.astcompiler.consts import CO_GENERATOR_ALLOWED, \ CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSOLUTE_IMPORT - + def get_futures(future_flags, source): futures = FutureAutomaton(future_flags, source) try: @@ -34,7 +34,7 @@ except DoneException, e: pass return futures.flags, (futures.lineno, futures.col_offset) - + class DoneException(Exception): pass @@ -226,7 +226,7 @@ if self.getc() not in whitespace + '\\': raise DoneException self.consume_whitespace() - + def consume_whitespace(self): while 1: c = self.getc() @@ -272,7 +272,6 @@ if paren_list and self.getc() == ')': self.pos += 1 return - if (self.getc() == 'a' and self.getc(+1) == 's' and self.getc(+2) in whitespace): From commits-noreply at bitbucket.org Tue Mar 1 05:03:42 2011 From: commits-noreply at bitbucket.org (gutworth) Date: Tue, 1 Mar 2011 05:03:42 +0100 (CET) Subject: [pypy-svn] pypy default: use test skipping Message-ID: <20110301040342.C16AA2A2072@codespeak.net> Author: Benjamin Peterson Branch: Changeset: r42363:36c913eb1084 Date: 2011-02-28 22:02 -0600 http://bitbucket.org/pypy/pypy/changeset/36c913eb1084/ Log: use test skipping diff --git a/pypy/interpreter/pyparser/test/test_futureautomaton.py b/pypy/interpreter/pyparser/test/test_futureautomaton.py --- a/pypy/interpreter/pyparser/test/test_futureautomaton.py +++ b/pypy/interpreter/pyparser/test/test_futureautomaton.py @@ -224,7 +224,8 @@ # This looks like a bug in cpython parser # and would require extensive modifications # to future.py in order to emulate the same behaviour -def __test_continuation_lines_raise(): +def test_continuation_lines_raise(): + py.test.skip("probably a CPython bug") s = " \\\n \t\\\nfrom __future__ import with_statement\n" try: f = run(s) From ronny at codespeak.net Tue Mar 1 13:24:17 2011 From: ronny at codespeak.net (ronny at codespeak.net) Date: Tue, 1 Mar 2011 13:24:17 +0100 (CET) Subject: [pypy-svn] r80403 - pypy/extradoc/planning/hg-migration Message-ID: 
<20110301122417.2E533282BDE@codespeak.net> Author: ronny Date: Tue Mar 1 13:24:14 2011 New Revision: 80403 Added: pypy/extradoc/planning/hg-migration/fix_usermap.py Modified: pypy/extradoc/planning/hg-migration/usermap.txt Log: grab most email addresses from codespeaks forwarding config, only a few unknown entires are missing now Added: pypy/extradoc/planning/hg-migration/fix_usermap.py ============================================================================== --- (empty file) +++ pypy/extradoc/planning/hg-migration/fix_usermap.py Tue Mar 1 13:24:14 2011 @@ -0,0 +1,30 @@ +""" +fix_usermap +:takes a name: realmail listing, fixes up a authormap (see usermap.txt) + +""" +import sys + +fixups = {} +with open(sys.argv[2]) as fp: + for line in fp: + try: + name, mail = line.split(':') + fixups[name.strip()] = mail.strip() + except ValueError: + pass + + +with open(sys.argv[1]) as fp: + for line in fp: + if 'codespeak.net' not in line: + sys.stdout.write(line) + else: + before = line.split('<')[0] + name = line.split('=')[0] + if name in fixups: + sys.stdout.write('%s<%s>\n'% (before, fixups[name])) + else: + sys.stdout.write(line) + sys.stderr.write('didnnt find %s\n'%name) + Modified: pypy/extradoc/planning/hg-migration/usermap.txt ============================================================================== --- pypy/extradoc/planning/hg-migration/usermap.txt (original) +++ pypy/extradoc/planning/hg-migration/usermap.txt Tue Mar 1 13:24:14 2011 @@ -20,126 +20,126 @@ xoraxax=Alexander Schremmer rxe=Richard Emslie ale=Anders Lehmann -auc=Aurelien Campeas -getxsick=Bartosz Skowron -nik=Niklaus Haldimann -cami=Camillo Bruni +auc=Aurelien Campeas +getxsick=Bartosz Skowron +nik=Niklaus Haldimann +cami=Camillo Bruni lac=Laura Creighton david=David Schneider -sanxiyn=Seo Sanghyeon +sanxiyn=Seo Sanghyeon santagada=Leonardo Santagada -tverwaes=Toon Verwaest -adim=Adrien Di Mascio -rhymes=Lawrence Oluyede +tverwaes=Toon Verwaest +adim=Adrien Di Mascio +rhymes=Lawrence Oluyede 
jacob=Jacob Hallen -guido=Guido Wesdorp -ludal=Ludovic Aubry +guido=Guido Wesdorp +ludal=Ludovic Aubry jlg=Jakub Gustak bea=Beatrice During hakanardo=Hakan Ardo niko=Niko Matsakis -alex=Alex Martelli -jcreigh=Jason Creighton +alex=Alex Martelli +jcreigh=Jason Creighton iko=Anders Hammarquist agaynor=Alex Gaynor stephan=Stephan Diehl -jandem=Jan de Mooij -pmaupin=Patrick Maupin -sschwarzer=Stefan Schwarzer +jandem=Jan de Mooij +pmaupin=Patrick Maupin +sschwarzer=Stefan Schwarzer tomek=Tomek Meka exarkun=Jean-Paul Calderone -bgola=Bruno Gola +bgola=Bruno Gola dan=Daniel Roberts bob=Bob Ippolito -afayolle=Alexandre Fayolle -simonb=Simon Burton +afayolle=Alexandre Fayolle +simonb=Simon Burton alastair=alastair mgedmin=Marius Gedminas -witulski=John Witulski -nico=Nicolas Chauvat -dialtone=Valentino Volonghi -magcius=Jean-Philippe St. Pierre +witulski=John Witulski +nico=Nicolas Chauvat +dialtone=Valentino Volonghi +magcius=Jean-Philippe St. Pierre trundle=Andreas St?hrk gvanrossum=Guido van Rossum -vinogradov=Pavel Vinogradov -jum=Jens-Uwe Mager -wlav=Wim Lavrijsen -akuhn=Adrian Kuhn -pdg=Paul deGrandis +vinogradov=Pavel Vinogradov +jum=Jens-Uwe Mager +wlav=Wim Lavrijsen +akuhn=Adrian Kuhn +pdg=Paul deGrandis gbrandl=Georg Brandl -gromit=Gerald Klix -wanja=Wanja Saatkamp +gromit=Gerald Klix +wanja=Wanja Saatkamp boria=Boris Feigin -davide=Davide Ancona -oscar=Oscar Nierstrasz -goden=Eugene Oden +davide=Davide Ancona +oscar=Oscar Nierstrasz +goden=Eugene Oden leuschel=Michael Leuschel -docgok=Henry Mason -guenter=Guenter Jantzen -bert=Bert Freudenberg -lukas=Lukas Renggli +docgok=Henry Mason +guenter=Guenter Jantzen +bert=Bert Freudenberg +lukas=Lukas Renggli lene=Lene Wagner -regmee=Amit Regmi -adurdin=Andrew Durdin +regmee=Amit Regmi +adurdin=Andrew Durdin benyoung=Ben Young -bigdog=Michael Schneider -briandorsey=Brian Dorsey -njriley=Nicholas Riley -igorto=Igor Trindade Oliveira -micktwomey=Michael Twomey -rocco=Rocco Moretti +bigdog=Michael Schneider 
+briandorsey=Brian Dorsey +njriley=Nicholas Riley +igorto=Igor Trindade Oliveira +micktwomey=Michael Twomey +rocco=Rocco Moretti wildchild=Gabriel Lavoie -lucian=Lucian Branescu Mihaila -dinu=Dinu Gherman -jared.grubb=Jared Grubb -karlb=Karl Bartel +lucian=Lucian Branescu Mihaila +dinu=Dinu Gherman +jared.grubb=Jared Grubb +karlb=Karl Bartel odie=Olivier Dormond -haypo=Victor Stinner +haypo=Victor Stinner antoine=Antoine Pitrou -atobe=Toby Watson +atobe=Toby Watson micke=Mikael Sch?nenberg -nshepperd=Neil Shepperd -stuart=Stuart Williams -hruske=Gasper Zejn -justas=Justas Sadzevicius -syt=Sylvain Thenault -alecu=Alejandro J. Cura -electronicru=Alexander Sedov -elmom=Elmo M?ntynen +nshepperd=Neil Shepperd +stuart=Stuart Williams +hruske=Gasper Zejn +justas=Justas Sadzevicius +syt=Sylvain Thenault +alecu=Alejandro J. Cura +electronicru=Alexander Sedov +elmom=Elmo M?ntynen jriehl=Jonathan David Riehl -quest=Anders Qvist -amcintyre=Alan McIntyre -tobami=Miquel Torres -alix=Alix Einfeldt -pzieschang=Pieter Zieschang -aft=Andrew Thompson -blais=Martin Blais -busemann=Stephan Busemann -esmljaos=Jacob Oscarson -henrikv=Henrik Vendelbo -iammisc=Travis Francis Athougies -laszlo=Artur Lisiecki -lucio=Lucio Torre -lutz_p=Lutz Paelike +quest=Anders Qvist +amcintyre=Alan McIntyre +tobami=Miquel Torres +alix=Alix Einfeldt +pzieschang=Pieter Zieschang +aft=Andrew Thompson +blais=Martin Blais +busemann=Stephan Busemann +esmljaos=Jacob Oscarson +henrikv=Henrik Vendelbo +iammisc=Travis Francis Athougies +laszlo=Artur Lisiecki +lucio=Lucio Torre +lutz_p=Lutz Paelike tav=tav -asigfrid=Anders Sigfridsson +asigfrid=Anders Sigfridsson gotcha=Godefroid Chappelle -jacek=Jacek Generowicz -jbaker=Jim Baker -jgilbert=Joshua Gilbert -misto=Fabrizio Milo +jacek=Jacek Generowicz +jbaker=Jim Baker +jgilbert=Joshua Gilbert +misto=Fabrizio Milo niemeyer=Gustavo Niemeyer -radix=Christopher Armstrong +radix=Christopher Armstrong verte=William Leslie -yusei=Yusei Tahara -anthon=Anthon van der Neut 
-jan=Jan Balster -lamby=Chris Lamb -mcherm=Michael Chermside -zooko=Zooko Wilcox-O Hearn +yusei=Yusei Tahara +anthon=Anthon van der Neut +jan=Jan Balster +lamby=Chris Lamb +mcherm=Michael Chermside +zooko=Zooko Wilcox-O Hearn danchr=Dan Villiom Podlaski Christiansen anna=Anna Ravencroft -dcolish=Dan Colish -dstromberg=Dan Stromberg +dcolish=Dan Colish +dstromberg=Dan Stromberg gintas=Gintautas Miliauskas -ignas=Ignas Mikalajunas +ignas=Ignas Mikalajunas From ronny at codespeak.net Tue Mar 1 13:33:53 2011 From: ronny at codespeak.net (ronny at codespeak.net) Date: Tue, 1 Mar 2011 13:33:53 +0100 (CET) Subject: [pypy-svn] r80404 - pypy/extradoc/planning/hg-migration Message-ID: <20110301123353.BDA75282BDE@codespeak.net> Author: ronny Date: Tue Mar 1 13:33:52 2011 New Revision: 80404 Modified: pypy/extradoc/planning/hg-migration/usermap.txt Log: fix up the missing mails (thanks holger) Modified: pypy/extradoc/planning/hg-migration/usermap.txt ============================================================================== --- pypy/extradoc/planning/hg-migration/usermap.txt (original) +++ pypy/extradoc/planning/hg-migration/usermap.txt Tue Mar 1 13:33:52 2011 @@ -42,18 +42,18 @@ jcreigh=Jason Creighton iko=Anders Hammarquist agaynor=Alex Gaynor -stephan=Stephan Diehl +stephan=Stephan Diehl jandem=Jan de Mooij pmaupin=Patrick Maupin sschwarzer=Stefan Schwarzer -tomek=Tomek Meka +tomek=Tomek Meka exarkun=Jean-Paul Calderone bgola=Bruno Gola dan=Daniel Roberts bob=Bob Ippolito afayolle=Alexandre Fayolle simonb=Simon Burton -alastair=alastair +alastair=Alastair Burt mgedmin=Marius Gedminas witulski=John Witulski nico=Nicolas Chauvat @@ -81,7 +81,7 @@ lene=Lene Wagner regmee=Amit Regmi adurdin=Andrew Durdin -benyoung=Ben Young +benyoung=Ben Young bigdog=Michael Schneider briandorsey=Brian Dorsey njriley=Nicholas Riley @@ -106,7 +106,7 @@ alecu=Alejandro J. 
Cura electronicru=Alexander Sedov elmom=Elmo M?ntynen -jriehl=Jonathan David Riehl +jriehl=Jonathan David Riehl quest=Anders Qvist amcintyre=Alan McIntyre tobami=Miquel Torres @@ -123,7 +123,7 @@ lutz_p=Lutz Paelike tav=tav asigfrid=Anders Sigfridsson -gotcha=Godefroid Chappelle +gotcha=Godefroid Chappelle jacek=Jacek Generowicz jbaker=Jim Baker jgilbert=Joshua Gilbert @@ -138,8 +138,8 @@ mcherm=Michael Chermside zooko=Zooko Wilcox-O Hearn danchr=Dan Villiom Podlaski Christiansen -anna=Anna Ravencroft +anna=Anna Martelli Ravencroft dcolish=Dan Colish dstromberg=Dan Stromberg -gintas=Gintautas Miliauskas +gintas=Gintautas Miliauskas ignas=Ignas Mikalajunas From ronny at codespeak.net Tue Mar 1 13:57:13 2011 From: ronny at codespeak.net (ronny at codespeak.net) Date: Tue, 1 Mar 2011 13:57:13 +0100 (CET) Subject: [pypy-svn] r80405 - pypy/extradoc/planning/hg-migration Message-ID: <20110301125713.E5C70282BDE@codespeak.net> Author: ronny Date: Tue Mar 1 13:57:11 2011 New Revision: 80405 Removed: pypy/extradoc/planning/hg-migration/usermap.txt Log: remove the usermap, its now in the z repos misc dir From commits-noreply at bitbucket.org Tue Mar 1 14:28:00 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 1 Mar 2011 14:28:00 +0100 (CET) Subject: [pypy-svn] pypy default: Changes CSV Reader and Writer to be newstyle classes, worth a couple percent on issue 641. Message-ID: <20110301132800.87659282BDE@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42364:dcd82ffead95 Date: 2011-03-01 08:27 -0500 http://bitbucket.org/pypy/pypy/changeset/dcd82ffead95/ Log: Changes CSV Reader and Writer to be newstyle classes, worth a couple percent on issue 641. 
diff --git a/lib_pypy/_csv.py b/lib_pypy/_csv.py --- a/lib_pypy/_csv.py +++ b/lib_pypy/_csv.py @@ -194,8 +194,7 @@ names = csv.list_dialects()""" return list(_dialects) -class Reader: - +class Reader(object): """CSV reader Reader objects are responsible for reading and parsing tabular data @@ -390,7 +389,7 @@ self.field.append(c) -class Writer: +class Writer(object): """CSV writer Writer objects are responsible for generating tabular data From commits-noreply at bitbucket.org Tue Mar 1 15:35:20 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 15:35:20 +0100 (CET) Subject: [pypy-svn] pypy default: Some optimisations to csv.Reader: Message-ID: <20110301143520.B1DF8282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42365:f8386c92c93b Date: 2011-03-01 13:59 +0100 http://bitbucket.org/pypy/pypy/changeset/f8386c92c93b/ Log: Some optimisations to csv.Reader: - self.field is now a string - "c in '\r\n'" is much faster than "c in ('\r', '\n')" diff --git a/lib_pypy/_csv.py b/lib_pypy/_csv.py --- a/lib_pypy/_csv.py +++ b/lib_pypy/_csv.py @@ -213,7 +213,7 @@ self._parse_reset() def _parse_reset(self): - self.field = [] + self.field = '' self.fields = [] self.state = self.START_RECORD self.numeric_field = False @@ -250,7 +250,7 @@ def _parse_process_char(self, c): if self.state == self.IN_FIELD: # in unquoted field - if c in ('\n', '\r', '\0'): + if c in '\n\r\0': # end of line - return [fields] self._parse_save_field() if c == '\0': @@ -272,7 +272,7 @@ if c == '\0': # empty line - return [] pass - elif c in ('\n', '\r'): + elif c in '\n\r': self.state = self.EAT_CRNL else: self.state = self.START_FIELD @@ -280,7 +280,7 @@ self._parse_process_char(c) elif self.state == self.START_FIELD: - if c in ('\n', '\r', '\0'): + if c in '\n\r\0': # save empty field - return [fields] self._parse_save_field() if c == '\0': @@ -348,7 +348,7 @@ # save field - wait for new field self._parse_save_field() self.state = self.START_FIELD - elif c in 
('\r', '\n', '\0'): + elif c in '\r\n\0': # end of line - return [fields] self._parse_save_field() if c == '\0': @@ -363,7 +363,7 @@ (self.dialect.delimiter, self.dialect.quotechar)) elif self.state == self.EAT_CRNL: - if c in ('\r', '\n'): + if c in '\r\n': pass elif c == '\0': self.state = self.START_RECORD @@ -376,8 +376,7 @@ raise RuntimeError("unknown state: %r" % (self.state,)) def _parse_save_field(self): - field, self.field = self.field, [] - field = ''.join(field) + field, self.field = self.field, '' if self.numeric_field: self.numeric_field = False field = float(field) @@ -386,7 +385,7 @@ def _parse_add_char(self, c): if len(self.field) >= _field_limit: raise Error("field larget than field limit (%d)" % (_field_limit)) - self.field.append(c) + self.field += c class Writer(object): From commits-noreply at bitbucket.org Tue Mar 1 15:35:21 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 15:35:21 +0100 (CET) Subject: [pypy-svn] pypy default: More optimizations to csv.Reader Message-ID: <20110301143521.4E3F5282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42366:ed70fe1739c6 Date: 2011-03-01 14:46 +0100 http://bitbucket.org/pypy/pypy/changeset/ed70fe1739c6/ Log: More optimizations to csv.Reader diff --git a/lib_pypy/_csv.py b/lib_pypy/_csv.py --- a/lib_pypy/_csv.py +++ b/lib_pypy/_csv.py @@ -234,11 +234,12 @@ self.line_num += 1 - for c in line: - if c == '\0': - raise Error("line contains NULL byte") - self._parse_process_char(c) - self._parse_process_char('\0') + if '\0' in line: + raise Error("line contains NULL byte") + pos = 0 + while pos < len(line): + pos = self._parse_process_char(line, pos) + self._parse_eol() if self.state == self.START_RECORD: break @@ -247,46 +248,46 @@ self.fields = [] return fields - def _parse_process_char(self, c): + def _parse_process_char(self, line, pos): + c = line[pos] if self.state == self.IN_FIELD: # in unquoted field - if c in '\n\r\0': - # end of line - return [fields] - 
self._parse_save_field() - if c == '\0': - self.state = self.START_RECORD + pos2 = pos + while True: + if c in '\n\r': + # end of line - return [fields] + self._parse_save_field() + self.state = self.EAT_CRNL + elif c == self.dialect.escapechar: + # possible escaped character + self.state = self.ESCAPED_CHAR + elif c == self.dialect.delimiter: + # save field - wait for new field + self._parse_save_field() + self.state = self.START_FIELD else: - self.state = self.EAT_CRNL - elif c == self.dialect.escapechar: - # possible escaped character - self.state = self.ESCAPED_CHAR - elif c == self.dialect.delimiter: - # save field - wait for new field - self._parse_save_field() - self.state = self.START_FIELD - else: - # normal character - save in field - self._parse_add_char(c) + # normal character - save in field + pos2 += 1 + c = line[pos2] + continue + break + if pos2 > pos: + self._parse_add_char(line[pos:pos2]) + pos = pos2 elif self.state == self.START_RECORD: - if c == '\0': - # empty line - return [] - pass - elif c in '\n\r': + if c in '\n\r': self.state = self.EAT_CRNL else: self.state = self.START_FIELD # restart process - self._parse_process_char(c) + self._parse_process_char(line, pos) elif self.state == self.START_FIELD: - if c in '\n\r\0': + if c in '\n\r': # save empty field - return [fields] self._parse_save_field() - if c == '\0': - self.state = self.START_RECORD - else: - self.state = self.EAT_CRNL + self.state = self.EAT_CRNL elif (c == self.dialect.quotechar and self.dialect.quoting != QUOTE_NONE): # start quoted field @@ -308,15 +309,11 @@ self.state = self.IN_FIELD elif self.state == self.ESCAPED_CHAR: - if c == '\0': - c = '\n' self._parse_add_char(c) self.state = self.IN_FIELD elif self.state == self.IN_QUOTED_FIELD: - if c == '\0': - pass - elif c == self.dialect.escapechar: + if c == self.dialect.escapechar: # possible escape character self.state = self.ESCAPE_IN_QUOTED_FIELD elif (c == self.dialect.quotechar @@ -332,8 +329,6 @@ 
self._parse_add_char(c) elif self.state == self.ESCAPE_IN_QUOTED_FIELD: - if c == '\0': - c = '\n' self._parse_add_char(c) self.state = self.IN_QUOTED_FIELD @@ -348,13 +343,10 @@ # save field - wait for new field self._parse_save_field() self.state = self.START_FIELD - elif c in '\r\n\0': + elif c in '\r\n': # end of line - return [fields] self._parse_save_field() - if c == '\0': - self.state = self.START_RECORD - else: - self.state = self.EAT_CRNL + self.state = self.EAT_CRNL elif not self.dialect.strict: self._parse_add_char(c) self.state = self.IN_FIELD @@ -365,8 +357,6 @@ elif self.state == self.EAT_CRNL: if c in '\r\n': pass - elif c == '\0': - self.state = self.START_RECORD else: raise Error("new-line character seen in unquoted field - " "do you need to open the file " @@ -375,6 +365,38 @@ else: raise RuntimeError("unknown state: %r" % (self.state,)) + return pos + 1 + + def _parse_eol(self): + if self.state == self.EAT_CRNL: + self.state = self.START_RECORD + elif self.state == self.START_RECORD: + # empty line - return [] + pass + elif self.state == self.IN_FIELD: + # in unquoted field + # end of line - return [fields] + self._parse_save_field() + self.state = self.START_RECORD + elif self.state == self.START_FIELD: + # save empty field - return [fields] + self._parse_save_field() + self.state = self.START_RECORD + elif self.state == self.ESCAPED_CHAR: + self._parse_add_char('\n') + self.state = self.IN_FIELD + elif self.state == self.IN_QUOTED_FIELD: + pass + elif self.state == self.ESCAPE_IN_QUOTED_FIELD: + self._parse_add_char('\n') + self.state = self.IN_QUOTED_FIELD + elif self.state == self.QUOTE_IN_QUOTED_FIELD: + # end of line - return [fields] + self._parse_save_field() + self.state = self.START_RECORD + else: + raise RuntimeError("unknown state: %r" % (self.state,)) + def _parse_save_field(self): field, self.field = self.field, '' if self.numeric_field: @@ -383,7 +405,7 @@ self.fields.append(field) def _parse_add_char(self, c): - if 
len(self.field) >= _field_limit: + if len(self.field) + len(c) > _field_limit: raise Error("field larget than field limit (%d)" % (_field_limit)) self.field += c From commits-noreply at bitbucket.org Tue Mar 1 15:54:55 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 15:54:55 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: - Rewrite void arrays as char arrays, but the pointer is still a void* Message-ID: <20110301145455.E3919282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42367:1f6ef71f3d19 Date: 2011-03-01 14:37 +0100 http://bitbucket.org/pypy/pypy/changeset/1f6ef71f3d19/ Log: - Rewrite void arrays as char arrays, but the pointer is still a void* - allow rffi.cast(rffi.VOIDP, 0) in jitted code - another fix for win32 diff --git a/pypy/translator/c/node.py b/pypy/translator/c/node.py --- a/pypy/translator/c/node.py +++ b/pypy/translator/c/node.py @@ -337,10 +337,15 @@ self.varlength = varlength self.dependencies = {} contained_type = ARRAY.OF + # There is no such thing as an array of voids: + # we use a an array of chars instead; only the pointer can be void*. 
self.itemtypename = db.gettype(contained_type, who_asks=self) self.fulltypename = self.itemtypename.replace('@', '(@)[%d]' % (self.varlength,)) - self.fullptrtypename = self.itemtypename.replace('@', '*@') + if ARRAY._hints.get("render_as_void"): + self.fullptrtypename = 'void *@' + else: + self.fullptrtypename = self.itemtypename.replace('@', '*@') def setup(self): """Array loops are forbidden by ForwardReference.become() because @@ -681,8 +686,6 @@ def getptrname(self): if barebonearray(self.getTYPE()): - if self.getTYPE()._hints.get("render_as_void"): - return '(void *)%s' % self.name return self.name return ContainerNode.getptrname(self) diff --git a/pypy/rlib/rwin32.py b/pypy/rlib/rwin32.py --- a/pypy/rlib/rwin32.py +++ b/pypy/rlib/rwin32.py @@ -191,7 +191,7 @@ assert buflen > 0 result = rffi.charpsize2str(buf[0], buflen) - LocalFree(buf[0]) + LocalFree(rffi.cast(rffi.VOIDP, buf[0])) finally: lltype.free(buf, flavor='raw') diff --git a/pypy/rpython/lltypesystem/lltype.py b/pypy/rpython/lltypesystem/lltype.py --- a/pypy/rpython/lltypesystem/lltype.py +++ b/pypy/rpython/lltypesystem/lltype.py @@ -810,6 +810,8 @@ return cast_pointer(TGT, value) elif ORIG == llmemory.Address: return llmemory.cast_adr_to_ptr(value, TGT) + elif ORIG == Signed: + return cast_int_to_ptr(TGT, value) elif TGT == llmemory.Address and isinstance(ORIG, Ptr): return llmemory.cast_ptr_to_adr(value) elif TGT == Signed and isinstance(ORIG, Ptr) and ORIG.TO._gckind == 'raw': From commits-noreply at bitbucket.org Tue Mar 1 15:59:32 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Tue, 1 Mar 2011 15:59:32 +0100 (CET) Subject: [pypy-svn] pypy default: don't modify the result of space.listview Message-ID: <20110301145932.35C1C282BDE@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42368:6429f8e51662 Date: 2011-03-01 15:59 +0100 http://bitbucket.org/pypy/pypy/changeset/6429f8e51662/ Log: don't modify the result of space.listview diff --git 
a/pypy/objspace/std/bytearrayobject.py b/pypy/objspace/std/bytearrayobject.py --- a/pypy/objspace/std/bytearrayobject.py +++ b/pypy/objspace/std/bytearrayobject.py @@ -539,9 +539,10 @@ if not space.is_w(w_by, space.w_None): w_by = space.wrap(space.bufferstr_new_w(w_by)) w_list = space.call_method(w_str, "split", w_by, w_maxsplit) - list_w = space.listview(w_list) - for i in range(len(list_w)): - list_w[i] = String2Bytearray(space, list_w[i]) + length = space.int_w(space.len(w_list)) + for i in range(length): + w_i = space.wrap(i) + space.setitem(w_list, w_i, String2Bytearray(space, space.getitem(w_list, w_i))) return w_list def str_rsplit__Bytearray_ANY_ANY(space, w_bytearray, w_by, w_maxsplit=-1): @@ -549,9 +550,10 @@ if not space.is_w(w_by, space.w_None): w_by = space.wrap(space.bufferstr_new_w(w_by)) w_list = space.call_method(w_str, "rsplit", w_by, w_maxsplit) - list_w = space.listview(w_list) - for i in range(len(list_w)): - list_w[i] = String2Bytearray(space, list_w[i]) + length = space.int_w(space.len(w_list)) + for i in range(length): + w_i = space.wrap(i) + space.setitem(w_list, w_i, String2Bytearray(space, space.getitem(w_list, w_i))) return w_list def str_partition__Bytearray_ANY(space, w_bytearray, w_sub): From lac at codespeak.net Tue Mar 1 16:14:01 2011 From: lac at codespeak.net (lac at codespeak.net) Date: Tue, 1 Mar 2011 16:14:01 +0100 (CET) Subject: [pypy-svn] r80406 - pypy/extradoc/pycon-advertisement Message-ID: <20110301151401.61595282BDE@codespeak.net> Author: lac Date: Tue Mar 1 16:13:59 2011 New Revision: 80406 Added: pypy/extradoc/pycon-advertisement/pycon2011flyer.pdf Log: What got shipped to the printer Added: pypy/extradoc/pycon-advertisement/pycon2011flyer.pdf ============================================================================== --- (empty file) +++ pypy/extradoc/pycon-advertisement/pycon2011flyer.pdf Tue Mar 1 16:13:59 2011 @@ -0,0 +1,3514 @@ +%PDF-1.3 %???? 
+1 0 obj <>]/Pages 3 0 R/Type/Catalog/ViewerPreferences<>>> endobj 2 0 obj <>stream + + + + + uuid:a76538ab-b185-423e-afd3-4a72818e3efd + xmp.did:43E1BC703143E011BDA7A29D2EF5EDE5 + adobe:docid:indd:58656b8a-3e67-11df-8aa5-f49606768846 + proof:pdf + 1 + + xmp.iid:9F64BCE82643E011BDA7A29D2EF5EDE5 + xmp.did:B44F9690F8BFDF11BFCBA697A360D144 + adobe:docid:indd:58656b8a-3e67-11df-8aa5-f49606768846 + default + + + + + saved + xmp.iid:A62E8A8EFA266811BA65F5CCC021424C + 2010-06-23T07:02:37+02:00 + Adobe InDesign 6.0 + / + + + saved + xmp.iid:A72E8A8EFA266811BA65F5CCC021424C + 2010-06-23T07:02:37+02:00 + Adobe InDesign 6.0 + /metadata + + + saved + xmp.iid:F0D9A0AE9E82DF118A6EFE4471713F3F + 2010-06-28T12:19:49+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:F1D9A0AE9E82DF118A6EFE4471713F3F + 2010-06-28T12:19:49+02:00 + Adobe InDesign 7.0 + /metadata + + + saved + xmp.iid:B06D6D94F588DF11B20EA9572674A80B + 2010-07-06T13:56:58+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:7AFBF2DAF688DF11B20EA9572674A80B + 2010-07-06T14:06:06+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:7BFBF2DAF688DF11B20EA9572674A80B + 2010-07-06T14:29:26+02:00 + Adobe InDesign 7.0 + /metadata + + + saved + xmp.iid:412ACE1DFA88DF11B20EA9572674A80B + 2010-07-06T14:29:26+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:758E2282BD89DF1187ABA3D9C1F4CA97 + 2010-07-07T13:48:06+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:24B21E89BD89DF1187ABA3D9C1F4CA97 + 2010-07-07T13:48:18+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:E9EDD334BE89DF1187ABA3D9C1F4CA97 + 2010-07-07T13:53:06+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:EAEDD334BE89DF1187ABA3D9C1F4CA97 + 2010-07-07T13:59:37+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:7AE98042C089DF1187ABA3D9C1F4CA97 + 2010-07-07T14:07:48+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0ACD1362C189DF1187ABA3D9C1F4CA97 + 
2010-07-07T14:15:51+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:68C54E19C389DF1187ABA3D9C1F4CA97 + 2010-07-07T14:28:08+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:32AA0CCEC389DF11838DA91A97CA8345 + 2010-07-07T14:33:11+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:585FC6D6248BDF11A66ED4F0096EC00C + 2010-07-09T08:40:18+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:AD489E192B8BDF11A66ED4F0096EC00C + 2010-07-09T09:25:07+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:FEA623C8308BDF11A66ED4F0096EC00C + 2010-07-09T10:05:47+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0A3C9674338BDF11A66ED4F0096EC00C + 2010-07-09T10:24:56+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:4B6836BA338BDF11A66ED4F0096EC00C + 2010-07-09T10:26:52+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:60664274378BDF11A66ED4F0096EC00C + 2010-07-09T10:53:33+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0EFB5987378BDF11A66ED4F0096EC00C + 2010-07-09T10:54:05+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0FFB5987378BDF11A66ED4F0096EC00C + 2010-07-09T11:17:47+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:10FB5987378BDF11A66ED4F0096EC00C + 2010-07-09T11:21:47+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:CDDD19A13B8BDF11A66ED4F0096EC00C + 2010-07-09T11:23:26+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:5F8CACBD3B8BDF11A66ED4F0096EC00C + 2010-07-09T11:24:14+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:8EF5D10E3C8BDF11A66ED4F0096EC00C + 2010-07-09T11:26:30+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:25DFCF5E349ADF118FAA91DF06C51565 + 2010-07-28T12:39:16+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:30BF3068349ADF118FAA91DF06C51565 + 2010-07-28T12:39:32+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + 
xmp.iid:A417C5DD349ADF118FAA91DF06C51565 + 2010-07-28T12:42:49+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:2DD88113359ADF118FAA91DF06C51565 + 2010-07-28T12:44:19+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:D4F5668C359ADF118FAA91DF06C51565 + 2010-07-28T12:47:42+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:34728DA1359ADF118FAA91DF06C51565 + 2010-07-28T12:48:17+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:79D0F3EC359ADF118FAA91DF06C51565 + 2010-07-28T12:50:24+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:099FD41F369ADF118FAA91DF06C51565 + 2010-07-28T12:51:49+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:20E59439369ADF118FAA91DF06C51565 + 2010-07-28T12:52:32+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:D85BF3CD369ADF118FAA91DF06C51565 + 2010-07-28T12:56:41+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9DE91AD5379ADF118FAA91DF06C51565 + 2010-07-28T13:04:03+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:D28420EA389ADF118FAA91DF06C51565 + 2010-07-28T13:11:48+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0B80E310399ADF118FAA91DF06C51565 + 2010-07-28T13:12:53+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:014D8B18399ADF118FAA91DF06C51565 + 2010-07-28T13:13:06+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:5AC9B437399ADF118FAA91DF06C51565 + 2010-07-28T13:13:58+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:A420AD65399ADF118FAA91DF06C51565 + 2010-07-28T13:15:15+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:1735A8B0399ADF118FAA91DF06C51565 + 2010-07-28T13:17:21+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:C2AC9E4B3A9ADF118FAA91DF06C51565 + 2010-07-28T13:21:41+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:405F347A3A9ADF118FAA91DF06C51565 + 2010-07-28T13:22:59+02:00 + Adobe InDesign 7.0 + /;/metadata + + + 
saved + xmp.iid:47A1F7953A9ADF118FAA91DF06C51565 + 2010-07-28T13:23:45+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:ABC36AC83A9ADF118FAA91DF06C51565 + 2010-07-28T13:25:10+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:115653CE3C9ADF11A699E23703B53C32 + 2010-07-28T13:39:39+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:0A17ADE73C9ADF11A699E23703B53C32 + 2010-07-28T13:40:22+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:68D71E513D9ADF11A699E23703B53C32 + 2010-07-28T13:43:18+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:A76607863D9ADF11A699E23703B53C32 + 2010-07-28T13:44:47+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:95BA1C52A5B5DF11A2FDE7FA81431FF0 + 2010-09-01T10:45:49+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:3C1F4A82A5B5DF11A2FDE7FA81431FF0 + 2010-09-01T10:47:10+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:5670C38DA5B5DF11A2FDE7FA81431FF0 + 2010-09-01T10:47:29+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:AF993BDAA9B5DF11A2FDE7FA81431FF0 + 2010-09-01T11:18:16+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:69118DAAAAB5DF11A2FDE7FA81431FF0 + 2010-09-01T11:24:05+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:7246A38EAEB5DF11A2FDE7FA81431FF0 + 2010-09-01T11:51:56+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:1C02C5ABAEB5DF11A2FDE7FA81431FF0 + 2010-09-01T11:52:45+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:BD7B26B7AEB5DF11A2FDE7FA81431FF0 + 2010-09-01T11:53:04+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:BDCD8EA797B6DF1197EAED9BA5ACAB6D + 2010-09-02T15:40:31+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:06FA7FCD97B6DF1197EAED9BA5ACAB6D + 2010-09-02T15:41:34+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:968093B598B6DF1197EAED9BA5ACAB6D + 2010-09-02T15:48:04+02:00 + Adobe InDesign 7.0 + /;/metadata 
+ + + saved + xmp.iid:BD9B22A373BEDF11A7F9BA7C016C7538 + 2010-09-12T15:42:51+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:E0409FC476BEDF11A7F9BA7C016C7538 + 2010-09-12T16:05:15+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:A1288F90F8BFDF11BFCBA697A360D144 + 2010-09-14T14:06:54+02:00 + Adobe InDesign 7.0 + /metadata + + + saved + xmp.iid:B44F9690F8BFDF11BFCBA697A360D144 + 2010-09-14T14:06:54+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:B64F9690F8BFDF11BFCBA697A360D144 + 2010-09-14T14:18:10+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:B84F9690F8BFDF11BFCBA697A360D144 + 2010-09-14T14:27:11+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:94AF5B51FCBFDF11BFCBA697A360D144 + 2010-09-14T14:33:46+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:97AF5B51FCBFDF11BFCBA697A360D144 + 2010-09-14T14:35:45+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9AAF5B51FCBFDF11BFCBA697A360D144 + 2010-09-14T14:46:36+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:40A1FB1DFFBFDF11BFCBA697A360D144 + 2010-09-14T14:53:48+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:43A1FB1DFFBFDF11BFCBA697A360D144 + 2010-09-14T14:56:35+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:46A1FB1DFFBFDF11BFCBA697A360D144 + 2010-09-14T15:19:26+02:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:950776C70A43E011BDA7A29D2EF5EDE5 + 2011-02-28T08:17:19+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9A0776C70A43E011BDA7A29D2EF5EDE5 + 2011-02-28T08:19:27+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:F1430C3C0D43E011BDA7A29D2EF5EDE5 + 2011-02-28T08:34:54+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:DA129DC50D43E011BDA7A29D2EF5EDE5 + 2011-02-28T08:38:45+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:E0129DC50D43E011BDA7A29D2EF5EDE5 + 2011-02-28T09:47:18+01:00 + Adobe InDesign 7.0 + 
/;/metadata + + + saved + xmp.iid:5E53FADD1743E011BDA7A29D2EF5EDE5 + 2011-02-28T11:11:02+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9864BCE82643E011BDA7A29D2EF5EDE5 + 2011-02-28T11:38:41+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9D64BCE82643E011BDA7A29D2EF5EDE5 + 2011-02-28T11:39:11+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9E64BCE82643E011BDA7A29D2EF5EDE5 + 2011-02-28T12:48:02+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:9F64BCE82643E011BDA7A29D2EF5EDE5 + 2011-02-28T12:54:04+01:00 + Adobe InDesign 7.0 + /metadata + + + saved + xmp.iid:43E1BC703143E011BDA7A29D2EF5EDE5 + 2011-02-28T12:54:04+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:48E1BC703143E011BDA7A29D2EF5EDE5 + 2011-02-28T13:12:57+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:B6C15ADC3743E011BDA7A29D2EF5EDE5 + 2011-02-28T13:40:02+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:BCC15ADC3743E011BDA7A29D2EF5EDE5 + 2011-02-28T13:41:15+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:BDC15ADC3743E011BDA7A29D2EF5EDE5 + 2011-02-28T13:57:35+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:B881C3543C43E011BDA7A29D2EF5EDE5 + 2011-02-28T14:12:02+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:BE81C3543C43E011BDA7A29D2EF5EDE5 + 2011-02-28T14:26:26+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:638B945F3E43E011BDA7A29D2EF5EDE5 + 2011-02-28T14:26:39+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:C40EE5103F43E011BDA7A29D2EF5EDE5 + 2011-02-28T14:31:36+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:4EA118E34043E0118F02D023F837F76A + 2011-02-28T14:44:38+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:B987CB414243E0118F02D023F837F76A + 2011-02-28T14:54:27+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:087814A1D243E01182B2D0C93D70A969 + 2011-03-01T08:07:54+01:00 + Adobe InDesign 
7.0 + /;/metadata + + + saved + xmp.iid:8B2AC61DE143E01182B2D0C93D70A969 + 2011-03-01T09:51:36+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:E64FB0E7E343E01182B2D0C93D70A969 + 2011-03-01T10:11:34+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:EC4FB0E7E343E01182B2D0C93D70A969 + 2011-03-01T10:13:14+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:6BB19E6AE743E01182B2D0C93D70A969 + 2011-03-01T10:36:42+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:6CB19E6AE743E01182B2D0C93D70A969 + 2011-03-01T10:39:46+01:00 + Adobe InDesign 7.0 + /;/metadata + + + saved + xmp.iid:72B19E6AE743E01182B2D0C93D70A969 + 2011-03-01T10:42:14+01:00 + Adobe InDesign 7.0 + /;/metadata + + + + + + 2011-03-01T10:44:54+01:00 + 2011-03-01T10:44:56+01:00 + 2011-03-01T10:44:56+01:00 + Adobe InDesign CS5 (7.0) + + + + 1 + JPEG + 256 + 256 + /9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA AQBIAAAAAQAB/+4AE0Fkb2JlAGSAAAAAAQUAAgAg/9sAhAAMCAgICAgMCAgMEAsLCxAUDg0NDhQY EhMTExIYFBIUFBQUEhQUGx4eHhsUJCcnJyckMjU1NTI7Ozs7Ozs7Ozs7AQ0LCxAOECIYGCIyKCEo MjsyMjIyOzs7Ozs7Ozs7Ozs7Ozs7OztAQEBAQDtAQEBAQEBAQEBAQEBAQEBAQEBAQED/wAARCAEA ALUDAREAAhEBAxEB/8QBQgAAAQUBAQEBAQEAAAAAAAAAAwABAgQFBgcICQoLAQABBQEBAQEBAQAA AAAAAAABAAIDBAUGBwgJCgsQAAEEAQMCBAIFBwYIBQMMMwEAAhEDBCESMQVBUWETInGBMgYUkaGx QiMkFVLBYjM0coLRQwclklPw4fFjczUWorKDJkSTVGRFwqN0NhfSVeJl8rOEw9N14/NGJ5SkhbSV xNTk9KW1xdXl9VZmdoaWprbG1ub2N0dXZ3eHl6e3x9fn9xEAAgIBAgQEAwQFBgcHBgI7AQACEQMh MRIEQVFhcSITBTKBkRShsUIjwVLR8DMkYuFygpJDUxVjczTxJQYWorKDByY1wtJEk1SjF2RFVTZ0 ZeLys4TD03Xj80aUpIW0lcTU5PSltcXV5fVWZnaGlqa2xtbm9ic3R1dnd4eXp7fH1+f3/9oADAMB AAIRAxEAPwD1Cqqv02exv0R2Hgkpl6Vf7jfuCSlelX+437gkpXpV/uN+4JKV6Vf7jfuCSlelX+43 7gkpXpV/uN+4JKV6Vf7jfuCSlelX+437gkpXpV/uN+4JKV6Vf7jfuCSlelX+437gkpXpV/uN+4JK V6Vf7jfuCSlelX+437gkpXpV/uN+4JKV6Vf7jfuCSlelX+437gkpXpV/uN+4JKV6Vf7jfuCSlelX +437gkpXpV/uN+4JKYmqv1G+xv0Xdh4tSUyq/m2f1R+RJTNJTkfWnIzMbpFlmGXVjcBfcz6VVUHc 
9urfIcjxkcp0KvVZkJA0eZ+0fVnGoOT0/OyPtQIa213u9R5gbbHVt9k7uZb8+FUyShIkgniXQIBe nOT1c/YGsqfutpL8gljAGuDqB791jdste4w2dfhBsRutUo2ZXXrXY/6Es9XWyGsLGBrqWOLi60O1 G9zI14kchFTodNuy8jFbkZlfovthwq7sEAQT3kgn4FJTaSUpJSklKSUpJSklKSUpJSklKSUpJTRf j9ULyWZbQ2SQPTaYHgkpX2bqsH9cb/22ElKGN1Uc5jT8awkps47L2VxkWC18/SA26fBJTI/zjf6r vytSUqr+bZ/VH5ElM0lLEAiDqCkpz8b6u9ExMk5ePh1MtncHRIaf5DTIZ/ZhNEIg3SqdFOUpJSkl KSUpJSklKSUpJSklKSUpJSklKSUpJSklKSUpJSklMD/ON/qu/K1JSqv5tn9UfkSUzSUgzbvQp3+v VjagepeJZ8Pp1/lSQTQaeRX+0qKW1dQbW+XA2Yzi0PcWEwALTwDu5KSDqN2gH9R9lhy2OqPqPbFr R7G5VTi5z/VIcBUDwBA9qSzXuvUOrVPaLLPQNJqsybb7fUb9C9r3Fn2iNhdBGjfhokocSWjIza6O nvfc3I27PUrrsZvdvrsYNz337bJsGkeHdJIJoIcWnqzsLGrFzxfcS2xxtDi6mxrN91Z+0WyWQII2 88BJAEqWw8rJbZ64tOQyvItssDMitwNIbaAfflFu1u9k6NjTlJQJS5F+bnPysXptwcMi1ppyGXs2 sZXXX6gbse54O7wZ3SSSTdIc67qz8iaMmvHsFVBuofcwem5pc+15bv4bAnxBSRIytdmRl49jNznA CwNFT8lpsscantc335L2mHPY7Tbz9HhJVkIqL+pV1vdfl12MtbWwFmS3d+gdWyzaXObtda0kyDyR MFJAJ7pqaerXObbFwodQ9rDTdvMuddtfL8wjcWuadd/ySSBJs5FPW3YWI3Gb6d1RN1wNpl2w+2ou c6wnfPBdH8pJcRKg08jB+sByfUY651T7rHljbSIZufsBH2mrsRG1zfNJaYytvdRxeq3ZL7cNzmM+ zsrY0vc0Cx7rGvd7bAJY1wMlruNNUl0hInRFVidVc/HblesdlTqrXNuLWe31A2xrq7mOc53tncz7 kkAFF9h60xm1hvLjhNaHG4ujILX79TkCDuI12u8oSRwy/B1+m1ZNOIKssl1jX2DcXF0t3uLCC5z3 RtjkykyRBAbSSVJKYH+cb/Vd+VqSlVfzbP6o/IkpmkpBnY32zDuxN2z1mOZuiY3CJjRJBFho39EN t9mTVkOrtfY+xp9xa3fT6H0N4bI53RPZJaYatez6q1upFDMl4ZW2xtJLRLBbs3TtLQR7Xdh9JJHt aNzL6ZdkZFl9d7a22CmWurLjux3usYZFjdJOohJcY2Wrk/VoZLWE5BZa3e8va3Q2Osda1wbv02l7 oE90lpx23B0s03Y9uHb6Yoa9hZaH2gtf6ejZsbtj09I08kl3DWzVr+rxbjY+NbkbxjvBkNd764bu qO+18B2wfRgeSSBj0Z4fRsnC+zbMprvszXsG+t7pY/05HuvO0/o+2nkkoQIVk9Dfktsq9cNpe+21 o9OXh9zHsdL9+rRvMCPmkowthV9X3Y9zLaMiBXY97Wva8+x4paGEstrkNFMayD3CShjopaOhVt2D KsF7K2uaxgDmtaC6p7WsmxxDW+nxJSUIOjVVXRUympoaytoa1o4AGgCS8aM0lMDbU0wXtBHIJCSl evT/AKRv3hJSvXp/0jfvCSmQIcJBkHghJS6SlJKUkpgf5xv9V35WpKVV/Ns/qj8iSmaSlJKUkpSS lJKUkpSSlJKUkpSSlJKUkpSSlJKc6/oHSMm5999G6yw7nO3vEn4BwSUw/wCbPRP+43/gln/k0lK/ 
5s9E/wC43/gln/k0lMh9XejgQKCB/wAZZ/5NJS7fq/0ljg5tLgWmR+ks5H9tJTopKUkpgf5xv9V3 5WpKVV/Ns/qj8iSmaSmh1nKtw8I3U2soduaN9oc5uvaGNefwSU4P/ODqH/ljif8Abd3/AKQSUr/n B1D/AMscT/tu7/0gkpX/ADg6h/5Y4n/bd3/pBJSv+cHUP/LHE/7bu/8ASCSlf84Oof8Aljif9t3f +kElK/5wdQ/8scT/ALbu/wDSCSlf84Oof+WOJ/23d/6QSUr/AJwdQ/8ALHE/7bu/9IJKV/zg6h/5 Y4n/AG3d/wCkElK/5wdQ/wDLHE/7bu/9IJKV/wA4Oof+WOJ/23d/6QSUr/nB1D/yxxP+27v/AEgk pX/ODqH/AJY4n/bd3/pBJSv+cHUP/LHE/wC27v8A0gkpX/ODqH/ljif9t3f+kElOjg/WTBbRHUMt j7pOtVdm2O3NYSU2W/WTor3BjciS4gAbLOT/AGElOmkpSSmB/nG/1XflakpVX82z+qPyJKZpKcz6 wWCrp5ebfR97ffsFn/RKSnmftrP+5/8A7LNSQr7az/uf/wCyzUlK+2s/7n/+yzUlK+2s/wC5/wD7 LNSUr7az/uf/AOyzUlK+2s/7n/8Ass1JSvtrP+5//ss1JSvtrP8Auf8A+yzUlK+2s/7n/wDss1JS vtrP+5//ALLNSUr7az/uf/7LNSUr7az/ALn/APss1JSvtrP+5/8A7LNSU6OD07N6jR9pxs5hYSW+ 6hoMhJLY/wCb/VP+5tf/AGy3+5JSv+b/AFT/ALm1/wDbLf7klMq+g9Ta9rnZtZAIJHotEx8klO8k pSSmB/nG/wBV35WpKVV/Ns/qj8iSmaSlJKa2fnV9PoF9jLLAXBu2oBztZ7EjwSU0B9Z8M84+UPjW P4OSUv8A85sL/QZP/bf/AJkkpX/ObC/0GT/23/5kkpX/ADmwv9Bk/wDbf/mSSlf85sL/AEGT/wBt /wDmSSlf85sL/QZP/bf/AJkkpX/ObC/0GT/23/5kkpX/ADmwv9Bk/wDbf/mSSlf85sL/AEGT/wBt /wDmSSlf85sL/QZP/bf/AJkkpX/ObC/0GT/23/5kkpX/ADmwv9Bk/wDbf/mSSlf85sL/AEGT/wBt /wDmSSnQw8uvNoGRW17GuJEWDa7TThJSdJSklKSUpJTA/wA43+q78rUlKq/m2f1R+RJTNJSklNbP bnvoA6c+tl24SbZ27dZ4DklNBtX1qA1uw3fEP/gwJKX9P60/6TC+5/8A5FJSvT+tP+kwvuf/AORS Ur0/rT/pML7n/wDkUlK9P60/6TC+5/8A5FJSvT+tP+kwvuf/AORSUr0/rT/pML7n/wDkUlK9P60/ 6TC+5/8A5FJSvT+tP+kwvuf/AORSUr0/rT/pML7n/wDkUlK9P60/6TC+5/8A5FJTZwG9ZbY79pPo dXt9op3TunvuASU3klKSUpJSklKSUpJTA/zjf6rvytSUqr+bZ/VH5ElM0lKSU5vXv2f9hH7S3+j6 gj0+d0OhJTgsP1TA0syGa8e7+ASUy3fVP/TZH3v/ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ALkl K3fVP/TZH3v/ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ALklK3fVP/TZH3v/ALklOpV9W+lXVstY64tsaHNO88ES ElM/+a/TPG7/ALcKSnXSUpJSklKSUwP843+q78rUlKq/m2f1R+RJTNJSklNbPdnsoB6cyt924SLZ 27dZ4LUlNAX/AFp/OxsU/BxH/fykpf1/rP8A9xsb/OP/AJJJSvX+s/8A3Gxv84/+SSUr1/rP/wBx 
sb/OP/kklK9f6z/9xsb/ADj/AOSSUr1/rP8A9xsb/OP/AJJJSvX+s/8A3Gxv84/+SSUr1/rP/wBx sb/OP/kklK9f6z/9xsb/ADj/AOSSUr1/rP8A9xsb/OP/AJJJSvX+s/8A3Gxv84/+SSUr1/rP/wBx sb/OP/kklNvp9nVH+p+0qqqoj0/SJM87pknySU3ElKSUpJSklKSUwP8AON/qu/K1JSqv5tn9UfkS UzSUpJTn9bGEcMfb77MereIfVO7dBgaNckpxGN+roHs6rltH9Zw/9FJKZR9X/wDy2y/893/pJJSo +r//AJbZf+e7/wBJJKVH1f8A/LbL/wA93/pJJSo+r/8A5bZf+e7/ANJJKVH1f/8ALbL/AM93/pJJ So+r/wD5bZf+e7/0kkpUfV//AMtsv/Pd/wCkklKj6v8A/ltl/wCe7/0kkpUfV/8A8tsv/Pd/6SSU qPq//wCW2X/nu/8ASSSlR9X/APy2y/8APd/6SSUlxsTo2ZcKMbqmXZY6SGiwjgSeawkp6RJSklKS UpJSklMD/ON/qu/K1JSqv5tn9UfkSUzSUpJTU6kcoY4+yY7MqzcP0dhAEQdfcQkpy93W/wDynx/8 6v8A8kkpW7rf/lPj/wCdX/5JJSt3W/8Aynx/86v/AMkkpW7rf/lPj/51f/kklK3db/8AKfH/AM6v /wAkkpW7rf8A5T4/+dX/AOSSUrd1v/ynx/8AOr/8kkpW7rf/AJT4/wDnV/8AkklK3db/APKfH/zq /wDySSlbut/+U+P/AJ1f/kklK3db/wDKfH/zq/8AySSlbut/+U+P/nV/+SSU2Onnqhym/aenU41c GbWOYXDTT6LiUlOukpSSlJKUkpSSmB/nG/1XflakpVX82z+qPyJKZpKUkpodaNQxAbst+E3eP0tc zMH2+1JTiN+wu1HX8j5uePylJS8YX/l/f/nu/vSUqML/AMv7/wDPd/ekpUYX/l/f/nu/vSUqML/y /v8A89396SlRhf8Al/f/AJ7v70lKjC/8v7/89396SlRhf+X9/wDnu/vSUqML/wAv7/8APd/ekpUY X/l/f/nu/vSUyrdg12Meeu3vDXBxaXugweDqkp1/290f/uUz8f7klM6us9LvsbTTkMe95hrRMk/c kpupKUkpSSlJKUkpgf5xv9V35WpKVV/Ns/qj8iSmaSlJKaXVvU+yj0sRmc7eP0T4jg+73Skpym/a I9/1epJ8vTH/AHwpKXm3/wCd2r76/wD0mkpU2/8Azu1ffX/6TSUqbf8A53avvr/9JpKVNv8A87tX 31/+k0lKm3/53avvr/8ASaSlTb/87tX31/8ApNJSpt/+d2r76/8A0mkpU2//ADu1ffX/AOk0lKm3 /wCd2r76/wD0mkpU2/8Azu1ffX/6TSUqbf8A53avvr/9JpKZV2ZNTxZV9X62PaZa5rqwQfIitJTs 4lt1+Oy3IpOPY6d1Rdu2wSBqAOQkpMkpSSlJKUkpgf5xv9V35WpKVV/Ns/qj8iSmFjLC8ltz2D91 oZA/zmEpKY+lb/3It+6v/wBJJIpXpW/9yLfur/8ASSSqV6Vv/ci37q//AEkkqlelb/3It+6v/wBJ JKpXpW/9yLfur/8ASSSqQ5Ry6Kt9D7bnlzW7PYNCYJkUu4+CR0SBZa2PnZmRZ6fpZlXtcd1jamtk DQas7pBRHiq7OyqnOaKsywNdtDmNqIOkz/NpDVRGm7A9SywY9HMdLdwAbXP0tsa1Bs8H6XCAukmN dUwycomsEZP6QA6NrlsuLTM1DgalHoikb+oZIYS1uSX/ALhayZgOP+B7T2TRK78F5x1Wu6X7Rlbt rvtDYYXudFZaCG7tvtpJOumgS4vBHB4sHZmU2k2j7Qf0QsjaPpkwWf0ee/h9ycFtN307f+5Fv3V/ 
+kkkUv6Vv/ci37q//SSSqV6Vv/ci37q//SSSqV6Vv/ci37q//SSSqV6Vv/ci37q//SSSqV6Vv/ci 37q//SSSqVQbBkWVvsdY0MY4bg3QuNgP0Wt/dSV1Sn+cb/Vd+VqSVqj+jZ/VH5ElKPKSms5govqF ZI9Z59STM7azH5E2ZICQnkqLjl3XUGFTnOqa5xkkCSkZytVBnJS45d1UERsf6L3TqC4A/AwjxG1U Gb3OFjADoSZ+5DjkqgpjnFzwToHAD/NaUuOSqDKSlxy7q4QxDneq5s6BrTHxLv7kuOVKoMpKXHLu qgxLneo1s6FriR8C3+9LjlSqDKSlxy7qoKkpccu6qDBrnGx4J0ER8wlxyVQR5mUMWoWOFjgXtbFT DYdT4NB00T8YlM1a2chAXS/2tvqOr22EtdtMMMSRuBnw80zil3XcIR353pl1YruJgje2skAw2P8A qkeKQKhEFd2extZt9O4wSNorduO2JMfNN9w913tpG5O8sGx7Q+CC4RyJRE5ELTEBbEe51pc4yTRU Sf7VqnY+rYJ/SN/qu/K1JLGo/o2f1R+RJTLukpq2Pc7Ix9zCyHO5jWa3HSEzJsmO7YUK9HR/Ms/q hE7qCRBSB38xZ/Wd/wBUUeqOiR/87X8T+QpBKmfTs/rD/qWoFTNJTAfzzv6jfyuS6KZpKYH+eZ/V d+ViXRTSyif21gCTBryJHbitT4/5mf0YZ/z0fI/sdBQMyNn87Z/Z/Ij0Uq9rXNG4Aw5vPm4JBRX9 Gn9xv3BCyphdTUKnkMaIaew8EQTamfo0/uN+4JWVKbVW07msaCO4AQsqRYf85/1ir8tqssXVsE/p G/1XflakljUf0bP6o/IkpmkpBkf0jG/rP/6hyZk2THdMoV6Oj+ZZ/VCJ3UEiCkDv5iz+s7/qij1R 0SP/AJ2v4n8hSCVM+nZ/WH/UtQKmaSmA/nnf1G/lcl0UzSUwP88z+q78rEuimjlf8tdP/wCLyPyV qfH/ADE/owT/AJ6Pkf2OioGdGz+ds/s/kR6KXt+iP6zf+qCAUWaSmF38zZ/VP5EhupmkpSSmti/z n/WKvy2qyxdU5P6Rv9V35WpJY1H9Gz+qPyJKauTfkM6t0+iskU3NvNuuhLGhzBHxSUy6g8Y5xniu 20Ne5oZTJdqx2v0gmz2SEP7RH/cLP/zT/wClFCuYVdQHpM/U8/gfmH/0oid1M/2iP+4Wf/mn/wBK IKQnqH6F/wCp53LvzD+8f+ER6qZv6iPUZ+p53J/MPgf+ESUpnURuf+pZ/wBIfmH91v8AwiSmf7RH /cLP/wA0/wDpRBTAdRHqu/Us/wCi38w+Lv8AhEeimf7RH/cLP/zT/wClEFMD1Eeq39Tz/ou/MPi3 /hEeimpk589WwT9jzdK79CwydK+P0imh/My+jDP+ej5H9je/aI/7hZ/+af8A0ooGZg3qI9R/6lnd vzD4f8YipVnURtH6lnfSbyw/vD/hEgpn+0R/3Cz/APNP/pRBTC3qI9J/6nn/AETyw+H/ABiI3Uq3 q1VFb7rcTOZXWC57i0wANSf5xKMTI0ESkIiyser0g1g4md+mMV+w6+0v/wBJ4BHhOvgoyArxbmMf eP8AiKvy2qZZ1TE/pG/1XflakljUf0bP6o/Ikpq5NjW9X6cyRue3IhuxriQGj88+5o+HKSkguvuv o9ag0bXe3c5rt26oud9H906JmTZMd24oV6Oj+ZZ/VCJ3UEiCkDv5iz+s7/qij1R0SP8A52v4n8hS CVM+nZ/WH/UtQKlMuqsssqY4OfUQLGjlpcNwn5ImJAB7oEgSQofzzv6jfyuQ6JZpKYH+eZ/Vd+Vi XRTRyv8Alrp//F5H5K1Pj/mJ/Rgn/PR8j+x0VAzub1C22rKxBW8tFmVWx4H5zTXYYP3KfDEGMr/d 
YcxIMa7/ALC37foj+s3/AKoKAMxZpKYXfzNn9U/kSG6mr1v/AJHzv/C9v/UlS8t/PR8ww81/Mz8i wf8AznSv6x/88WJw/T/l1Ud4fy6JaP5wf8RV+W1OT1Sk/pG/1XflakljUf0bP6o/IkphaC7Ow3ct YLT9KNXAAGPzoE/fKSl+pWnHay8Dcaha8A8EtreY/BDh4iB4oMuEEpcW45GNTe4bTbW15A4BcAYU M48MiF8JcUQV6P5ln9UIHdcEiCkDv5iz+s7/AKoo9UdEj/52v4n8hSCVM+nZ/WH/AFLUCppYH/Kf U/69P/npqny/zUPr+bDi/nJ/T8m6P5539Vv5XKDozM0lNDrGTbh4V2VQQLKqXuaSJEgs7KXl4CeQ RPdi5iZhjMh0DDJM9Y6cT3qyPyVp8P5mfmP2rZ/z0fI/sb9d1Vrnsre17qzteAZLXRMFQGJAFhmE gSaLndT/AKXg/wDhyv8A892qfB8s/wC7+0MOf5of3v2F0bfoj+sz/qgq4Z2aSmF38zZ/VP5Ehupq 9b/5Hzv/AAvb/wBSVLy389HzDDzX8zPyLB/850r+sf8AzxYnD9P+XVR3h/Lokp/nB/xFX5bU5PVI T+kb/Vd+VqSWNR/Rs/qj8iSmL8e+3NxL2kimltvqAOEOLxtALSPyJKa/VMWqjEIYXkenb9J7nfRo c0cnwajD54+YW5PkPkvTdVi4HTy5rnev6NIhxEF7eefJMlAzyT8LKIz4McfGm5TUw1MOvA/Od/eo SdWdn6Nfn/nO/vStVIXVM9Cw6/Sd+c794+aN6oSPqZ6lfOpP5zvA+aAKmnlWuozcWmv6ORe6uyS6 YFJeI18QpccRKEieg/axZJmM4juf2FbBrYepdSBmA+n84/6JvmjlP6uH1/NWL+cn9PyZ597cLbYG F/qWU0wXuEeq8snvxKbih7hI8CU5cnti/ED7W56Nfn/nO/vUVstOd9YK2t6XlET/ADD+5PdniVY5 Q/ro+bBzf8zLyXyK2ftfp411qv8Azj4V+aUD+pn5j9qp/wA9HyP7GPSa2HM6oDOmUO5/0bPNO5g/ q8fks5b+cyf3v2K6nW0ZWEBP9LrHJ/0dvmm4D6Z/3f2hfn+aH979hbLrsZ2S/Dbu9WoVvdq6Nr3Q NZ8lHwSEBLoV4yRMzHqGz6Nfn/nO/vTLZKc3NLmdWx6GveK3417nM3OgluyCRPaVYxgHBI9bDWnI jmIi9KKTq/oP6R1A1O3Gum1rocTDgwmDqm8uCM0L7hdzJBwzrsfyWfWzf0vnVx/OP+gf5pA6T/l1 Sd4fy6JqtLG/8RV+W1OT1Zk/pG/1XflakljUf0bP6o/Ikpt1fzY+aSnN6m+9+K/1qxVFVu33bpmh 5cPkdEY/PHzC3J8h8kN3/J/Rv+Oxv+oKUP5zJ5SWH+bh9HVo/mWf1Qqx3bISIKQO/mLP6zv+qKPV HRI/+dr+J/IUhslzeof8p9P/APDT/wD23cp8P83Py/a18385Dz/7kpcD/lPqf9en/wA9NQy/zUPr +a7F/OT+n5Iuu/Qp/wDDOJ/59R5X5j/dl+S3mvlH96P5uqq7Ycz6w/8AJWV/xFn5WKflP56Pmwc3 /My8l8j/AJY6d/xV/wCStGH8zPzH7VT/AJ6Pkf2Mekf03qn/AIaH/nutHmf5vH/dWct/OZP737Ar qf8AS8H/AMOV/wDnu1DB8s/7v7Qvz/ND+9+wsGf8v5f/ABON/wCfHp0v9zR8ysh/umX90ftddVW0 5Gf/AMt43/hXI/8ARatY/wDc8v7wauT/AHTH+6f2ILf+S+vf8Zkf+emJ4/ncX0/NZP8Amsv1/JvP /nOlf1j/AOeLFCP8p/LqzneH8ujJn843/iKvy2pyerIn9I3+q78rUksaj+jZ/VH5ElNyj+ab8/yp 
KafWv6Kf+Lv/APPT0Y/PHzC3J8h8mpd/yf0b/jsb/qClD+cyeUlh/m4fR0cbIqc77K0/pa62Pe2D o1+4NM8fmlV5QIHF0ZxMcXD1RZ3UBT05+fiFtwBaGmZaZeKzx8VJiwk5OGWiyeYDHxR1a9z7R1aq kOd6bsfJcWSdpItrgkfNOiB7JPiPyWkn3gP6p/Yjzsq+7pnUHk7XUOyK2OZLSAxvt1nlPx44xzQH emPLkkcEz2tV0nJ6STJJu1J/8LOSjtk/l+kmW+Pz/wC5LYwpHUeqEch9R/8AAmqPL/Nw+v5r8Z9c /wCXRoXZN2b0rCyr49S2/ELtogT6x4GqnjAY80ojtL8mGUzPDGR7x/N3vUZ6npbh6gG7ZPu2kxMe CpcJq25YunM6/YyzpWWa3B22mxrtpmHAskHzCn5UEZo33YOaIOGXkzyAf2x07/isj8laUP5mfmP2 pn/PR8j+xh0pwZldWe7RrcmSfACtidzAuGPy/as5Y1PJ/e/YFs+xl13T7qjursyqnMcOCDVaQQhh iYiYP7v7QuyyEjAjv+wqYD+38v8A4nG/8+PRl/uaPmVsP90y/uj9qRnVnjpY6jZUCTb6Wxpga3ei DJ+9NPLj3eAHp+y14zn2+IjrX40jzwf25jf+Fcj/ANFp2P8A3PL+8Fk/90x/un9jXt/5L69/xmR/ 56Ynj+dxfT81k/5rL9fybzwfU6V/WP8A54sUI/T/AJdQzneH8ui7f5xv/EV/lsTk9Vyf0jf6rvyt SSxqP6Nn9UfkSU3aNaR5z+VJTXycHDGM9vpVwA2BY4sbLW+m0F39XRLZR1aj8bAaDZ9jf9mpIiwv dyJaPRrDieYHblGzZPdHCKAW9DDY50YL/VefTqY2wmx4aCZsO+GN8JKF6Umhdo/T6a2pgdhRQDtv cHuFVbgTIa120vO5vZvKPGeK71RwRAqtE4xMOxn2kYFxaQAwFzhcd593tc8bW9zJ+SF6Umhd9Vsn CwsbC9bIxa5c79Kw3FrAHE7nOc4+7286ao8RsFHDEgitCwbV042S/DcxjZNJe93qPI9oNVU7hpzx AQs6+KuEaeDCMMCwMwicky81NtIho0Bvs3bQdDproldpoLto6Y5rC3Eb9lraHuuDyapb2oaDLiHn naEeI3fVbwRqq0Xqbg2WPH2F7b9GMYHuN206g2wf0bfi5DpXRdQu+rBzelDHNlmNWyoO25L32OFD S7Vwlxb6jvgOUeI3aOCNEVoUjR0x72ufiua4tJpaXvN5BA4r5aDHchC9KTQu1NowwxxGEBaXO9UN udsa1o1dfb9GY7alEkmvBAiBfixrZ0l9dV9WJOPI9Ozc8TAIHoV/SPgONEuI6+KuCNDTbZmMbHDH 5FnTn1uLwxo9Q7y1p+lY7dDWjt7iheldFcIu+rGqvpNtQ2YrTQHEvfvcKRtPuczdG+HRqBz3R4jd 9VcEaqtFbemuv224b2vc0+iC577nNJ9x2MLnMZ8T8kAaFdFcIJvqqirp+VWBj4YudYSLwyxxrafz g9/0XGNDzrojxGx4K4RRHdtDpmM7J2fZiKax7bHWOkuIEem3doIJBQ7poMrmtrydjdGtprA+AdYk jqwJ/SN/qu/K1JLCo/o2f1R+RJTex7KxS0OcAddCfNJS9jcS4tN3pv2GW7oME/FJTDJZXk1eiMh1 An3Gpwa4iPoh3b5JKZUMxMdgrp2Ma0BoiJgeKSmTvsz3tsfsc9n0XGCW/ApKZerV++37wkpDkl9r Ayi2queXvAcWxwWt4keaSmt9i/SR9qLaRDpDybnO7k2l2g0HtGiSmWTh0vrZXjGluzRrbRvraYP6 TZI3P+JSUmx6MbHcbvUFl7wGvteRuIHYRo0eQSUnFlImHNEmTBGpSUhOPhOyftdm2ywABm8hwZAI 
lgPB15SU16+n0VuLBduofLrQ5022OJn32zJb5JKT3OcxjKcP02NcSHv3AemD+c1se4n4hJSqqcem bBYLbo+na8u1gcSTtBjskpBZj3ZVe7KyGCyDFLHH0Jn88Atc/TsdElJcfGxaXi6ywXX97Xkac/Qb w0a9klLvxcCyv0nRs3BzgHxvjgPg+4fFJSdr6GNDGOY1rdAAQAPkkpf1av32/eElNLJcDlFzTI9N gkcSHWf3pI6oif0jf6rvytSSxqP6Nn9UfkSU18zqNGGwvtcGiYEzJPgAAfFJbKQju0f+cuJ5/wCa UlvvRV/zlxPP/NKSveir/nLief8AmlJXvRV/zlxPP/NKSveir/nLief+aUle9FX/ADlxPP8AzSkr 3oq/5y4nn/mlJXvRV/zlxPP/ADSkr3oq/wCcuJ5/5pSV70Vf85cTz/zSkr3oq/5y4nn/AJpSV70V f85cTz/zSkr3oq/5y4nn/mlJXvRV/wA5cTz/AM0pK96Kv+cuJ5/5pSV70Vf85cTz/wA0pK96Kv8A nLief+aUle9FX/OXE8/80pK96LJn1jwnPDXO2gmC4tdA+MApK92LqMeHtDh3AI76ESCPiEmRRP6R v9V35WpKYVH9Gz+qPyJKeb+spJyMYf8AAk/P1bh/BJr5vmcgQTBMDx5RWRiD1Zba/wB/8Cku9uPd W2v9/wDApK9uPdW2v9/8Ckr2491ba/3/AMCkr2491ba/3/wKSvbj3Vtr/f8AwKSvbj3Vtr/f/ApK 9uPdW2v9/wDApK9uPdW2v9/8Ckr2491ba/3/AMCkr2491ba/3/wKSvbj3WIZ+9+BSV7ce6+2v9/8 Ckr2491ba/3/AMCkr2491ba/3/wKSvbj3Vtr/f8AwKSvbj3WIYBo6T4QkiUABuxSWPZdFM9KxSeT W7X4W2gfgEG1i+UNon9I3+q78rUl7Go/o2f1R+RJTk9b6bdmenZRtL6mlmwkN3MJdYCC4xILjp8E mLLAy1Dj/sjqH+i/6Tf/ACSTF7cuyv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R1D/R f9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R 1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cu yv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSS V7cuyv2R1D/Rf9Jv/kkle3Lsr9kdQ/0X/Sb/AOSSV7cuyv2R1D/Rf9Jv/kkle3Lsyr6L1Gx230w3 xLnsA/6pJXtyeow6PsuLVjB24VN27oiSS57jr/KcYSbERwikhP6Rv9V35WpLkVd1fps97fojuPBJ TL1av32/eElLepT+837wkpXqU/vN+8JKV6lP7zfvCSlepT+837wkpXqU/vN+8JKV6lP7zfvCSlep T+837wkpXqU/vN+8JKV6lP7zfvCSlepT+837wkpXqU/vN+8JKV6lP7zfvCSlepT+837wkpXqU/vN +8JKV6lP7zfvCSlepT+837wkpXqU/vN+8JKV6lP7zfvCSl/Vq/eb94SUr1q/32/eElMTdX6jfe36 Lu48WpKf/9k= + + + 2 + JPEG + 256 + 256 + /9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA AQBIAAAAAQAB/+4AE0Fkb2JlAGSAAAAAAQUAAgAg/9sAhAAKBwcHBwcKBwcKDgkJCQ4RDAsLDBEU EBAQEBAUEQ8RERERDxERFxoaGhcRHyEhISEfKy0tLSsyMjIyMjIyMjIyAQsJCQ4MDh8XFx8rIh0i 
KzIrKysrMjIyMjIyMjIyMjIyMjIyMjI+Pj4+PjJAQEBAQEBAQEBAQEBAQEBAQEBAQED/wAARCAEA ALUDAREAAhEBAxEB/8QBogAAAAcBAQEBAQAAAAAAAAAABAUDAgYBAAcICQoLAQACAgMBAQEBAQAA AAAAAAABAAIDBAUGBwgJCgsQAAIBAwMCBAIGBwMEAgYCcwECAxEEAAUhEjFBUQYTYSJxgRQykaEH FbFCI8FS0eEzFmLwJHKC8SVDNFOSorJjc8I1RCeTo7M2F1RkdMPS4ggmgwkKGBmElEVGpLRW01Uo GvLj88TU5PRldYWVpbXF1eX1ZnaGlqa2xtbm9jdHV2d3h5ent8fX5/c4SFhoeIiYqLjI2Oj4KTlJ WWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+hEAAgIBAgMFBQQFBgQIAwNtAQACEQMEIRIxQQVRE2Ei BnGBkTKhsfAUwdHhI0IVUmJy8TMkNEOCFpJTJaJjssIHc9I14kSDF1STCAkKGBkmNkUaJ2R0VTfy o7PDKCnT4/OElKS0xNTk9GV1hZWltcXV5fVGVmZ2hpamtsbW5vZHV2d3h5ent8fX5/c4SFhoeIiY qLjI2Oj4OUlZaXmJmam5ydnp+So6SlpqeoqaqrrK2ur6/9oADAMBAAIRAxEAPwDomgaBoU2habNN ptpJJJaQO7vBGzMzRoSzEpUknFUw/wAOeXv+rVZf9I8X/NGKu/w55e/6tVl/0jxf80Yq7/Dnl7/q 1WX/AEjxf80Yq7/Dnl7/AKtVl/0jxf8ANGKu/wAOeXv+rVZf9I8X/NGKu/w55e/6tVl/0jxf80Yq 7/Dnl7/q1WX/AEjxf80Yq7/Dnl7/AKtVl/0jxf8ANGKu/wAOeXv+rVZf9I8X/NGKu/w55e/6tVl/ 0jxf80Yq7/Dnl7/q1WX/AEjxf80Yq7/Dnl7/AKtVl/0jxf8ANGKu/wAOeXv+rVZf9I8X/NGKu/w5 5e/6tVl/0jxf80Yq7/Dnl7/q1WX/AEjxf80Yq7/Dnl7/AKtVl/0jxf8ANGKu/wAOeXv+rVZf9I8X /NGKu/w55e/6tVl/0jxf80Yq7/Dnl7/q1WX/AEjxf80Yq7/Dnl7/AKtVl/0jxf8ANGKu/wAOeXv+ rVZf9I8X/NGKu/w55e/6tVl/0jxf80Yq7y5/yj2lf8wVv/yaTFUyxV2KuxV2KuxV2KuxV2KuxV2K uxV2KuxV2KuxV2KuxVASW2qtIzR3iohYlVMamgJ2FfbFVv1bV/8AltT/AJFDFXC11gDe+U+5iXFU XbJcRx8bmQTPX7QXjt4UxVWxVLfLn/KPaV/zBW//ACaTFUyxVA6rqtvo9utzcq7o7iMCMAmpDN+0 y/y4qlP+OdJ/3zc/8Cn/AFVxV3+OdJ/3zc/8Cn/VXFXf450n/fNz/wACn/VXFXf450n/AHzc/wDA p/1VxV3+OdJ/3zc/8Cn/AFVxV3+OdJ/3zc/8Cn/VXFU+tLmO9toruIEJModQ1AaHxoTiqtirsVdi rsVdirsVdirsVdirsVdirsVdiqW+XP8AlHtK/wCYK3/5NJiqZYqlXmGR47JGSeG2Pqgc7hean4X2 A4vvirHPrU//AFc9O/5Ej/qhhQ761P8A9XPTv+RI/wCqGKu+tT/9XPTv+RI/6oYq761P/wBXPTv+ RI/6oYq761P/ANXPTv8AkSP+qGKu+tT/APVz07/kSP8Aqhiqquq6iihU1qyVRsAI6Af8kcCV36X1 T/q92f8AwB/6pYq79L6n/wBXuz/4A/8AVLFXfpfU/wDq92f/AAB/6pYqmEUHmuaNJotQtmSRQ6ME 2IIqD/d4qu+qeb/+W+2/4D/r3irvqnm//lvtv+A/694qqW9r5pWeNri9geEOpkVUoSlfiA/djtiq 
d4q7FXYq7FXYq7FUt8uf8o9pX/MFb/8AJpMVTLFVG5gtbiPjdxxyxp8dJVDKCAfi+LbpiqW/86n/ ANq7/kjirv8AnU/+1d/yRxV3/Op/9q7/AJI4q7/nU/8AtXf8kcVRSaVo0iLJHZ2ro4DKyxRkEHcE ELiq79D6T/yw23/IpP8AmnFXfofSf+WG2/5FJ/zTirv0RpP/ACw2/wDyKT/mnFXfofSf+WG2/wCR Sf8ANOKu/Q+k/wDLDbf8ik/5pxVFIiRoscahEQBVVRQADYAAYquxV2KuxV2KuxV2KuxV2KuxVLfL n/KPaV/zBW//ACaTFUyxVSuBW3lAUNVG+FjQHY7E1GKsH+pz/wDVpsv+kkf9lWFDvqc//Vpsv+kk f9lWKu+pz/8AVpsv+kkf9lWKoiwsIpbuNNQ02zgtjXnItxUj4Tx2Fw3enbAllcVxp8ESQxTRLHGo RF5jZVFAOuKr/rtn/wAtEX/Br/XFXfXbP/loi/4Nf64q767Z/wDLRF/wa/1xV312z/5aIv8Ag1/r irvrtn/y0Rf8Gv8AXFXfXbP/AJaIv+DX+uKu+u2f/LRF/wAGv9cVd9ds/wDloi/4Nf64q767Z/8A LRF/wa/1xVWBBAINQdwRireKuxV2KuxV2Kpb5c/5R7Sv+YK3/wCTSYqmWKqdwOVvKvEvVGHAGhbb pX3xVhP6MX/qXp/+khv+qeKHfoxf+pen/wCkhv8Aqnirv0Yv/UvT/wDSQ3/VPFXfoxf+pen/AOkh v+qeKu/Ri/8AUvT/APSQ3/VPFXfoxf8AqXp/+khv+qeKu/Ri/wDUvT/9JDf9U8Vd+jF/6l6f/pIb /qnirv0Yv/UvT/8ASQ3/AFTxV36MX/qXp/8ApIb/AKp4q79GL/1L0/8A0kN/1TxV36MX/qXp/wDp Ib/qnirv0Yv/AFL0/wD0kN/1TxV36MX/AKl6f/pIb/qniqcx61rEUaxpocoVAFH73sBT/feKU5sZ 57m1Se4gNrK9eULHkVoSBvQdQK4qiMVdirsVdiqW+XP+Ue0r/mCt/wDk0mKpliqyRBLG8bVAdSpp 1oRTFUj/AMHaZ/v25/5GD/mnFXf4O0z/AH7c/wDIwf8ANOKu/wAHaZ/v25/5GD/mnFXf4O0z/ftz /wAjB/zTiqN0zQ7TSpHkt3lcyLxPqNyFAa7bDFUyxV2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV2Kux VLfLn/KPaV/zBW//ACaTFUyxVL9Zu9QsrVJdOt/rUpkCslGaikMS3wkdwMVST9Peav8Aq1f8k5P+ asVd+nvNX/Vq/wCScn/NWKtrrvmgsA2lUBIqfTk/5qxVlWKuxV2KuxV2KuxV2KuxV2KuxV2KuxV2 KuxV2KuxV2Kpb5c/5R7Sv+YK3/5NJiqZYqk/mVYmsYxKty6+sNrMgPXi/WoPw4qxn07P/fOsfev/ AFTxQ707P/fOsfev/VPFXenZ/wC+dY+9f+qeKu9Oz/3zrH3r/wBU8Vd6dn/vnWPvX/qnirvTs/8A fOsfev8A1TxV3p2f++dY+9f+qeKu9Oz/AN86x96/9U8Vd6dn/vnWPvX/AKp4q4R2YP8Acax96/8A VPFXenZ/751j71/6p4q707P/AHzrH3r/ANU8Vd6dn/vnWPvX/qnirvTs/wDfOsfev/VPFXenZ/75 1j71/wCqeKq1nNa2VzHcpbarI0RJCyUKmopuOAxVkNhr/wBeultvqVxBzBPqSrRRQV3OKU3xV2Ku xVLfLn/KPaV/zBW//JpMVTLFUHqVlLfwLDFcy2bK4f1ISQxABHHYjbfFUs/w3ef9Xm8/4Nv+a8Vd /hu8/wCrzef8G3/NeKu/w3ef9Xm8/wCDb/mvFXf4bvP+rzef8G3/ADXirv8ADd5/1ebz/g2/5rxV 
3+G7z/q83n/Bt/zXirv8N3n/AFebz/g2/wCa8Vd/hu8/6vN5/wAG3/NeKu/w3ef9Xm8/4Nv+a8Vd /hu8/wCrzef8G3/NeKu/w3ef9Xm8/wCDb/mvFXf4bvP+rzef8G3/ADXirv8ADd5/1ebz/g2/5rxV OrWFre3jgeRpmjUKZHNWanc4qq4q7FXYq7FXYq7FUt8uf8o9pX/MFb/8mkxVMsVS/WdV/RFqlz6J uOcgj4qaEVDNXof5cVST/G//AGr5P+D/AObMVd/jf/tXyf8AB/8ANmKu/wAb/wDavk/4P/mzFXf4 3/7V8n/B/wDNmKu/xv8A9q+T/g/+bMVZXirsVdirsVdirsVdirsVdirsVdirsVdirsVdiqW+XP8A lHtK/wCYK3/5NJiqZYqlPmOcW9jG5vHsKygerGhkJ+F/hoCvzxVjf6TX/qYZ/wDpHb/qpih36TX/ AKmGf/pHb/qpirv0mv8A1MM//SO3/VTFXfpNf+phn/6R2/6qYq79Jr/1MM//AEjt/wBVMVd+k1/6 mGf/AKR2/wCqmKu/Sa/9TDP/ANI7f9VMVd+k1/6mGf8A6R2/6qYq79Jr/wBTDP8A9I7f9VMVd+k1 /wCphn/6R2/6qYq79Jr/ANTDP/0jt/1UxV36TX/qYZ/+kdv+qmKu/Sa/9TDP/wBI7f8AVTFXfpNf +phn/wCkdv8Aqpirv0mv/Uwz/wDSO3/VTFXfpNf+phn/AOkdv+qmKp75auPrAuWGoSaiFKD95GY+ H2ulWatcUp7irsVdiqW+XP8AlHtK/wCYK3/5NJiqZYqlXmGZ4LJHS6SyJlA9SWP1Afhf4ePB8VY5 +kJ/+r5bf9Iw/wCqGKHfpCf/AKvlt/0jD/qhirv0hP8A9Xy2/wCkYf8AVDFXfpCf/q+W3/SMP+qG Ku/SE/8A1fLb/pGH/VDFXfpCf/q+W3/SMP8Aqhirv0hP/wBXy2/6Rh/1QxV36Qn/AOr5bf8ASMP+ qGKu/SE//V8tv+kYf9UMVd+kJ/8Aq+W3/SMP+qGKu/SE/wD1fLb/AKRh/wBUMVd+kJ/+r5bf9Iw/ 6oYq79IT/wDV8tv+kYf9UMVd+kJ/+r5bf9Iw/wCqGKu/SE//AFfLb/pGH/VDFXfpCf8A6vlt/wBI w/6oYqyfQ5kn09XE6XUgZlkmjT0wxBqPh4r0UjtilMcVdirsVS3y5/yj2lf8wVv/AMmkxVMsVSrz CzrZIUktoj6o+K8FY/svsPhb4sVY56k//LXo3/AD/qjih3qT/wDLXo3/AAA/6o4q71J/+WvRv+AH /VHFXepP/wAtejf8AP8AqjirvUn/AOWvRv8AgB/1RxV3qT/8tejf8AP+qOKu9Sf/AJa9G/4Af9Uc VRdvp+sXcfrWzaVLHUjkkQIqP+eOKVX9C+YP5dM/5Ff9ecVd+hfMH8umf8iv+vOKu/QvmD+XTP8A kV/15xV36F8wfy6Z/wAiv+vOKu/QvmD+XTP+RX/XnFXfoXzB/Lpn/Ir/AK84q79C+YP5dM/5Ff8A XnFXfoXzB/Lpn/Ir/rziqYaTaa3azcbxrUWtCeFspU8zSh/u1GKpxirsVdiqW+XP+Ue0r/mCt/8A k0mKpliqGv4rGW2ZtRRHgi/eH1BULQEcvxxVJfV8l+Fr/wAD/ZirvV8l+Fr/AMD/AGYq71fJfha/ 8D/ZirvV8l+Fr/wP9mKu9XyX4Wv/AAP9mKu9XyX4Wv8AwP8AZirvV8l+Fr/wP9mKo6x1TQIylnYT RJzaiRoCKs30YqmuKuxV2KuxV2KuxV2KuxV2KuxV2KuxVLfLn/KPaV/zBW//ACaTFUyxVC6jX6hP T0q8D/f09L/Z8tqYqxH99/2ov+SWKHfvv+1F/wAksVd++/7UX/JLFXfvv+1F/wAksVd++/7UX/JL 
FXfvv+1F/wAksVd++/7UX/JLFVySXMTrJE2iI6GqspjBBHcEYqiv0xrn/Ldpf/I1f+asUoqOTzfK iyRNZOjCqspJBHiCMVTTS/0twk/S3pc6j0/RrSnetcVR2KuxV2KuxV2KuxV2KuxV2KpX5cP/ADr2 lf8AMFb/APJpMVTKuKobUQWsZ1CJKShASQ0RvZjUbYqxL6rP/wBWzTv+Rw/6rYUO+qz/APVs07/k cP8Aqtirvqs//Vs07/kcP+q2Ku+qz/8AVs07/kcP+q2Ku+qz/wDVs07/AJHD/qtirvqs/wD1bNO/ 5HD/AKrYq76rP/1bNO/5HD/qtirvqs//AFbNO/5HD/qtiqa2Gm6LJbK2oW1pDOSeSJICoFdt+ZwJ TiG40+3iWGGaJI4xxVQ4oAPpxVf9es/+WiL/AINf64q769Z/8tEX/Br/AFxV316z/wCWiL/g1/ri rvr1n/y0Rf8ABr/XFXfXrP8A5aIv+DX+uKro7mCU8YpUcgVorAmn0YqqVxV1cVdXFXVxVK/Lh/51 7Sv+YK3/AOTSYqmVcVQ2oIZbGeMRfWOSEejUrz/yeQpTFWJ/ot/+pe/6eX/5rwod+i3/AOpe/wCn l/8AmvFXfot/+pe/6eX/AOa8Vd+i3/6l7/p5f/mvFXfot/8AqXv+nl/+a8Vd+i3/AOpe/wCnl/8A mvFXfot/+pe/6eX/AOa8VVbbSFkuI459B9KJmAeT6w54g9TTngVOf8LaD/yy/wDJST/qpil3+FtB /wCWX/kpJ/1UxV3+FtB/5Zf+Skn/AFUxV3+FtB/5Zf8AkpJ/1UxV3+FtB/5Zf+Skn/VTFXf4W0H/ AJZf+Skn/VTFXf4W0H/ll/5KSf8AVTFURY6Lpmmymeyh9KRlKFubt8JINPiY+GKo+uKurirq4q6u KpX5cP8Azr2lf8wVv/yaTFUyriqF1JRJYXCNG0waNgY0NGbboNjirEP0dB/1Zbz/AJGH/qnhV36O g/6st5/yMP8A1TxV36Og/wCrLef8jD/1TxV36Og/6st5/wAjD/1TxV36Og/6st5/yMP/AFTxV36O g/6st5/yMP8A1TxV36Og/wCrLef8jD/1TxV36Og/6st5/wAjD/1TxVMNN8u6deo7XFlcWZQgBZHN Wr3HwjAqM/who38sn/B/2Yq7/CGjfyyf8H/Zirv8IaN/LJ/wf9mKu/who38sn/B/2Yq7/CGjfyyf 8H/Zirv8IaN/LJ/wf9mKu/who38sn/B/2YqiLHy7pun3K3VsHEiAgcmqNxQ4qmtcVdXFXVxVKvLh /wCde0r/AJgrf/k0mKplXFULqN/+j4Vm9GS45ME4xDkwqCa/LbFUu/xN/wBq67/5F4q7/E3/AGrr v/kXirv8Tf8Aauu/+ReKu/xN/wBq67/5F4q7/E3/AGrrv/kXirv8Tf8Aauu/+ReKu/xN/wBq67/5 F4q7/E3/AGrrv/kXirv8Tf8Aauu/+ReKu/xN/wBq67/5F4q7/E3/AGrrv/kXirv8Tf8Aauu/+ReK u/xN/wBq67/5F4q7/E3/AGrrv/kXirv8Tf8Aauu/+ReKu/xN/wBq67/5F4qmVhe/XrcT+k8FSRwl FG270xVE1xV1cVdXFUr8uH/nXtK/5grf/k0mKplXFUr8wSrHZozzz249UDnbfbPwvsfiXbFWPfW4 f+rjqf4/9VMUO+tw/wDVx1P8f+qmKu+tw/8AVx1P8f8AqpirvrcP/Vx1P8f+qmKu+tw/9XHU/wAf +qmKu+tw/wDVx1P8f+qmKu+tw/8AVx1P8f8AqpirvrcP/Vx1P8f+qmKu+tw/9XHU/wAf+qmKu+tw /wDVx1P8f+qmKq9kFv7hbaHU9RV2BILkgbCvX1DiqZ/oC6/6u13/AMG3/NWKXfoC6/6u13/wbf8A 
NWKu/QF1/wBXa7/4Nv8AmrFUXp+mzWMrSSXs90GXiFmYkDetRUnFUwrirq4q6uKurirq4qlXlw/8 69pX/MFb/wDJpMVTKuKoe+1C206IT3TFUZggIBO5BPb5YqgP8U6P/v5v+Ab+mKHf4p0f/fzf8A39 MVd/inR/9/N/wDf0xV3+KdH/AN/N/wAA39MVd/inR/8Afzf8A39MVd/inR/9/N/wDf0xV3+KdH/3 83/AN/TFXf4p0f8A383/AADf0xVv/FOj/wC/W/4Bv6Yq1/inR/8Afzf8A39MVd/inR/9/N/wDf0x V3+KdH/383/AN/TFU2VgwDDoRUYpbrirq4q6uKurirq4q6uKuriqV+XD/wA69pX/ADBW/wDyaTFU yriq1+BHxgEDffFUO91p0bFJJIEZSQVd40YEbbq7A4seILfrumf7+tv+RsP/ADXivEO9313TP9/W 3/I2H/mvFeId7vrumf7+tv8AkbD/AM14rxDvd9d0z/f1t/yNh/5rxXiHe767pn+/rb/kbD/zXivE O9313TP9/W3/ACNh/wCa8V4h3u+u6Z/v62/5Gw/814rxDvd9d0z/AH9bf8jYf+a8V4h3u+u6Z/v6 2/5Gw/8ANeK8Q73fXdM/39bf8jYf+a8V4h3u+u6Z/v62/wCRsP8AzXivEO9f+krH/lpg/wCR0X/V TFeId7v0lY/8tMH/ACOi/wCqmK8Q73fpKx/5aYP+R0X/AFUxXiHe79JWP/LTB/yOi/6qYrxDvXxX cE1fSdZADTkjBlqe3JSRXFIIKtXFLq4q6uKpV5cP/OvaV/zBW/8AyaTFUyriqX67PLb6TdTwtweN Kgjr8TLHt4Ec64sMhqJeb9dzhcNsBaVZ+J8KYtkYwI5upH/vwfcf64p4Id7qR/78H3HFeCHe6kf+ /B9xxXgh3t8Y/wDfg+44rwQ73cU/35/wpxXhx97uKf78/wCFOK8OPvdxT/fn/CnFeHH3rSADQGo8 emLXIAHZrFDsVdirsVdirsVTXy7cyQakiKTwmV0dQaA0UspPyYA4tmE1J6GrVUE9wDgctuuKuriq V+XD/wA69pX/ADBW/wDyaTFUxriqXeYT/uEvf+Ma/wDJ2LFry/QXnWFxFyGjV5cdutK4tmI11pfz /wCLT/wP9mLbxf0nc/8Ai0/8D/ZivF/Sdz/4tP8AwP8AZivF/SaZlYUaQkf6uLGVSG8mqR/z/wDC nFj4cf5zqR/z/wDCnFfDj/OdSP8An/4U4r4cf5zqR/z/APCnFfDj/OaYIB8Lcj4UIxQYRA5rcWt2 KuxV2KuxVH6J/wAdOD/Zf8QbFni+oPRoz8C/IYHMbrirq4qlflw/869pX/MFb/8AJpMVTKuKoXUb b67ZTWvLgJl4k0qBuGBNAT9oDpixlHiFMEl0XUopGjaAsVNKqVYH5EHFxTil3LBpWojf6u30gHFl ATj0b/Repf8ALOfuXFlxZO536L1L/lnP3LivFk7nfovUv+Wc/cuK8WTud+i9S/5Zz9y4rxZO536L 1L/lnP3LivFk7nfovUv+Wc/cuK8WTud+i9S/5Zz9y4rxZO536L1L/lnP3LivFk7nfovUv+Wc/cuK 8WTuaOlaif8Aj3b6AB+rFjMTl0d+idR/5Z2/D+uLDw5dzv0TqP8Ayzt+H9cV8OXc79E6j/yzt+H9 cV8OXc79E6j/AMs7fh/XFfDl3JroWi3SXS3NwvAIGCJyBYsV41IFaAcq79cWzFiINlmS7KB4CmLk t1xV1cVSry4f+de0r/mCt/8Ak0mKplXFXVxVaVQ7lQfoxV3FP5R92Ku4p/KPuxV3FP5R92Ku4p/K PuxV3FP5R92Ku4p/KPuxV3FP5R92Ku4p/KPuxV3FP5R92Ku4p/KPuxV3FP5R92Ku4p/KPuxV3FP5 
R92KqU8iQgH0w1fkP4ZPHj4ywyT4Ao/XE/3yPvH/ADTlv5bzavzPkq29wsrFQgSgrt/tZXkxcHVn jy8aIrlba6uKuriqVeXD/wA69pX/ADBW/wDyaTFUyrirTOqjk2wGKrC3rIDDJxAchmAr9nkrDf8A yhkZAnkyiQOaXatb63cW8y6bcLayhgYW2NVHOoblGetR8vfvKfIU2aSWOOT94LimpYDr32GLS6uK urirq4qll7qN99b+oaZbrNIqh5ZpTSOMGvEEDc1p2/rQmPptgZHioL7a+vVuEtNQhVXkBKywklDx Ar1JNN6b96bZTHJISAkObZWyp+lIBz5pInF2jWq15lPUB4cS3X0jStPxGWoa/SturPHIrxurcEVg Kyn94FEdGNS3pNQGh2+WKo0NUV6Yq6uKurirq4qgNYuks7N7qT7MKlyK0qAOnzy/Si5U06jkEJay Rzxi6jQoJwGHIUYinwkj5ZlHbZxUdZmkh/1f4jMfU8g36fmUZXMZyXVxV1cVSry4f+de0r/mCt/+ TSYqmVcVWyIsqhW6BlYfNSGH4jFUINMg5Hl9jm78F2D+p8b+pT7XxszeFTitq8VtFFzALEPWoJ23 VVO3+xr9JxVVAAbkCe+3bent7Yq3XFXVxV1cVSjWNHuL6RbrT72SwuVHFmTdXXsGG3T/AD7UGQGU atiY2bb0/R7i3uReX9495Ii8YkNeEZPLkw5M7EkNTsPbplePERuTZSLARq6fYoGCwoOcnrP8Iqzi Rp1JPXaRiw8DlqW1sbNDVIUUmT1TsN35PJU/JnJHviqIrirq4q6uKurirH/OGn3mp2dvb2iu49Um URkD4eLUrUj9qmZOjyRhIkteSPEGMDy5rsY9NDd8FVeFG4ge3HnmZ+ax+TX4KfeVNM1TT7udr5pX jeMBTKajlUHb4jmNq8sJgUzxw4WU1zDbXVxV1cVSry4f+de0r/mCt/8Ak0mKplXFXVxV1cVdXFXV xV1cVdXFXVxV1cVdXFXVxV1cVdXFXVxV1cVdXFXVxV1cVdXFXVxV1cVdXFWNaB5g0KHQtNhm1K0j kjtIEdHnjVlZY0BVgWqCDiqYf4l8vf8AV1sv+kiL/mvFXf4l8vf9XWy/6SIv+a8Vd/iXy9/1dbL/ AKSIv+a8Vd/iXy9/1dbL/pIi/wCa8Vd/iXy9/wBXWy/6SIv+a8Vd/iXy9/1dbL/pIi/5rxV3+JfL 3/V1sv8ApIi/5rxV3+JfL3/V1sv+kiL/AJrxV3+JfL3/AFdbL/pIi/5rxV3+JfL3/V1sv+kiL/mv FXf4l8vf9XWy/wCkiL/mvFXf4l8vf9XWy/6SIv8AmvFXf4l8vf8AV1sv+kiL/mvFXf4l8vf9XWy/ 6SIv+a8Vd/iXy9/1dbL/AKSIv+a8Vd/iXy9/1dbL/pIi/wCa8Vd/iXy9/wBXWy/6SIv+a8Vd/iXy 9/1dbL/pIi/5rxV3+JfL3/V1sv8ApIi/5rxV3+JfL3/V1sv+kiL/AJrxV3+JfL3/AFdbL/pIi/5r xV3+JfL3/V1sv+kiL/mvFX//2Q== + + + + + + 5774 + + + application/pdf + + + PyPy.indd + + + + + Adobe PDF Library 9.9 + False + + + PDF/X-1:2001 + + + PDF/X-1:2001 + PDF/X-1a:2001 + + + + +endstream endobj 3 0 obj <> endobj 6 0 obj <>/ExtGState<>/Font<>/Pattern<>/ProcSet[/PDF/Text/ImageC/ImageI]/Shading<>/XObject<>>>/TrimBox[0.0 0.0 595.276 841.89]/Type/Page>> endobj 7 0 obj 
<>/ExtGState<>/Font<>/Pattern<>/ProcSet[/PDF/Text/ImageC/ImageI]/Shading<>/XObject<>>>/TrimBox[0.0 0.0 595.276 841.89]/Type/Page>> endobj 31 0 obj <>stream +H??W]s??}???N_?n$?y???G????f:q?CQ???"?????? ?)Y?????Nv-???=???t?2??G?vt??????d*??2??7#a??#C?#??,r=??Q???????~EzO?d??@y?p??hKONr?w?p?mr?+?4e?p?:????p????s}?N??{?#p%??????Z^_G???U{fy?;% ?x? O{zN?t&?}|Cg>?????O?0?:???m??t?U''???P!?b????>?F??Rs??a?cE?dr?????1??a)VC-??nV%dp_?U??"?*&???p?Cr??P??>??Yv??'? ~???(D??=?z?m?)?.I???????A?D ??y?? ++@??h=?Q???1?V,? ?M|?e??b????O$N?Q(? ??y???????G?>R?c?jtr?tY?d?G?!iK?Ylb???W?z::9?,???%?e?g??'???????=?dt????+???^??^>~:?] ,?????]^?C??Gt ????????:???D??[?m??/?a??e?}?????\X?*???????:`:?K??7???S ?^??[T~qW?pD????????b?e7??b;?~7?y?????X?>{z?'?.?a.?=? (T!???T*???L?S1?R?,T?KWF??????H1?:^8???W///>8?????x????%6+??????a????4^P???J?P?=+`??3v?Hw\m?9+?[?w???|??eV???9 "???t?m??%?w????Q8??????R-???????@@?.. ??E?e?9r?kkq"?PH?????[? ?y??u6g??dW??4?[???K?6?4???????????r??D??'A1M??7??e+e0?g?????u?9??Ig??????v4???`??6[}/&?x?-K:?????????????? N?? "??>?P?c????uf??=A???6???_-?f?-J??#??Ng?D?5K$?^??X?mm+??????I???R??7??I?}iK?Y???:??1S?o95?V??5?@v?D ???(tt?o?n????l??e?^?,T-~jl???c?m??T??????oP???B??1???Y????0bm!?L;?E?Gb??N?3??:?`????B? ????K?? ?. ?HDpu ?KV]??Y5?(?7?????? f??????????%????rD????nm4?"kY? $? +7?X??????n????%DglE?????`3??l ????N?=?a?e?e?*??F? ??5!?f]?Q?&K?eO([? bBG-7?l???&U????h???Y?4??5;KK?qf?Q?aZ?M????2??4yQ??b?????~????d?L?? ???* ?x<?2}??I1??? &iS? +?????u???o???7;??xN??????S??)????Z??L?X???4??3M?/p?)?,M??????????RH?G}????????J1!".0 +? bx1?d?O???? ?u?????ma??iA???u,????`?jt?v??7??=?}?>??OI1 #? ?K?mx? ?*V? X$??>?1?"????h?d???2??2???????? ??.?????qL?k??FR`??w_?????v_?w+?n????qY+?????!0??e??$?g??c????\??y~=???vW?=?.\?z1??R?>??? +???ZFi!b??????,_Mf?J?^u???a????????e?8J?????,I&?6?pK??-W?w!????o?????Nh?@ ???0k2??dh?%????eV??{??|??~????x:@??Ys????_.I?+7??*?)???ex =????? I????????IW?L%?????Vg?;??x???? .R.?????G?\?(????lm??]?JP?uz?? ???^?Q]?#?=?????0?9^?:?8WrL?gO??X8??????x? 
??Z ~\??????8a??????9]V??t?8??i????s??cz{:t(????4?H?????#.??')f?-WQZ;??????0?I?#T??t??v??u?R??9xc9Q?$?`32?\7?DOH6/???????Od?_@?R?|?#?%{0J*???0??6????????A?????b[?6?b1?e?9??????.w_gh?v?/.FZ6???Nrch????>??B?Yc?X2?zH???r??W?5?^y?aD??7NKA9???$2F??6N+?T?,W?'?5J*???Z;?H??\gv?f?.????1g????`??N?l?}?r?!?????f???^f9???C??Y?-I?????,A???????x???)??{m1?t?s?LQ??W l?A???HA???qZ???|??FN??V??XO9zl??(???2?@h!??[?P%??+,G?%?????~?y4C)??P? ?J?? ???q?p at L?=?eA?!>a??????? \`?7t at I7Wh?qM??W??5:d????=vc????????????-]%?m??????_Q?:JYs??/???+?g?e?G??'????~?????g????~??????`??#?+n???;????y???F??????? ??%Z?Z???oOSY(?t?-J?o*H?.?f1???'?s???X +<?%?W????? +b?D?e7?u??????=($%DR??*?[??????:??tTT?>M?>?H??IjRj1?????L???,? C?????fQ???]?Cc/O?(|WZ????Gt?bJ ???]9?b?T??f??S\?N+?????}?R #C??l????e|?V??(?????UNr???T???1 H??Bi?????M?6?9?????u??z?????]Q?b}?x|JB??Z??Nyh??KgrmN??$?fZ?F?a?8?BA??qH#?|??G???{???8?Y??L??????LE?T\????s8??>]!???5&p???/W???V:??T:#??\GN?5 ?????????0q9W?T???G???%h/V??p? #?U?pH???v??q?????')!??SN<>?%?????uY???d ?????????S?^M??]???p/AN????? r????? ,?Qqr?n???;M?????ul???TU-?YG?\U???vP??m???eJ?%???????????'????J??????k?X!??u?????r?sfm? +??|Y;CV?~???^mof?C$69?9???????/?U?)\K?= t| ??i XX?Jv??F?????>?;???s?d???mKK?/ Y ?:???GzM??$?????1?Y?W\??.?n?@?R9?S#?|8????j?(? ?B&8?????????R???WB??????kkc?????\2g?:i?fL?o???B??i??P u??\?A_?????????^?%???f???????p??-???5?"_?x??;f???V????m?????vn3?#??i?K?gM???D??z?{>??Z???0?q? m??? 3T,Cj?.?62???rwE???J???7????}????J?0?Im+????u?zk??????+???&?e~/?????. j??m?O?????)??}??y???O>?f?9&?(?I? +,? +4 j???8?/???? ???????5???]??????n???????P&??"[?????W{`?%wo??j?o??????e??-?? +?m7?#?.?-s???{k? &?M?rEX%?]??\fC ??R:{?)???~????\Qx?U????G??c?%?[??|?s#?$ ????7;??NC??D??!????+c?h\P"??%??,???r? ?5??vJ??T5X#?n ???\?t????????-%?? ?40?7M)?j???????????T???V?? ?j/$?s?}???=|BNV?~ ?DA|4??CE?VO?O??L?>.h?(fD??:(=M????j???t?p??? a??gG??UV??K"e??qG???8?8???c?TJ??5???????f????G2?!-M??k h???f{?nscm???8??*6'????????O?w?S????J?&?-??Lf???????DZ?????????(???^Zo???????????{??n?)][??? 
+zj66??????????P2?jP?W?&?)????_???W??^???e?????Wo +P?94 ?0S/?}???8}???8}???8}????qj6N?{?X???;'?z?Q??*!0?? 6}W?(~???|?j0????{?f/FC?}T`??>?4e%?6z?g2?H?????1%?? %? p?B??????6g?? +????????l???0?y,QqZ??5???v?'y?&????mx?dvk-L????6????`n_????0i????ZL6?X????TP?i?EC? +w s??r???A#?l?ES??=?q????1yST? y? ?{}^?o???^!?j? F5q????7vp??{G?????55?u?o.w?????8A3?+7?A5??<+J?Z?X}??7????|?R@??s??u??\????(D_???h??????S?#??????r?????d?W???+????fxIG??????~Ng? ?@0_t????U??Ff?B????z???T?` k?}q%XU???z.??l?TuZ?+??U???S?????????Lk??&?|~U%?q?????]??) _?H_????}?x?????'Z5E???? L? ?q?5?OW-Z?????4??tXS??O????G?=? ????|??m??8?y\?????tQ?;R1y???<}?Z?w!?hW??????KGK???\r$?? ??S?j,RI?`???0??0j1???????Y?)???ODF?v??8?ZOI??P0#??*???b3?~??2??e0Z????7?1??w?s?9 ?Z?? ???@?k%??`??UP??~?? +??K??????J?{?k?f9Fs?aT??????&?K????????w????=????H????????U?z?^?U#????????)d?^??i?ngc????????-??{ ??S?~$g??{Q?h?o$?B'??Z?$]L????U??*??{3?????mk?XrD?????lN?s/???^?b7????k(???????f;??-????e?}3??RAj?QjL???G??E?MQ?C???>(Y)L?f??c??k??9K??(R??A???G ???e&?B????^?Q?%?/+?&??? ?*,o?{ ??[*??|??????Y??5'?]?????????????=e{"_l%??G;o??T\?SUa?GL~? ??{ ??Uep=?????w??0?"A?!?Ts?E?0??p?[??I???k-M?????{?X?Ja?+???+T???N??????Wn?^? %?so^? ????tU`?*?U?#?^??L?"?~??"?????nfZ?o9f'a +??BG?:V???? +1?~?->???????aV????r?q}?r?zp#?z?cq??=????????{????X?&?Z?-"B0~K?NY????l????&??=??d?5??>}?Zta??^?Z5?P?s?z??v????lE?)? 5-cD?Tt?p#?X?y;}?h?????,?????u-[?^?D?b1?-??8?????? ?!???IjH^v]???2??????Z?>`???+??m?:??T{????????????R,Tl?? ????????2q?7 at u????S +P7??????9??:o??Z????l?????c?v?e??aj??s ?+?A?3??W ????>f?P?+;z??P???*? ??????>?i?7?C?V???? +D@??y?????ha?@~?FVF0}?U???%?W?raX?{?? ??+?ZJ?r?L? D?A?_?.a?H 3?;2?n:???p?? ?m?v??M:?j P?*_?????^w?????s^??????||f??~);_?Rb?????~?j?)?WO??n?&???>??_o?k??@Q?????? ??5??\e?(Y????brt 3?Fdo??B?2V?}+!?J:?t??a>?g? ???????=????f???"??.???d$?a?J:s?CF?trm?)??A ???81??a4?????.1???a6*?v"??~o?AG????z??J?4???"k???Yd??Z??MA???Z??????\j4??=? 
??s???????&?*????????,eYu.???(????B8u?S?j??XSn?MW5W??d???IL?(??Hn?A?????E??}?j??Ba?z?6ip???\??????%???????Z%L????z[?l??u?] 6v???0?u???zL/????4{K??`?P10???~????f +?z ??-v????F???&??'??_/????????2??z?G??w???>e?k?? ?^? +@F??S ?,G??5??wTW?q??? ?????i????o?p????hZ?5?(h??o????K?????r 5????[^??J??}a?z5??w 6?A?`?~?A?+[????%r????????-??h??k??9K??x????z54W???l?!??ye??? +?=??[O????}???? ????\$???b???v. m*??V?{????????17?1S???) ?uma5?x??N?V????o?^? %?s?]? ????tU??*?UOA{nXW?\-|??u????V73??k??0??S#?+????e??M??;??g??E?3+?n?????q}?r?zp#?z}aq??????Z4;??T?Z?uP?S,y??v???s'???Po0FTB=?>f???L?b?????r?,?????p?d2?1????Sy?(\????/8+????lv?X????]?y??L?E??KRZ?t?h?W`%R?MY%e??T?[?=?~??????r_??v?h?o?????F???!?TF?}?????????%5 ?T???]?? ?{M??????G#?#??1?M:?|/Q6OFl?=0??O????q !q]E?? +khB8ES??????9??:-%"q:H?>7O/??UD85???q)??E?i?J?2.N?8???e?[?0Cs?GBl???????F0??3??4?|e ???\?? ???Z?u??Y?E?e??5??? 9]?)?hJ?= ?~>?4W???|=$Y??V!?vD{;fe?I? ??!`??BZ???s??M??3??/??d??Q-)???Jzj?r?????J?y???9???5???`?v??]rYr?Kt?9?)?]T?F??&x?y%???3?T??@???(????;{??[??e?????;??^?+?K?NWD????v|??* +(?la?J???8??????g??N????5I?6Z??'?P)???-Oz??1??i?????"??\?oWn ???????!A?Nsc????DZ?6?%:_kU^%??%D?2-.??k???&?4?????;???@??|^3|???Z?f????N?O?6S?A)?OP?i???]?[6M +??????z???p??`?????Z??d?S?%?ul???? +???la?^? +?H????!????????cif?Za(??Z??5g?d?E?[? ?RxVq???????#???(??k ?g`?l??f??@?y?Lk?F?m[It*???GU4??[*?,$??z0??4???P?H?2-???Qr?p??:#a-.??M??????? 3?a*wj? +?v?>????{4????fi?&7f?Xm9E"t7c?????l?ZN?????f/z??S??\9??? S0?u4e??uw??0????&M??Y?7{2g??U ??Y??]F]?-?U????l?>????????z07??Zm_??????}??.?o1?? ??y???????????"?t?jw?? ?+????sBf5??H???au?3LZ?v8b??J$?0??b?9?jAM?07?B??3?F?K??G??{?o???d????.xt?o?J?????-???R??T?i??G?6??P?z?\.???aHK#??}??????-% +??/??V??F?ip???lx>??1?nU5????.??o5 ?|??AO?3b?Z?[Aj?lc4+?? +k? ??s +??p??x?Z?J$?D?e?c??>?????5??Ci ?#z??? Hk4K?o??J??~?h|?q8'_/?.????W???_o??/??`????h?B?c?DC=L??R?T???^??????!???SI??]??I?FXS}??P)?z?.????;DV??$??j??B???O1?T?:.?z????v?=\???.?H?sQ ?C?2?v?l???@.dy[??l$e?? 
?i'~i???\4????????ukTa~?????{Vk)?`?|???Db??.?G`??Z???R???5??H5g?u??pGZ???;?!u?|??(? b?I?????G?T?v%k9 '?G ???? ?,?}???NWW?8?o>Q??N{T??`?o??U?????iB'`^?q"e?hq?`??5^???????db??=?t}3m?8f2??s?N???6?t??]??-??uAM?.?I2`k?*dw12?{??~3??????#?!????>???R???FOx???y???+<&9?jE?I???z?.Y???bx? +mk|????b??x?`???uw$?????o'????????%H????????#S???>??W??????{?5aQ??. ????L?-?H$?fDJ??*?d?ZN?????fw??S??\9??? :-???:?2b???F??L??O???I?#}?????Y???A:+?]F]?-;?U?????^>????????z07??Z??????? |>a?4R?|????g?????????HQ]??????6???=?~&lVM??XdX?O??!??ao?????b?^?D3?@v?|^A? ??0?66?F?K??g??{%???????#??L rE?=?????I???D?y?J12"N???????BU?????y????P@N2c?????? +?Q?Y{???8?i??S????#A??>?3??i??k?sW??y??.[?J$ 8vi???-h??E?T.k??2????K?????Q?~???OCb?ZCar?lt?? + +?? ??eo??cM?~??G ??Gv?l{?:??roO???:?Y?baA?N>@WU{??G?r???V??kA+?????>8?? +?uB?Y?j??}?? ?#???? Hktk??d?K??~?j???P??^&](????W???_o????d??U?%=:UH?j??h???Z?@?j?{??u????? ???i ???6?j????"i??J?X?%??Z??H?2??2Di?H??hZ??m3f#??KiL?x??????w???????M??fXA?!?]??fXo?p5b???O??] +??^q??`?a?????aN?Fg??tw??&?????Z?;ew:Ce???????gXGh??>u?|????v??e??ag%??RM??Q??7sy^? X"?????X4?g?~X??????CXC?"/s???%,N?"??-?????AL?M?4$>v?h????9?d?:??????B?n?hQ,?????b[u"??Qt???Q!s??!??}??Av ????3V? ?_?HQ?U?????d??????AqzPN? ?U79? +??????I?p[???m??Ag+L,bV +??n at hK??????f?l??Uw?Y]??Ga*wj?j^?N}~???= +???f?r]??ruw???S??;?<??b[???~????_{*??+gS??A?Ig$???!?Hq???G????G?(-=|.W}G?8?O#?????{?D???[?K?B??:??V???? ?G?"??g???L#ut??????? +?_?m?S???. '???vA?AZ? ????A?????<`?%??y??e??????{?x?l?!??2?r??tr|??????n??U>?Z?J$???????Bb?j????&a_??B???? +?? ?5???G????-???P??^&](1????)?_o????dp?]?G? +)[??M?0?BKhQ?zo;?N?{??!y? $9??I-?9???I{V*?Z/?k?X.%?]?j???????v??D.K?????VMG?*?T?]?,???G M>??k?w?f#? m?}???0??A??`p??Y?`DZXs?w????m???????Q??9??e ??^???f'tD.??A?};?x?a?@?i???????9?? T? ?TO+?*??g?~8????????P??JU??B ?S?H5?A?3r??Vw?C7~?4D???u]=g??U???,0?Wh????-?e???[???M?>h????]T?\?????4???i63???02%??~ybT>??G??p?J?N??M?????YEk N??9?]?{?K4M?d?????+?6]7?????Sz??I?????;?? 
/WH??????jN^X?u?????i???+e??????40 7?jOD???`?4??,]zz=?L???????4\?:!???? e???????w?? ??w???Bj????$3cQ?V?????@????3~L?8???6????????1e??c?????,>?P?PSd???/?3}7?Wi?UwDV?? 6?r^.k???oE?+????E???????`??o??N? +:?E?#???["?S%??P!:??T?????????4P???? >Y??\%????????O???c ????G ?K]-?? ~msi??6C???'.D _???k?????=K!?E?p??N?k???1???g?>ZM?? ??????Wy??sj?????K??????_65???2??t???v?[??m?>?a?i?????]????5?=???\}???y???f?>?????+!$?? +2R??T*R?l4J?r`l 5?|&????B"(]W??e?\??l??>?Z??X???mr[?1?- V? ????)?+n??-???=??????6?? +#S?????CI?? '?j&?? /???D????????P?n?f?R? OWH?sj???Vh?C>???}?>?J?%???zK7?v?o???i????!-)????^l??D?z?\i?w($?o? W?aV???? +?j??F f?????? 2?u?7b??`?????R?^?%?????N??C5??z? v????????$??d???Dop4????A??.%?A:????x}?K???n`c??{4??m(?)P?N?^?Xa?+,???????M2{A???l??ni??Z$R??????O???u????J}???>? my??d0?&7t? +1????~^?y?_?+?/W?P?#?iF??????0??yD2}???c5?g?HLu??/6? 9?p???X??t_8b,??W,i????Tv???Q?W\?s?`{9??S?w??.????P?uh}1"p??????????vE? ??^??k_86G#;?4???7??#???d?"z'??F#??????Q>DB???N??b??%q?R????????lyE?*{?R??5???|????cE?T/?????/???sx?L???? |?lP??I?S ???hI + ????e???M ??Mw6*n??????6:D?????7??Q/cR? ]?cTYT!??' ?5MKbM@??h????:r???K?T7f?>?yP??????????C%[=???????4?_?i??)? ??986r,v'??????????????????~??K??B???s?$???G???????cm-??Ax ????6?V?Ib??? +O?2?g?C?.??,,????;?????????a???]?y??k*???6l ?Nv?????~ +NQ?"[???N3?p?E7?a?p?uZ????? bu~??Z?y/??` ? :??e%?j???y? L?|??r'?????v??I ? +e??Q? ??bU5??U?0???b??? j?eJ5"tT?`??i??i???i????-0yH*?U+?YY?B>iZ7)z?l!?? ????? ?_?_^X?v??"???Z??U!8 /???6?)be?x???>(?$?;?a?iy?$Q?q??8??i1??w?6???n?N?~]???bh?`?????C?? ???,?iO?c? ??}??vIc?I5????Lp ??}.??,4?eM??5v/ 7t?h???nX???B[D??#~???D??~????{a?s??[?H??-?r1fz?????~fN?|2??0?S????26??????.?*?gGEZ???h???]7?j??h?,??W ???*wG???????h?x?k?????wOgs???r?yR*B?5duy?*?!/?B7????A???x?J?>???W?Q?5Q!?i???4$L;?d?l???ep?????_6)F?S1r???n?? 
+?-??D?Ed?G&45??tC???|???~U??wQCmK??B??Iv?|?jt4N??T~L5iGYM[??a\?Z^??*?rug????A??f]???v??;Z+nA?-??P??1?^?@6P??W??.???h?jFb?????????}?????# +?'??n?)?????#a?r????=.>??et???n???1?;?_??u??]?}OP??L{??S?1> +rU?"auhX??p>???Hy^??????E?)?:{?A???????????d???r?:?? ???pS?2?nO?U?{?-?????8?????d?l??c?k????4Z???^+?q ?vD?f?F???n-???:??i?'mJ?3????V5JVh#8'???*??$??|???5F#??:??8A?8?bO\>bO,?t??G???=?W?E$I gR?@"7?~4???????????G??,l????3CsFh??m?M??&??J? ?nW?,o?$?(???> y{??? ??V??I???HA?eX??"?? ]?? ;?????5???;?M?N + ??`S)F@??-?????qO????????id?25Z?.P q[??E? ?~???k|U(????*4??%??????v? ???FU?h?2}?I?\?z>?R?;i??V?&?????v??B?jq=?)?::???^??%S1?kg??V????L?y?^x???U??6???}?|???C~??U$?????2 +?V?????????*D??????o????????L~?+86e???k?s=?E@??4b?F}UE???:)?a??????y?1?f????@d??$?C2k3s??RI???i! ??}O+?'e???9???AH?~wOe"????Z.B?-m??f??N\?nX_~?d?u4|!F?<??!??}|?\1+R???9?J?\$????t?xy? W??`5Kl???4L;d?l???Up??????_6)D??W!r???m????????????t>? ????|???C$o???V???k#?ROLT?k??4??.?,?????Z?J[?????AG ????J?????6?a??o ??%D +?'8_??????*(?????B?r4???HBU|?z??Gv"a ??e????5??Vr?cPI%-<}eE?? ?$?g?X????XH}?9???0?????F????n 4??G???]?.}'n??????Dj5)??H?Eqi?}??4?5?-?dDR????\??XzM???O??V??#???8leM?*}?|Q?e??G??E????\?\???x? Wz?_F(??;???1?;?_??+??N?Pq2??? B??m,???"m??????~??&?? +?T?:?j???-u#?????dv?<}????H^"??????LqY???W?gF??Zx lzuE?9?~? +G?c??>#Q??gm? ?a.?-@??z?'k?nl?(+????~???k|?e???:?|??_n+?t?Q?/a?CSel;&?????";???)???G???|??E??p???X?h.?' ????JJ +?~g]`???;Z?F{??jNx???_+,???+q????Bp2`I???'h??m?>???{???=??>?lQ?O???')?gR???!??+???^w3????r[?) +??ez-?2???o?]?@?R?3B[?0?mZ?51?W\?v?rg?z?#a??}4l???g???????1? ?:???@):?V?]E??`???ZP c?h:8???Mj?????1@?>?^f ????j ?'??B?S??/ ?Y??i?VC]?? +?~???v?F?~?}?? +???W?????>?e_?C?P????b???i??1?B[??o X?5?`??B?.??~????Q?Y??lG=??|?^?t??5???'V=???S]?B}Bj??W ??z?Y}?????Y????l? ??q"O????{j?????=?Y??????Z?????EE?5??\?'???????x???m?@x??x/0N????d??E2 #??z?l???/`??Hb??b},rT9?j?|?g??S=????????-?q4???????F???W????Ej?????2??V???? HZ?????0???p? ?+?I? sz5?C??????+y?O??:??>y??Rm>???@?????(?[?????5 ??5??\?????2n>?,A????????? 
!B?e?Lk?qU??tRD?C?\?u +??l\???U? +j?2?e}J*? ?p????????`???K????z?????h?q?D??x??|???bj?h5I?)?b???????l_????U?c}?t??5??????K??zi?P1???U???@ "?.????,?du???]d? w)?u?j??{???1??h??u??.#y????q4Dg_?x 9???T????H???*A?["?????A??'n??`?"?r???!+?J?m??KX>?]L?????@2?[ D(4h???;??@?1?Aw?? +z?'??M?R?yy?9aU??i(??$Cq*eV???Ww??W??? q??W?jv????LY??L?9??v??????1x?+? ?????A??7?!?-?M???k?????*n[N[&E?? ??+? ?? lD??D?{????? ?V?L,??B??q?#???<)qK?+????????b???9-???kR?Zm[,p}???.?SY3?^?V?&w-nn??????3?4w)j???ai6??;???^??????OVec ??2??Z3?? ???7E?O=_qT???>.???iM??{?? ???ooZ?H????U???v??????,??sU?KXC^??)0e?a??j?Vg?????5??#?|U?b6? ???n??/???8#?S?W????????I at 3?I?M???????-??}?0gK?k????t? ??X?????>??????0f??7]?OM???? 1? +??Z4??6?P%<,??+W?4?????^"?2^???v??#??f|Tk. ? ?G6[/??L?{?~???Qb ?? u???B ?A`J%KP??C??a????P  ?b?i5??f?G}???????????2?-gz6?V4???w?nmWw?1?????1?"?~?F?m?|??@O??ie????????&??~???9?w??y_?y?5?(?W??????]A-?U???49??????M?J9?q???2?'_?}mh4??#?? VOc???@?z?tC?????6?Q??????r??c?n??Vs???=????)??=mk]l??????~Og]3?.?????b]#f?,? ?-$@/}x???A?g?c5??$?X??$??9????c????p?? |?U?????? ??S??pPyH?:???~ ????]B?a??;?C2H|???]?Ip +?R?M]????1+B?????pA?Y??vR?z???f???i ??)[????-3[???????{?BA???*?????????????C>Mu(h,&??2?a8_?}??:???/2???o?????????????T??????6x^w^??k???b?G?+I???`?wqn??%?!{v?v?qk??SJdi???!????o??@?)????,?p????????!:?Z?#H?6????jI?X??(_g3?*?l?K?? ???;/t??-n???(?C??>??=D/??2vy??t |"e?,B??|? +t1w??:???F;?0??DB?e~Q t?f??"?[`?w}?|>???[??U?8??%??:?{?[Y?:???P?????V???\?-?o?kh???]m??????9???VM;?x?? +TKg???W?3???SV?C?????60?'DT? ?=??>?? 9d?r?Z1?6;?????M?x?9L??R??U???OR2?????bbh??E??/?WJ A??b?h???????<"??%?? ??X?????n????n?i??r2?P?=?I15??}?v??3Uh? ?7Z???y?K&?,}LbW?R???'?c?g?????jz?x??r;???W??2?a?;h?:??cO}??XUF?!?Ia??????t???a?????7 ?mp?s?V??? +#??mp???????????? 0??4?????6??&??L?N=?s???H?{???2?G2?S?w#??dF??;????d??????W#?????j?????1??R??^??1?bb?3??OsZ????a5? lQ5???;?i?~]??Hoq?????)??$?l! +??M?T}? 3?0????WkA??A????,?u?2hU?1??z???c??aE!2,nn??V????B??~V ???!}??????4?r+L(??????Z???2??:2?? 
?????Z???+n??????"???4????pk?}a??6?yK??????U??0????)??K|xX?F?CP?';w??1P??'? +L]'???#??XJ??%?,??An%?/?J~_H%?qF ???y???9[|???\??y?f??r??n`)?7?46?e?z|?-?4??e1D??E6?72??????i???q??(7????????{?0??\.9?~?????M??:???@? ??d;?Y$???Tc??c!??C?n??O2.C?????K]????????f?L5I%^????????y.!???CC?{/+J? +?8????f>?He4????K?B?CY.??u?? ??fQ#??^Ie??$????R????K??~?sx????n?!1???%???A???? 2 ?? ??P}? L 1!-{X?9u?tF?u!d?*[??uv#???~?? +?'T%? d???K???@F???,i>TE??}@\Z?lX^??]Y)???*I?-D:??????i? X??H?4\??e??|+?????Ui???I?????&??!?J??Z?@$y2l)?%k(MM?R?R??B????a2-???r??|SNAd?$y??T?j???t??r\H??&~?????.????L?>????#;?S?O#?????????????8??9_??d?|??S????????JCw??lD]??c0e???,,A?$??'?A?D0??????}?'_Kr?.?/?%~?w?"[1??e??9?QC?????0*? ]%j6 ?(???k6??+(?@?/J??d????WA?? O#?? ???)/+?)?,Z???????I<?5???{? x$???f???CR?*o' ????????#/?????R|>????D???D??Sw??Z????8(-???V??????GV?$???%???y5 +?????{@b?]?E??2X-?C?>??L???????(cL????LR??????$D??}w k???1??R?Ps????:?Q?@????q?Ny,?,??t???|e?-Y?hM?W?%??'??????B?*??? ?uYk s???i?????W^??? ?????iI_?Md??ni???i?'??$?gJA"`h?ylf?3?"HsY?J???g?e???G??GPF???ILV??? ?o?????-???m?SY??M?J[???\?K?Ix?%?^???7*?o???P*?+^???t??f{hcp???c???@????N??B7?)?:????$?fV[??[~ +??M????????+???t?>???? ??j???x??(D??G7?&#??3Y?u/?n0?nt?z^???? M_???t???a[?4?)_mr~?Y??e??3??I??_'??v??????[?/??????n?w6??:O4??$????????s??#???5G#x???$y0 ?k?NkZcv??9?&?^5????y?^?G??v??Y4??'????v?;?]?y???rG???z????0?????7??M??=?;?]?;u?Y?$????sdGw??i$w~??S?????h???i????OF???|??V???8??7)?<4?[??!????(L? 5? Iks?z??????? ?(\?m????,@?j#???L??J5Dis???+?? @(#?K? ??=rK??,??J???v?\?=?U????T_w?N!?p?9??>F??O:?D?7B???????5?]????|{TgUD??{V???&?^??9??f?Lk{?????+????B??????#x??S7m('n?r?b?W???H???????$?'?2?lz]?]??&????U???j?LFzl/?{u??h????????;??/??^?:m?R??,;?FX9O#G8? ??7+d*d?J*fuA?orT??r:?????m??Ah-?h??U???K8?<\??>y?t?'_??????k??4?F???j?~pT?Fm?U??I???kd???q?Q?????????~??_op,WXxUGit?????????_?`?????~?g????zoY X?*$1]? 
L?;?????tA?+\?X?4{?*[?x,=????6?F?r\&B????i?2\P7????IE y???????):?+?f??A??l?tf{-??^?Rl?^?????3?g-3?3???- +?????+-????&?oL?pN????V?=??H????b?Z!??$?4?t?f{??? [???K[0?4??c??n{?%dR??3????8??b7???3N??? ?o#W4^??v??h????;?M???5v? ?&Ew?9%U?wc??>???.i?k ??{hTJNw?[???H? @i?y??X"?H?~d?J ?a?-????N9?; +w??Fo?y?Yr????^?K??(??6?????6J!l?WaNt?i?2?M??G~??o??#?^>????????????l$b?je#?0_l?V??j+F n?c#??l????#T}d????P??d??9?I? ???????T???2_?|?)?W??f?]5??B ???F?+T???J^????5 ?P??D{ ?????r ?#??e??-?&????a??!?????U??\??gP????x???N?G]t?RA??W????T??L?c?????vGe?gL?I??mq at 1??g???%??u???;CqF???y??|?~:n ??T I?-d?n?#?????tB?3m?.????2????|p\??o?Y?????g?K?.ab&GOy>???l??????????qB?E!?E??????>Y???7?,q ???d?J??? 9??t???*0??2~e?e??? ?R??JU ??????XX???;?[?us???l?)??0he?^~???\???k??Fe!???3?4{?%:o:Y!}Fp]ha??-~,?????^[??? \??-Nbm?M?)????????`54nr?????h??W?????)w??P}c?Qk?8?2?l??J?M??l??V??C????1~K:??+f????i??Z^?Z?l???9s??c?,????&??????^?d(?Xdm? ??4W?????E????)JAN9?sw?aw_T]???>???i???9? ???7?.XJ?d??Iv??O ?Q|nk??=???d???r????|?=?;?@?Y +endstream endobj 34 0 obj <>stream +????Adobed???? + + + +  + +      ??????? +  + s!1AQa"q?2???B#?R??3b?$r??%C4S???cs?5D'???6Tdt???&? +??EF??V?U(???????eu????????fv????????7GWgw????????8HXhx????????)9IYiy????????*:JZjz????????m!1AQa"q??2??????#BRbr?3$4C??S%?c??s?5?D?T? +&6E'dtU7????()???????????eu????????FVfv????????GWgw????????8HXhx????????9IYiy????????*:JZjz????????????y?????????6*????b????6*????b????6*????b????6*????b????6*????b????6*????b?????N???n?6*????b????6*????b????6*????b????6*????b????6*????b????6*????b????6*?w?}8:????R????b????6*????b????6*????b????6*????b????6*????b????6*????b????6*?????????????K?b????6*????b????6*????b????6*????b????6*????b????6*????b????6*????b??~????:??.???6*????b????6*????b????6*????b????6*????b????6*????b????6*????b?????N???n?6*????b????6*????b????6*????b????6*????b????6*????b????6*????b????6*?w?}8:????R????b????6*????b????6*??????Y?/? [I?????=?MF???k?/???%?I?L?6?S????o ? 
+Q]?;g??????Y<$rp?????u???(???1????b????6*????b????6*????b????6*?????????????K?b????6*????b????6*?d??G$bG?????<]y9)??u e? j????y?9/?8??~]?r?o.??Y????$z?Z??s07?C??r?12???KV??_?*?????????? +?00??Mzxwo???????C?+?u?8?????1~[~b?y?:<???u`???[ ???e++??+?x?M????L?g?4Z?????f??*??=???U?Q?m?????Q? +??}??A??v7vlU??Wf?]?vlU??Wf?]?vlU??Wf?]?vlU????puGWcp???Wf?]?vlU??Wf?]?vlU??O??-???co??#??u?x]d??8`J??l at 9f?S??????,;?;?????s?9Z?6*????b????6*????b????6*????b????6*????b??~????:??.???6*????b????6*????b????6*????b????6*????b????6*????b????6*????b?????N???n?6*????b????6*????b????6*????b????6*????b????6*????b????6*????b????6*?w?}8:????R????b????6*????b????(??????I?nI' *?B??????????+??qo"???$B??? ??a??!??c!?Q?v?+?b????6*????b????6*????b????6*????b????6*?w?}8:????R?????y??r????n??g?/?????3?wyc3X?E:?<?%%A??*`Gl?;??????5?8?$I ??q???????l? ???[???{??[?m??+k???A?M?\Gp?h ??~??.???wge??8??]??+`A???1????b????Ok?Vq??4NY???G???m????7vvo,yz??^[?|??t?.???t @??5Nf?n?r>?9????Q???)vlU??Wg???'?*???/?}oY?U??'?4??WM????????C?9 ?????????M?~???o??4?%9????F??\????????tz?{v?O???????g??`???ORO?&???2?9????1v?e??O?H??O???^ ??[??????j\|???"?A???i?-p?????9??~Ex??}n??Z?????q?r4???f???????#S?????i?????O?k??F????????+???v?????O???????W?;7????????'?O??vo???\?5??>O???^ ??[??????j\|???"?A???i?-p?????9??~Ex??}n??Z?????q?r4???f???????#S?????i?????O?k??F????????+???v?????O???????W?;7????????'?O??v;?v????:??S?j???????8???;?????9?mun????????eaWf?]?vlU??We?a?b?8(-?7&?f??4???3}? n????????f????y????ro?o??Am??7?7?q????????8?[vnM????h-?7&?f??4???3}? n????????f????y????ro?o??Am??7?7?q????????8?[vnM????h-?7&?f??4???3}? n????M??????*????b????6*????b????6*????b????6*????b????6*????b????6*????b????6*?????}8???Wf?]?vlU??Wf?]?vlU??Wf?]?vlU??Wf?]?vlU??Wf?]?vlU??Wf?]?vlU??Wf?]????]???6*????b????6*????b????6*????x{???[?N??/??8??~[N?r6?y?=FK?,??6?R["???????F????G??N?9??????k:?m?OC???%??????i??g8??&c?D???<&???4??V?;.??R????{H?y???j?~??Q????g?B1? 
?P vU????u-??eq???i%??O ??G +????b??O??yl|??N??b??V??k???=P???+/?-f????~?=>???v??I^6X??qHF?e"$? %?X%? ?x'????Q??P??~y|'?J&?????yd?(???V???31?U??z e!I4????[????io???[d??f??x?,Z?R???68??25w{W}??[?7Q?n??U??t??J?????-w??U?~Y>???)?D??|?j????????9????? ???4????Y3???????=??Nq????hY??i?C?K??]???????_??L??/????y?y[N???]`I.} +????????G??????W???Y??b????M,a?W????j7?w'+Q?z???9????????]??h?I??K??Vh'??[[?$???C???$I??#C??GHNQ ??#?w???????a??`??4 KM???????ha??8?"*???~???2c?Hs@??<%?O?(vkK?????fK+?h??h?E#???$????????C?nE?=?]??u???U?????^??fn??'??2D???9?U%M?l??[??4h??=?i???@???j??|r?vlU?? ??? 0R???,F??????}8???Wf?c??B??C?Tn* A????G7g????=?Y?]~X?G~ry???J???????|?gr?o?i?_YAy$?[*?+?zj?I?R?????;???????5???L?,?????\?????|???5l:\??JF;?????Q?8??9y?????7?????;??^k?{?o????i!???{K?{??U??j?Q??!L??2?WE?Wf??>??d?O@?LgDF?1Oa??MvS?4Aw}?????s??????|????????.?v????????Z???P??`????s? ?Z???S?N?????>??f???~L?,? uc?lA]??????????? ;????vv?? ? ?r ?p????w^_?????????5 *,????mR9?W?I?????7?\Z?c?????c?? ??F???u???Q??]?c???jL@???1??vx/?q+?&?????.?d?v?l???$??;?F??[$?????RA?@?w???f?????vZ?U?L?c??Gs*???i??'f?wT ys>?g?5??;u???6?_?????L???@r?????9i?q??#???????m????/???1?h???O,?s~[?t?2?%ZY????? ?????h10???? ?????; ???H?????]?D=???9?8}??7????X????????j?[??[}?T ,??y"?:;?RRZ????1?????? =???js: (?&?VY`o??$ ???]?if??^?S???#{????????????>`????????l???{+M:?O?v?X-?uFp???C^???=????h???a????qJFGc#.???????&?9?>~N??_?????????6???3???????_~??#????????????O??c? X????OC?\?`???H???u?b??#??C_l???#??M?~?,??b?d ??on???????D????B??1??5?9?????W?u??S?(d?2;????n??? ???/?z?N? +??Ec?9-?R'??u(o?9l?+?RuS5??%@??^7???????8????}w ?Lb%`??Cc]??S??]???>?????????????d???.????O8?z?? ??5]s????,b(?????#????/DE??c??????8uz??A??c??Q??yP?co????}^?A?????!??r???vK????*uo??-c????^M?n??C?R??PJ-.-hm?-?`??????????k??????Zm7jv^? 
# ??#?2?(?2???M?v/l???,9?"???3??i???_?/(X?=??????6???l???>G???|?W?>???]???O???>?DH??/???g?;?#N?????????w?q?"H!Ky???Q???N??xw??L??i{`??????w~?4?j?i???????QO???/ ?????M]C+yA?????:r???A?0d?r]Gf?ux??t~???W????'??a??=?????????=O?OY?S?-I?8`??C???????????O????ho??? u_??|??S?8og?a??8??;???????E??"???1C??p???G?^??`??_?????G?}c ?o?????/?~0{???? |? ?????????z~??O?@e?????????????t????.9??????kqZ??\???????L4+??U?G?????~?g[???????"?????t????? O6?+?nmon?I"N?X???9??~???U???)?M???_?H??H?cB??H?r????Z\s?1/P?u???????8????/??f??'3????8_h??e?:^????h????????????????'.????]??+?J?????W?hK?>?g?Q?????g?_????s????]??i????g???????z?k??>???&OC?C??W?????g???/??????????]??x=??N???"??K~N??y??????j??????:?????a??:???[?ZJ???'?#6s???_??G?????S?3?}??$??g???????P???Z??q _??????????????$????????????Q??E???????????.?????X????3?~????q??a???u?x???7??}?????????W?E??/???h?O? ?O???=? +?????#'???{/?s????g???????|???E??G?y??)??!??/?????????????????=O?OY?S?-I?8`??C???????????_????ho??? u_??|??S?8og?a??8??;???????=??"???1C??p???G?^??`??_??????????????9??-?????y?L?????GH?????y ???j?W?4?yQjpI? +kQ???h?d????C???K,?8?##@? ??|\c???8e?Q?^?????7???????????s7??\??/??2??n????h????????Z?+????????? ??????{U?????bh+?????V??????????:?-?!???c???;[?[?$??`O7???????????3 ????????????e?o?4???a$?3?^9LJ7???uz?ht??Zl1??WM????? ????3~_??T???t?t/2h??ye1Fh?????F??2H?H???;?4=????I?O??B??A?G?y??g?!???????r7????8??1^\??n??????g^????I_??9???f?s?Y??p???g??j?F?;$???????c?????^`??V?!????????????????y?t???q??????????qN???b?qY??y??~?????9!?S?_????????3?R$f??????????xnl???RIdD?`?????C?-???.??C?7?????[??#??h?FQ?<2??wG??=Mg`?-?E??!?????????,y??_??1???~L????????r??M2????j???fP??t?5?????_?}???>~???-Nl??8?q?!WdY?? ???-N???G??K~???????/?r?Y?G??^n?4??:??]?????????1?i?//~i?,~l? ?????(?H[E</???!s???w?t=???-g?????_??=????????0kI???_??; ??7?? ???\?>G??_???9??J4v?:???J|??ACJrA??i???g?g?????i??8 ????k?/????2??\?????77?????"?E,z*?@?@)????mei#??b?k^[?F??a>??_??????{>???w]]?????>stream +H????sG?5'Y??k?e???2???8? +$J?aljZ?P2Q??-I +????qH??R^:j?P,^?Q? 
m3U(/?????????w[???????????}???}?qO???W????T?}?S????z5>?[e????A_?Io??e ?V>????a'(?C????0??????}??E?????Q?????%@????z??Q V*??7???????Q?^?D??B??b??Nb??Np??uo?L??? V??????S??j?m1" @?D??!B;B?? DQ??aB??!?(?x?0!?r`????Q??a0B/?!1" A?F9$? +?N ????????+??:9D?u???D?????q??.Ah?1E???'?QZc?!?h????e{? ??;}p?|????P??=?&????C?8 /?-????E?LA#?p#?'??[??#?Q??R?a?? ?? :??B;??1????&?Ey?+??G5? ?1fQ??JE5?c???6??? $F??????w ????v???}@???*,?w?????u???8?(F???7\ ^??~?B??>c??^}a??-?++?????????+??? ?????{_?V?zU????~uU?/?L???+'????d???}O?{????>m}???j???KLkeW?#G???r?sL???????????:?RubRNf?_???z&:??QF.?M&???x?o+??z?????W???R?A?????}?w?:??y=?RY????8M1?G??~l=??J\?? l??????=X.~??I??_n:C??5???%??R?;UJ?^?>P?L~: r;???=]???8C8???a??p????z??xQ?????2?f/?v?{_??5??L(!?????????q??O???&y???\?>P?!,?fHi?^??W?A??\?9^S?[N<u?? ??*s?????????~?1??c?v??%?z??=/n??J?M`??!??a?#|Fi1vZA?s?tO???u?#??KM?Y+H??G?#8???ivNH???.rO?67a7?????V2[?a ?!????_l?????*p??M?0V=D?j??FM??!9?????&\Dm??!9"t??^Cm??9?"?{w?L?;??%?x'X9???V?????V?D???U?5?????Zm??s?????O K3???(?q?L?Z+v???gff?I????z??l?\#h?V)?????|%>?A???W?D1??4?????oV\??:BQ??J4?_???h???Xpx)???H???[????;?M5?`Yd,T????????J:x?<7E?giG?uRO?:pLM?????}?B?????ZD??yS?9?2gy?E2?????????l)???{?U`Ji????^*?I??t?ui???????????X?S2!/??\u/d?)?Bn/c?????3?;??G???o'us?1;?71%UP?B#z%? ?????'?e`?1i"H ?]????_ ??aa????]?> wc&????XtFle????????j?7:???n???V;m?|Gx[!?"BT?]?_??????vk c??????5l?_PqZ5?>m}e?7???I?Ig??U?> ??W???\?u?}?s?S?????b? ???+??0?t?4e?N?? L?J +&Lw?? ? ?a?????qK:U???[??x??&?,L???kf?o??g???&???U?U???2? ?`???????*?E??E2?J??7? /??GbN?Z-??5???3??J?R/ ??????|???\%?m?LR?B +gov?L?7B??W%s?l#a???c%r???#?6/???? ?Aj:I???I8{?v??R#???[?*q+&?? n(????/??W???-y[?g??????nOc????L????6?~RNu???y?lB^ +'???w?c????O??? ?4?^@$??(??$&???????0????\Ap????G??t?zw????? @?O???Q?R3[??????Y??lZ?A)??? ?& ??] l]??s???(k??8g???????????G?X?33@??-"?S?X??2?Bz???)?8?U%AB*???F?]EJ?~?F +a4,???U" ??A?~!P?M??]????!?*?7??Q?c??Z3*?Wa??? ?l?q[4?s!LK???4???oBl?(V??3u???xT???i???T???F???$?z?F6j?*?B??!?'?-G%??e?TH?'??&7? 
:Fz?'?{{????D6??L?2????D?????????X}??G?,? pwa<4????????}?.K2?x??a?y?????IU?:E)S?uc????]?????G?h2????Y????)x-???H???? }C2?6??!cVI??>stream +H????b???H2?3{???K???}? $??????n???x&?#?9???????#?B????B????????!??C?p?!?~??G??4~B!??Wm~kv??=6?="?B??U?7??A~??B!??^??U??Za{?????,?= ?B???k?????ok?9????{?B!?? ??J[?<O?,/????T?n2?6???D?h8" ????8'?T:?NqI?Bh??.?K?f? ??????X??`???b?%?s???A??/"72?? ??qZ???W?B????*???41?u????Z???mb?gg?T&?E??????d6y???<??^??i?X?j?b??a??bp4???? ?B??"??i?Y??a???R?B.7???????Tf?? ???Py 2??`????n???x4?{?N????I?r?P0 at f?Yc?8rA??w?Bmv?UH?b??????i????P,???+r???????h2a?o?????????^???3y?L???,?[D?U?9???c??)>?R?L?#?B????b??h?9?d&??z&?.?+????x2e??*?X^??K???[??N????k?r??"?z*?%d?q?5(?}???)B!?A? ??? ??3??2? q9K.???r?Y2???-??0???????<'??????d2?$_?*??!?????(??????Gu"#?B??????????st?r?P,W??v??'?'??? ?L,?[e?? 2???L&???1??5I.????Jj?X?2?@>c???S??Q?!??cb????f??`? ?S?Y???)?|?T^?Qe????l2??r9?k)=?c????`4??X??J??1???v6?N??^??j?k?r1???$?Y?S??"3?;????G[B?????A????`>P.{?,'??????Z??????d?*&?????5?<t;?f??J$?2???S??y ?pl2? B!??L??ffq?X>U,G??DR?? +?r??hu?}Qy?F?M?? ???A??i5??J?@$??$?????CKdY4V??S????Bm|Z??{gC?-qy??r,_?ZJ?????U?I*F???M??oS?n??ar??n????b>???xLH??????#????? +?W~?B9?U???????d9|qK$?L?P????v?7????3????p?????J?????Dg?f?6???x???T??y?/?<}19?????????a???$Cd?B??^\~f??06??u{}?3?????`?|?B}??,? ,??e?????x??\?TnT??? J?AeU???M?Da?????S??????f?d?LQ:???B??ub?@h4?(3Q??d?T.????h2#* Q&(?4?????????h&??b6????'?=?d?X??I7$#???X?AX?9??xV?>????? F?????D?h????eq#?&???#1??b??`2EH>???? ?B?v??u????rR9|P?X?? *???Z ???5?????????????x??<6~V?????Nb2m?L?A2B?o???????a?*G???,Q??h=?g ?_oD???????&k(+?$ +???????5br???fb2!&#???U&,?TvTN?2??R??lw??????i??(???,n?[?&??S?hr*??d??B??wP??Ye????T?%???r???? ???r?DIQ>??k?Eb?r> ??f?Zz??c? LF!??uIe0K??????QyHT?xa#???7*?z????,mt????5br?? |n?a???g???? ?B??.?l?U??C?x2????f?;M??/????TV?????PVdi#??b6??Z?Z???O'?????dB???_?B!?e]P??:X?7?&R??C?Z????????DeECy?1??????%Q?W??t4????j)????#!???:`2B!???*????? ?c??]?X?5[O??dFT6???TV?TY?k(+??Y??b:?:?z?\?fR?h8??Lf`2B!??s? 
De?L[m??????l?X?5;??h??VkA? ??G(??f?V?D????????j?*??}:;?lch3EL6?d?B??3? F]e??? ?"?T&[(?l?:??d??xa#?[Me??(+??5?-??a??X?????m<?{\N??h??3????B??;?|T?DY??u??X2}?/Vj?vw0????? J?P>?????@L?M???V?g?????p??fV?B?0!?z?+?o??4cs????V?7?/l$y???????,??z??OF?n?Y????d,??Y???P&? LF!???Fe?L[mN???&?w?B??hw??|?Z ?$+:????MVdi#??b6?:?F??????#!????Yi3?B???T?P6M???:X?/?%??|?Z?????????(+??????,??z??OF?n?Y??w?d4???Y;LF!?>?\???L[mN???$R?\?\k????d?\?Q??Dy??mY?[?&?^?U????d,??Y???P&?AG&#?B?:?|T???V;????d:?/U????p<[p???????P????v[Y7??r> ??f?R???????r????2??d?B?7}?Q6M??.????2?B??hw??|?ZoDY??v:??e?Y~??M??N?Q-=d3?X$?s?v+m9?|?B? ??|c????????p,???K?z??Nf ?DIV~??~??*??V??t4????J!?JDC~??i????h?Q?PF!?.uD??l0M???9\(Oer?r???F??r%lDY?U???n??%QX?'??S?Q-?d3?X$?s?v+m??70!???????? 7F?L[???????C??hu?? A?DI>???1y???? +Ay?s??x??U?laX???N?/?&???R??? ?????=??~?(?U?b2A??kGu?]??Sy??nTK??T<"??.?`?Y+PF!?>A???XY?&8\??'???j?????j???G??(_QV????????b.????~?Sx?e??B}$Q?laX?D?? E?\?\ou???|????T??(??~?O???????v?V?????P?KP?????e?B?C?o-????????K?x*?/U????h?XmvO?v???C?????~?^?????nT??L2?=NQ?q,??d?B?c???f?a9? :??`8??????C?/O???v??+?oT?&_P?(??r>???V?\?f?P??v?v??2???>?B?Q?-????e_ M???R??????BP>h?noQ&???j?a??(??h?k7??|6?H~?Kx???|A??!??y??*????W?O???|,???Z??M??R????Q??!??G?d?????????H,?????vo8?-?????S?_??|&(??FY?F?^?Y+???XD?{\??s,c~E?w?D!???(??[???????c?L?HP~???AY=j?? *_Q&"???????v??O?A??^)???h(?s;E;?Z-f? "#?B???LT?r6?????H? Q?lfX?.8???M?s?J????'??z?W???QV?v??h??nT??L"??l?^Q???B??,?????r6?????HS????j>?]?r!?ND????l??1?M@!??d?-V?D????R??R??? ??E?p?L?????t??JSN7???w????6>9=;???????f2??L6W(?My?4?K s?S??M??l?P?T?b????G??g???$S????L???i???????y6??t?m? ?;?rs2????????????M??????)?????????L*q>????)?{Q?????)o????)??vu???2T,UW?L??????o0Ly?4??????)??)??\,??O?)o]Lyj????)?m]?????????W?)?$S??3??S??$Snh?????)?6?o?S?$S?I??r>??? ?v:???S~2??ei?_?O?????|6s|t????u??????/??}????@_O2??tC}??5@Y???njim???70?`l????W????}????wpt??????%?L???d?????n?P??<}>??0???M?]i???Sn5e???????|??)????y:?:S? +?z??N?<4p???Vn???????R???????s? 
U???GKS^8????Ny???W?:???S?5e?ML"?y6???r??)/\?rc?r*e?P???T?rc2????S??ze????)o?2?Z? ?(M?!?r??)(g??Mn??)??2?~?L?????3e??)@$?????L?p>???????)O?O????1]_??]?L?)?746?My$L???)e??|?`?P]? ?0e??)@$L"a? S?H?3?b1<9?99>????X_??aia~??????????????tC}????55????tSsk{WO??????^??y?~?????????I&???)M??? ? ?0e??)@$L"a??r?\(?sa?G??????W?q?s???? +S??2e??)@$L"a? S?H?2D??)?M?????????? Uf? S?H?2D?? ? ?0e?DS.???'?G{;[_?????????x?px???????)]_Wz??La???u????????{?ML=?{?L????0????L.?\0e?"S?H?2D?? ? ?0e??)@$??r>???)????y?}?????G?N????\c?P?0??????0???)??z??????Oa????????|>o?PM? ?0e??)@$L"a? S?H??)??)??*?%SN?5????)?oe? S?H?2D?? ? ?0e?DYSO????no?'S^L???t??S?{???2]??P????)?on???)'W6e?&S?H?2D?? ? ?0e??)@$??r>L9?9>:????\??i???????>{04p?????)]_??d???;wkR??tcskGwo??????????>|\]????;8:WS??2T?)@$L"a? S?H?2D?? ? ?(g?????l??? ?uy?]??r.g?PU? ?0e??)@$L"a? S?H?5?l6sr|?Lyc}5Ly!??x2??0??0??)@?n????)?&S??2T?)@$L"a? S?H?2D?? eL9? O>9 +S???X[??!Lyvzr|??`_oWGkscC]???Nx???2%[????kH7??w?? ??>?|63?&?????d?G??s S??1e??)@$L"a? S?H?2D??)gO???{6????d????\s7L?OW??(L?n? S~?~?s???????YS??2e??)@$L"a? S?H?2D??)g2'?7Ly??)? (??)w?Ny??)gL??? ? ?0e??)?e??? +]??????rH?H?T* +!9??C?|???????t?w?4k?Y??}??? ?J0eT?)??L??w????h??{h7?w?b.{~?8S??m?6??????X?F?? ?5?m???nw???T:?+?+?f??}?????KL???)?L?`??S@%?2*??P?E??MNy<??S??e??????)??2??M?$?l?O?@N??P?M??????}c??VL?`??S@%?2*??P ? ?J,4??LyMNY??X?F?};??????V?V)?/?b??????v?m????j4?+`a???^o4[??????????.????j????{??O??Tl???3L?????Sb???M?|6?-??s?)?+?)???r?????{=e????)????|??E?);??????j4?+\??Oy??)?2???At'?Ly?j6?2??h??????)???r???U?7r????n(?S??&???S??,BL??3?BY?r?7?????SW ?n???U?7???i2???Sv?[??2KYY?htb?k?u?k?????????M?Rk?:??????)?:?f??\?g/????Hx??q;?V????i???2 S?*S??){|?pd?~|v???w?f???Oy?BNy(?????Wo????????r?/?l???=S`ar?z??d??n?;???.????j??? ?<|7??|??j?T??L???`??v?)? z?F\?O??WY?O??L??????????rc>??S???V?V)????????^(??t9??kr?Z? ?b??5:??$??????'?L?P?T?v???'9?????????;9????xl/????v??d4(S??,`euU???u}??? 
???????L??\?5?????`$??Ev?[^e????W?2??"?????&???p{????X???"??)?????l?????<?{b??j?T???O???`?????Y?L?NN?+?i????`4+S?nw"??????|??Z?o?t???S????r?R?????&??{????r?[?)k?2??Wg[j?k???D?q?-g-?YDE??L??????dw*e??Qv??=w??? w?8AR???q>?)????`<[nD??LYw?|{g??? ?z????j1???j)?Irl$??}^??y??I/?gB?K?s??S?7?,V??Q?\5??????0??3e?>eq?????S??K?\, ????1???;????N'H??L9?&?L?\kv??b???gE? ???O?y??????????a??T-?2I?????K??;e?2?!??1w?t ?q|*[?????x????????p??}???(??????i???????a?OS$???????N???^??",?L????vo8?????tQ4????);W?-d??r>I?f9 z?z??K?\,bh?K??2?A?)cI?~&?'?L?\kv???b??????N?~3??/S?Y&2??E>????d?k7??\FH????>?$0?2?1?'{p?K??`$???l?Rov??r#J??????7O?6e?>e?|?v??t??4??BV???p0@;S?=pe?C?S?????P?M?\???? ???V<8S?t???{Sv?l[??U?|???r6~????b.???H??}^?`??G?????)??`8??l?\kv???b????????M?f?????????a???????G??M???+p?2??^??",?L???z??/?c0e???O??I??`8??L?\m????|??????Y????o??\????k???v5{?F??? 6b?4E?? ????? '?>? EX?O???z?3???t<+W?@??:??????/??m"?NY>????d?o7j?BF???p0p?2?,?y???}??? ???r2?w???=?f??(?^?l}????-? ?z9??f1 ??z??M'96bh???1?g?2???????@0??B?P?5???d??????:B???w??\?2?T?,K?v5? ??F???|<h?$0?? ? ????? ????P????|?Rou??b??????Y???y?}?????]/?Q???????W??t?c#!??y ?|v? Wp??<'??c/d ?Z??N???(?gEu?lZ?S??>e???O??]?'?^?Q+?3????"?_?w???'?`AR>????d:_??[??h?X??G?r?t?L?y??)?W6?5?"???b:t??J1?Nrl$??}??1??2\x????=Nx}???????R???'??V?NgE? dZ???}???-???O??]?'?^?Q+2??"?My ??????d???p?M?|?Rou??b?????????L?Og?7?DH???|?o???h?m?+?|ZH????)???????|{2???E?~??ym?RU? U?0 ?0?g&????`$???K?z?s?Mg?r?? +??`?7Q?U???(l7+??M?~????s?Q??v\??,??????a?a?/?db?9?|???t?c?L?X?mv?}jB???????^:??O?2YUP~??E????,c?a???$??&0??v???????>5????????7?LT???N?rk??MG?~???U???wUF?1 ??O?N?j?????d?MvL??6???x:c?5???$???|T^??N????r????? ?t????d"*??????a?a?qg?? +???f??FL?9?\??`?????p`?^:<M~ ?D????skP????Y????VP???|?2??a???='?\&Y6?~?p?&?T??nO5y???f??LVT~?eQ?7??b>S}P?^?S?XXU?~e5?MD?g,???a??.?N????l?????????k?v?~H?i?]?7? 
?2?Oo0??2?|?a?Y??l:??mP?????U??M?2?a?I:{I21?d2[?W?k????NMP??|&oL>?O????2??<????Ae???t?mV??LT??z?2??a??I?}?d??j#&????????*??W?c?????(??f?2?DQ????N?"???r\???????Eg]?0 ???^?l$?b???n??G??t?pb2+?,??F???2Q?T??;?????IE?7?l?Z?G??e ?0???????? ?d???p?=>0??2?B????????|D? &+*???T??FQ??U?W??\:??~???t??WV????????(3?a?1;;Y???kB??Eb 0?X????l??a???A??L?????t|T?????p0???]??fd?????g ?0?C??.?G??$;?7?????k??<?????S???*?t?[???r$?y ??',\?d~I3?a?}?T?t?u??$?=^ ???|?"??{??e?????????l*???~????4???|??H3?a?}?t?d?ODVIv??@0????b?z?huT????&??jOJ`2Ay'?w?\????{#3??Fp?C ?B?]?i???d%?I.?/?K?j??$;Lro????$?9?|wP??*??????2Z??`2?F?????i;??7a?xV?U. ?)??????7!?????[o? +???????\(??+?d??H?????pE????G?;VySfy8??,7j?r??X(?????g 3??4??Nt??E!?l-?c?9&????W%r?\???)??"y????????I?~?WQ4g??????????h??v??p??J0????m??????F!????"?>*??c ?i?LD???.?W?uL?G$a?$+?????\e??2Z0????d?y?m?2??d&??b??g4B!dyL???Iq|?3 Ke-?e?i]?o;B??H??p??L?6???&oU?,???(Z?a0%?G??????4/?e???h??????B!do?W_c????;*?Iefy???????(?????ld.????3??>#?B??u????r?? 7?+??E???h??]?GrZea?!??????;???[v??\1?"3?\?T????B![ST?Z?rE4???? ??j??]??<??LN??+LN??*?Y??e??1)?????BM??h??@?Id??B%s???B!+?C??~????;uk? <#?B??????~{g??BY?+?e????!???5??????B??I=|Y!???em&B!?2?_?P=I +endstream endobj 33 0 obj [/Indexed/DeviceCMYK 103 38 0 R] endobj 38 0 obj <>stream +H? ??6P???*d?de??7??7!D?r??sC!?_,???E?????o??b?g?|??{????k????s????n????v?l????6?h?? ?[g?5V[e??[?/??U? +endstream endobj 32 0 obj [/Indexed/DeviceCMYK 181 39 0 R] endobj 39 0 obj <>stream +H??IL??????2?:???m?N ?Ng)-J?J (X6 e?4????QAJB?[?$j"&??\\?b??x????7??~??s|>>???????w???[?v?W????J?f{8I????*??X?????a?2?n?(??^?t?R??^>5??s?C??y??0v??BK???'o?M?m%T%>?0e?8fI??)! +4@????&{l!??J?d?g?jt;F?l/??H?7??k]?)??>Ob ??B N?@???7?U??H????z???V?-?4_?????H? q???>stream + + + + + + + + + + + + + +endstream endobj 25 0 obj <> endobj 9 0 obj [/DeviceN[/Black]/DeviceCMYK 41 0 R 42 0 R] endobj 40 0 obj <> endobj 43 0 obj <> endobj 41 0 obj <>stream +H??6?3#C????bD 2?RR+2\?eE +?? +??Iu? +??f?(4*???Pi3?S!???0?F?4?[^?_?P `.?G? 
+endstream endobj 42 0 obj <> endobj 24 0 obj <> endobj 44 0 obj <> endobj 45 0 obj /DeviceCMYK endobj 46 0 obj <> endobj 47 0 obj <>stream +H? ?ykp????C?r?ard?X6?b?\9?e?W/? !?5f?w&M?6c??y ??d??Uk~Z?a??m;~??g??CG~;??_'N?9w???k???u???GO??x????O?_ ??-: +endstream endobj 18 0 obj <> endobj 19 0 obj <> endobj 21 0 obj <> endobj 20 0 obj <> endobj 54 0 obj <> endobj 55 0 obj <>stream +H?lTyPW???Tu:??t?jEM4^?AD????p?? ?e?P???u@?(???]?#?k??k4??????????lm?n?jk???~?}??????{?????q|N?????-?S%P???[t??x?=??gp~???#?hg???F?Q?????K??`?uo????????????q?^???-??Z??*?T\X?V?J?r[Rj?A?F?S)=9??$n?=A??PiU?4????x-'?t?R?,?$r?.?? ?"???C????(???PBbb?????? N???s*?N.????K?k???x??D????a?v??R?a??a????\pl?-??%??????S??0???:?.q???hWI ??G?P< +kh??u??????W??IyL?????)???N1?h,g?Nw???RYIU??5?@?"1??:?F??D V???_Y????[?? ?????x??{?i?R?yw[??]???b???, +??F?????A?9`G?i-M?/??x??M@???m ~?`??P???;?D8r at S? l??{?ad??????W? 7y?:??x??)O??(R???????t)?#rT?;?w2???WA??1?N? ?_??r?? 4?\??CIu6??d?(???K????Ka?vP}??ak&K?/?? J?????8??r???r?! ?X???y??@??????Lp?,????E??S????-Bmn6?rwB??bYq:?d??H'?:?/?0??G???R??\?q?=?n????v?^????Q?CJ??Y??4?j?6??l????|}???`??U :???G????#??&?] ?^_]ZT?V?C?MH?o?V??? bV?w(?h ?????{f ? ?2?0?*#?*?8M?? T??????a??????A?h?@??M? ???u]??,e?0????l?5???m<9x\Vi$P*Y?A???C,??J?????????0?.??@P??????XIn???o???????t?????n? s??!?f_?m?/????/}?? ??l2\?2V'Hhx?r +???b???E??./??)???TMcO???xj??}??:?l??????$zi?x??%?D ??6]`??????R7??%?0??3/?d????B??L?i????????????3? nV]??W?^???????%?;N???8}.(?CG???5? ?+???????G??????????V#?{-x?????9?Ol??M?? ?e?_?qp??k&??^xI?*??? H?V???1????0?7l?`Wo ???Q0???3p??& ?0???? +???5?U?0??n??????T?@% ???????v??'X;?rWc?? ??ZZN%????c?C-Ls????????_?WX??????nxv?;*??'?"Q?Mfb??Kwza?M?t}j?P[glI?OD?'? 9q`????#?Y??????F>6 +????????t??CL????J??w?? L??PQ>?????\?AQ]g?h?7-?7)?????3???T??Vm4meJETEEP ??e??e???+?^?>Y???, ???(???c$?Ib?$??`??? ?|=#????^?x??????U??\VV??????"??^lL?F ??1? 
+????%??yw?t ?\+\?V??P????I??w?b46??e??]?,b?1$???????i[???8egV??I1G??j*?????w??\1??????$D???E??4?DXWWYj,s3}?&?^7jo??myYi?iG +?.O????qB?T?4?a?0 ???.?????;}????m?n3??8???C?0???`D?;>+??~>??/??!o?Qk??:9??Q|?+??T??W"???'X-???3?J5?5:Z?%????=d? 0}?m?C??/n?-???x6?[?.-???@??2?0????R? CjY??%?m?]JE)????].????:G??p'0iM +S?????&'?Y??? ??+JuPk???8J1?:?^?V?????g?E?ih????{???j??l>+L????/???&?IVE?????%Z?i?????O@?f?uP?7??\Z??/?x;tZ???\??`???uL?G???? &?Xs?????? +?g??{?y?????m?r? ?*????????p-OU?{?s???????f/? ?J#w?:???? ?fs?J???<|?????m???A/????L<?jm?v?3k?????<(??????i???????[??`JX???5??p(?w???? ?????W1??T/?#???'????!q&E3$????%???s??U1??M?????a??O????u??7??\6W8)?????[}I???B??/?Gy?t +endstream endobj 52 0 obj <> endobj 53 0 obj <> endobj 56 0 obj <>stream +H?TS PW?ff?p???8m?;jD4j?5?? D@? *8????3?D?3jD?x??@?q8<??x? +. ??I?W Y??o?M?nc??j??????????????V????5?^??Vjb?u?Fa?J]?0??g??????\P? ??{?????????j?1????E?????'??;???BwwWg~???"?????? ?????&??{?E???:???\x??~?8??o?4????2x??W?F?Z??????.????tq:cr??_??V??5;??^#??8?(] ????qF^'??c????Fh>???yI??c#4z?ZZu??????m?x?????????&?p??&Za?1?S ??c.??c?q? ?V~????~ ?+?m?|???wX+????????V??e?\-??"Vq?P:?69?|j?o];?vB??&?@?[? ?_??????>?}b??A??|;??&?j????6Y?\?}?g?%???hR?s??????r?????I5!???;4^)+y4??%LGSa?=???LO??e^????~?%?m1?,?Q????Lg*J a???9?2????0*???4???QB?g?? ???&??#uBd]Lk???? Q??q??4\K?N?????#?3?t???T???{3Y???V:?3? s?w???S??b?;P&???L????e???????hia?_?U?????G.?ef?O(??B?O?E???S+ 0?E?B??? ? 5U???fS?g&??M???_ ??!?AI??bk3?&??? ?n +?C zs?r7'?;POr???8`-??L.-8PW|???; ???????w[?vp}?Q??4Z*?)?@| ??Z?Esa? Z?f?[ipz<8?yh b????'??i??? ???,? ???F ?T/?@????????? X?q?u^y???tmn?>!??z(?&?????Y??[?????C??>q??K? 1T?1??DP?d?UDT_ZjN^???`????g?^??_??x[?Z?S!?i??'3?j?????=r?nu?i?1?-??U?H???>?h???fJ$z?????` ?t?R~?????M`??A+{????????QB$??????c?P??? ?G&?.Z5#!a ????m\????|????5G??j5VXr?8!?2?|??%+??vv ??E???????%.?d?`?? '??q?Iy?????*??NwV8?2^$O\?+q???QC&!????j3???|6?B?iH>HF??????_4e"MiISA^}?'?*??~????G%'????1???'??x??q8???????????-??X3-&???a?Vu??? ?G +O???3?\?Z??}???l????'?)????? 
???F????4%?i?y?N?z?y?u??????0??Nxj???fF??????lQ +2???j?:?o????=tB??s>@?????:?????j?i????9???ZR?e???? ?D???\???"54???????9??T ???)?]YM???????x??ga?iFs??v??1??z?5????[L?????]5??????k]? ?????T?????1Z`???D??<[??????????4]????|??Dx??(??,????JZ?v????????4?{????:????&j?ro??S?j?0?n?3u?A}~0?s????D????p??????n#?v?@?Xl?t9^?v?n7?8???9???)?uf?a=d!????7;{?_???=??)?l?8x#?P ?=Yd?;??T|+?M&?????????N?????? ?????Hu??8??6 ????PIO`%D +? ?>?? +;1iv?,M$??c??u=?h?"?)???U,???Ku???^B*j5;t +??WD?ZcSe}[)?`QGCN??0S? f +q??V?SLc?GS*W?&???P)?*emc???????????|???E?????-??$n?rk??-$c???????S&QJ?N]6???J1??^A???????y???)????RbX at _???7?Q.l??&??:?|G??????s?'?IM?:?]AR????Ue??D?w#?B ????zI?r???m??9????y?:????'???H7???t?~MRp????8??????B?5G?????#???x?N?H?tW^?7??2?H??db?????o;?\?o0eK??`??7?U?p????~?!}???j\X%?Q?[6?~`4_?X;?y???WA?????aa ?? S& \???O??? +endstream endobj 50 0 obj <> endobj 51 0 obj <> endobj 57 0 obj <>stream +H?dU{Tg?I2??$?PL??9?>P???? +*?X?-?M?!??R? (/E??B??D???*?4??OP(?]P at W????????~??NO?{?g??????{?~w?D ??ik???F????$??B j???>?9?{???7Uo?p?^8]?P~_? q\4?????.%C?????.??p_?.vw_?z?u?64#??IJe?''??):}?A?vc??mcCl?l?&U?7????Ym*??qjMR?~+?KdU[4l?.Yg?H?????hu??^?Y???5jV???ca?l0!???%?Y??,? ?IcR\????? ?!??x=?U?^???&????-?U?"/e??D ???`" ?0{L?a3??'?????#*??1?????bAX0V???]q-?~G ????o????*a?h??Q???#~!??Q?J?-I?%?S?S +?????-??< Q-M?b'??v??$????X??$???r?#8??+?|?;??o???%5%$???l7??+???*??vnN???H???????e?P??2=? ????J*VWO R?EZ#Gkz?R??i[o??#??0?3? ?1?D???0|+??!?;r???o?s????.|?a???H??Q????x??1H??m?#{??? +B?#??1?"g????(6s?M?A?-? +>P?v??/?r\??????d?P?T??Uf?}?Pr[??F??+??nq????sC?Fg?c? ,?E?|??@GH???oZ,???????Q?????B?N?r?????}W?W???O|rvh??j?n#?l?AWwv5 +?p??????SPOo]????~???x/Z???????j?0?o??'?? ??=?5z?Ls{??3????W?T???'?[.?iW\n??P?G???CU!?????[y??@??n???:?s??Q??? ?J+?4$$@'QJ???A??bl??????????>j?j{7???5X?x?p?b?s?B??U?L??7L???????n??O?DnO?(~?????????$?_u?0??`???? ?e?T /??,?)]v?*??!n??h?? ?AAr??0 ??=?K???Q???0%???~???$?iDl +? 
????J?V?x?|?qE?5??,?s?????b???"rh(?qt??#G?i?_u?{XH?{??v?j?F???Y;U???FL????v??+???????^??%b$??~??????????????3/*?W???]??S???1?+d???B;?>e?iV?$?B?:9?< ^?!?/C?(??????;/[?m?U?3??? 7???AwE?????`+???8]Vr$w?????t7 + ?wUdg?u???vX?d??`G:?4?%b)Y??& +??????{%&?2g?U?6>?Xaf??"BZ?o ?r)??M ?? +???,? ????#TC.8???Y&??? HXv??T7??0?b??us????-?T? ????-Q?>??G?Pgr?????J??rMM????????)]R????}???I3?A8iFR??0???*/?r?4?????1?? ?Y???K#T35:?????`?M??]????????d +?A???R8 +??H1`?z:???;?p??y??Dk??x*?? 3`=x???z%???N2R _?O???&^????.???s??:??-?0&*P?$?gB??6????&T$5d???????,WiPTW??.NQ??????~ qpF?Qq@?????????AD!#4??54??tC?"kC???f?? (?PD?? ???IB&F-?9?????X???s?=?|??.G[?R??r??????B !P.?????Kynn-k?[Nv????ny???(x??3!???m?7???B???????G'e?6??~;???9??E?kO1\5?^??c?[?????8h??S??,}R,]`0?? ???Z??=C?m?Z?w?o)R?sfu???I?O????FJ?V?5? +?n>?_p???^?0 ???9 ???vp?]?m?31?U/'?*T@?B7?gju????M???mf?<d???z? X2 ?B?2$?????2?{?? ?? +?3?Z?mA'??????e??|?bv??w}?I?[Y??????w??[qHkPsL?/?!Wy?w??v?6<0:p??M+?>?|?q?????Pf0??c?Z?????o-????*?.s?m???t??Nu|?*??H?? ???7?? AH!?b@ ?@ ?G???*???V?b??Y?Y'?a?s???W?Z??u??{?????;5?>b??O?d??????P1??N????????'????_:.h????G?pO?@???!???W??s4??? ?J?#??P?&?(?q??p????Nq.CCX?y?????????q?(?#?c?L?x??Y???1????|x??@_??MCXu?e?1??X??o???]?F??p?H +?}???=?4\?}?Rr??8????]???xH? ???|D??z???VJ?*l?V??*5??2fT?????g-????'?E??gq?????i?x3!`Mnv_?o"?V*%???????????????i?? f??????-??? ??v????F?,?Ao?Ix7+?????6??,?7#? +q4?A[ +?????/????{y??2??????????????r??qG???????????&?bD?i??s2?%:u5?Y'?e????VD2???0???????? +G~???g??{??oc?v??T5?U5?0~~#?????Y6?T*?ju?6?V??gpU6+S3Y???{?sf? "9!??.M??I[?c61??)J???k?v]?1????/?QE???l??K.??:-m???Q???N=????V??d????>?????BcG5S?_o??????-????&?5?? |??Lmy +??~?^?M?????3????6??)xl??????? ???`?;? 4?/???:F};y??7W?!?????r?c8w*baR<+q??????2#U?g?t????\?|xo]Rt????c)X???Ga???a??y.T??Ke????$??F???]?}?.z?f, +Y?%%?????Mo|??O{?0?;NM+?`2?????FC??nj?j ?f??J?D??^^[Z? ?U??}*??X?/B???I?e?? T? %1X0?R?d?J#?$T?& ?&???? ????.?3???&?h????? :??!??fK_9???@.?0??uw?w?I?@wFL=????????\z?.??U_?_??????/??R3?????M?????\?? ?7?mxz0)???.? 
+?Q5?!??:j???}2?????Y??)?$h$p%O???H????'?%? R?K??y???R?+;?mO??E+)M??MKaD+?,?ua??xq????R>???eW?c]\v?A^;?8?6??RbA;???9G??|??G?SVg?-(???i/??i?0???l?F!??'?-??? ?????M???????eRe^IIh~~dc"OjV?z????????????????Y?+Ye?????^x??????k??8?!??Jq:?>?@=??? 5HV??'? X??6??N;8p??#\n?????r'?p??Iou????y?????,qAn+?? ?5???Z?*57-6?A????-?{vi?Bm??> endobj 49 0 obj <> endobj 58 0 obj <>stream +H?|T{XW?I????:;????U+?????V??????V^?D1?$??+?E???"?R?????????????|?6?????&Z[?'?VF?~?N????=r?@? +"???A??f?4??q??? 5?????`t????-??<.#?8??+?4??`/?!6?t??l#???qB#w?b???+b[>?s;;R=?J??q?R????eL???????cTX;? ,??Oaak??u?????2SdE? +???g?M????X!???????;?:#???j3I|?!)?t?+????9]? \?{?V???*?K?9????N???????>?P??? ?e~A?\RB?a0?p".???m?N??w?+>=???????8?=????R? B??^x??m??Up?????=}?????jXzq?d??2$?\?? +MA?g???}?mI?????f?+bA??!}?A???3(?'??x? ++?tn??0 ?a*???????T)?????vrE???b??:? fu?/????c?p?C?-??s???c??????????n???vy?Iu]R??????~??a??C? ??z?8o?z??Z?i??&??[P????^??Wo8?_??PLi?????c;?3?lO???rL?????o??_g^?Z??O???WE????Y?? rN5xS?4z???E???t?s+?1N???n?;?[?:2?U?=????k???O???P{m?FC?_??\?QB??? ??*<)8T3??? K?u)????7j???Gu?? !+d???x??}??0$v???Ab[?x9B??Q???*p??PP?L??B?ZR??+?kV??? ?TK??=??J?&J??4p?@.??#???????B?|??z,?? ?)????????'??R?/?E?\M???y??P?_~???l??????????)?\-??!???}A???????d???P ?????? ???}7??bx? @??N?:`'????? +??&1???8*}?us?f??~?~?r????lu+|??S5?j?k _^#???B~???)u???K!??r??V??5E8??m.m(:^?PW?????W??????> endobj 15 0 obj <> endobj 16 0 obj <> endobj 10 0 obj [/Pattern] endobj 8 0 obj <>stream +H??W?r??}?WL????h???J?J???_d????S)??$?$A?d???sz??..n??C?e??}?r???0,????? v]&C?O?$^%?~????gN? ???<????7?W?E???Lq?T?+?w????}????%???R?}P??j?iCMp?????L?4???E$???8??x??P?x???????L??p3A~7????V?T??=????&&?v%?v5?????$?3?Y`N ?I??G!?B??!?????3 IcZ??@?y??(+v??n?9??????h??d?^_?t5g???u|????QY?KV????nR??A?i?d??z\D???+??????^??j???i1????????A(????s?t4??/W?)?M??>?aD??/?q?^????bQ??W?? ??M'?Q?g*?????j??????????s +????/??u??o=?km:?>_??? Hb2??6???&e?6?;??*???b?o?Lx.Wt5?>,?&??|N?????)?y??d:?&DrW1s%?? ~??^????=?:??H???o??=3?]:??X?? ?*??? k?m<-???tk???Dz??????s???? 
????!???d(eX???????z|t?~?sO=1?? ?{.?;z?????!?8]?)??ORqS??8??u?w\xIQ,c.??3V?("j?*w?}???|4Ca?X??Z????? ?????l4?`%B?W[?QL6#>j??M?g1g????@?y9??hVg? n? ??a??B?? +y??s??????;l{*p=??s?a?!?*[i? W?????N?)?^u?0????S?????$???<I?kMH??? )?X?? N9[??W?U??U?'?J?????0??????@HJ???i)??.s? ?bJ?Y????????e????Y(?#U??!??y? 6?P??>L 9??y?R at mH*h???????s?uf???(>??n????7pr0av?g??V??i4eI? ????]EId?5????????)? tE????`-?g>_+? ?? ???+?,Q??*5??6&?nk?W??V??5?q?Q?$A%3????6??PlW? ?>? ?????Pn?????1*O??????r?2????V??vx?gB???????,??r_Q???=?\?*???u???T?}?p?? ?*3?L??????Dk?Ft1M?e?5[?????"?3c?K?????~?OKA5c?? +?pf???25????N[??m?^?????i?}.1?v?s?BQ????????C!QB?};???sH?AG?N???? ???? + +n[???e?? ????x? +u +Y}????B?N?nV??_?Y"??f?1z!?@????|;? ? ??x???B|?S??7??7??2h??V?0?G?? +???6Rp?L??wPV???X?U?W_??GL?_?)??,o??c?????u???^?>??]K?????d?.???B??Oj?j????A??I????K????hS0??mx??????g]??G"c<????4? +???X!??YCh?j????????}?QG???(:??y???y-?[????nr-??3?-?)?\???q???F?%EQ??h?@??r?K?r?4??lI??q????O?T?a??-?? ???;?d,Z +??8R????uD??m????$?}q?&??\?j5?{?*KJ=??kH???1W?yG(??_a?B[2??r~?'?`?p?Ggv? ?2j??Kzo{??Wo{??Wo{??Wo{?`??????ps?dm???Z??LnZ,?d?~Y?8uKf?#b???pN??Pdy:m?T?+!??????0y~?????iQ??,??#[c?'i?k{??g??q???DR;#????F,!K?[`5?S!????\?p??3??VT.?#???2?Iv?T?[m????? h&/K?B?B??L??????^?oKI??????3~?\"V???e???bvN ??Vn#?F8??o ???? ???2??G?Kx/?D????r?? + +?????1??>2e?x?????oL??????}??cx?]?s|dy#?f??F???i]?+i???rz?M??? :X??!?4z?/????B??????X??mv?????A|??v 0??\?7h?h??J?-J???c????j? lZ,?? =?^oz?V?'??_?#+??F????s VH??;Z????U??-???M?B?{??{???? +?ck1?-:H???X?Ju?F????l_?????c*???KK??,B?Ac?Cx????Kr????q???,????-/ia??e7? ?T:*T?Y??T(?P????-????K????? m ???^p?Z??r???????E~?W? ??s^?4??L2o???0?3??????et?su???)?? ?~?? +??7#??d|x?mv?V?c????R?+?????"*?U?{6?>??$???f??????\?kO4 d???z???.L[-D??0?q? c??? +T,C:.?6>rW?{???m???o.?QU???~I???r%?Kjo?X?????#'??}??>??u?_N???fZ?????V?"??c????}????????Lk6?Q3SHX ? +4 r??????????CW???d????:]??8KRS????????`/?~m?????????D?????e??T??*? 
f??1CH?-?g{??????:??S????Z?y?=t?kW?T???f???p[?@?J?i?B?{??bo:?|???{?>?v????kM????K?Fd??IG???q?q????CQJj??????g?????l?G8C?3?L??:?sq??DQ?r NZ-??my????h??'??gR??A?,'?V?}????????# +EdZ5???L?n_??13???o??????Y?wLC??(n?MG??$+Z^0????ET`v???rw?P?:??fF??????????@|???0;?To??-P??[?jF? (u?A&???zI????????????????????????!?2????pN???d)?<f)?l?8???,0?????Qg?? j?? ?5??? 0k5????[?Mc&"??SRz9????]????#W??U&???g.x ?!W|t?Q??R.!/??7???*8???st???]j???_&9?I??)??????n? +?R.??=???o???$? ?lPbQ].8k?M?f? +X>??`??/???D??dx??Z??c^?????/?????T???Z?7?? +U?`?P?C?:?:????`v!?kv?6U}3v??X?q??8?;w?9???v? V?ER +_??j?x?+???`W??????4??b????B???)tO?K?O??????/????V~!h????? E??pe??j??K&@???????,?m?????bugU????*1Y?A??`??F???`??h?b???{z?j_|?????H?r???b"???3??kIJ9-`?Ncy?#???^MG?&1E???h???{??}AHP@?>_?A????{hl????????.????I????U?? P??_nG}????^)Rv?? y?????s.?? +?????I&?????c-?n??[?J???{??eV|???????)Q?1??????????K?`?Q&?*??Y?{qs[???z?r-??pm??`?Kq?MZm???&?????F??Y??Zk??#????zs??V?}vX8?d?Y?O?6??]?]i?/f{p_L'HP??????f?bJ??x??? ?|X?f)??????XE?~??O!?g??{????N|???^?9??_0?GlM??/?????N??R+rNj??^!?n? F1s????7~pX?{G?????5%?u???????q?g.?Wn??j???+J?^?X}??U?????S at T?s??u??\l??`??i???h!???? S?"????}w:E ?P?$?L??[?L??????\??pz??] ???i???r?c,?????q??N????8?08?N????8??;'? ?r??]?IP&???H???Tg[?iI???? Q-"!?B:, 4~?????>,????f?IG??\????N;??? ?/+:}??? +??lhV?`d????S????a ?/?K?????U?????*N?q????j?>?xj?^=???g/???[Ub,'???6??`??Uk6????L?]?g ??G????b???< ???l(????`qb=?L??????A????u[:=qx ^???4B!{Q??7W =?5???O???!Tq? +???0?? ?y?4?2????U?fcD?G e????????????fa??z??,xS[?y??~ )V???d??Et??^???] ?? ?!?XO?T?5?? i?"UG??????~A4H?6????bv_RfL??2??rN%?i}??"#????I????2??????m?DI7G??j?UE4Ap?tjUc?&3t?Au?? +???N;?? +O9?Yf0?e?????aFX???R%?4??IkJ???~9??#"pB???_?N??@M??]%z?Y?=???Kf?(vb???????d??3???????d?P???6??nEjk????t???~I?????d?y2?B?~:,*?IT???gZ??????????g>d??|?x?~ ?C6*]d???EL????+?&d??j??a???????sr??.*??Yz ?B???P???g:(S5? ?zwWk`?p???u???Cb?W?;????^ N!iP?|??"? ?????p9^ Wj?jo??7??05W????r==?ev@?+ 2?????V?J ?\4??????$?w?}/? z??+N??Y?eov???????'??? 
???Q??KX?{3qA?????????W??? ???A???$??? tEu?D?]I??yv/?????/_?b?Q?&a??|lMa??:x????[??I/OC??.?yN??v????q?x??2?F?Hj?(??fg?#?????H$?I?y/?)E??H??????E6??.d?x{d?o>?g?!?????@8 F1'??SBv???yXP?^?w;??[U at C?|?$?-M6yf???%Ln/ ?qu?>o??>?=%_h?????N??D\K???i????????0[3?`_????????:????P??}??u?????w?z????*????? .`?V. p%h?? +?3?Pkas+?????1???B?d?????.?Ng?k?1;?p].9?[?k?????$W???%4` o??;'.??*??pE*??q#????)W???W]?u;????L{?-?l$L!?R??P? +S?crY!f??p?O?????f?????Z.o\????}?F??At???=????????[???\Y?&?Z?5"B0~K?N?Y????l???? +?????D2?????g4]???????J?u?R?WM)?V4^R?P?2g?KGGL7??U??g?=??_?a?????A??YG[3??z??bj ?Ho?? '_pV.&o?\F"&?)}x?1?|W??n??d?Li????#v?J?lk?????*~????????????X???-?M???|=4???e ??`???????????????B^???t,????????S??0>W?9"?U?O?&?Yu6???MJ-???s????bYP*4m??????`aK/? ?X??????%YA??8Pur?f?Fi +?S?? ?^?Z?C????.'pf&????[ E?=?9??????S?>?Y???g?7?NE?o?????(wv???p?-]047??c??????7o?? ??*oQ?*DP??Qy?????ja???0?+# ???fei? ,n?0????UPa?P-?v?cF? ?Q???#?0??C??????w????p???k?? ?R?T}.T??d?5i?????@5d??z?H??/O???????oJ????Xw? +e???{?B??S*??5??~z??????? '???48,~?j? |?c]k?????? +xr?D??)t6??WAx?GI??dl???????u????Lc?M???k +N???!???"(#'?????^??-??.? ??? =y??uj?Y\???.Tv +??6??M???4w2 {?nz??"?[ qU?????^?!=?EE????q?T????3?CR?~??DT?2??pB-???)?E;?7f)-q?AC?Dr?N?zF|??%??K??zh??R?h???S?O?Ag????{?ES-r??cv?5?"B????Vw?A?G?qz??A???f?\?W????N?F?MtQ?-??F?l"?????2?????b4???&????J?u?%?WM?Y;????Kv{?%??t??*?? ?Mm???K?9???,%"???>7.?/[E??N??=.??qG???????5??f?vD?????}I??*?_?U4?Y^???A??10??r?6??[?k????R?g?]???#??? 9??)????[0?|??\??B???d??[???3??1k?&?6?????? i???.???U:2??/??d??NA?3?????2#????T???????RR???[??>bc?G???-V ?V??E?z?Jl??,?W???a>e???j???@??,F???"?+o???-B???!&?????R????Q?-p??_?y? +?s??"E?w?Z%?^ ????C{a;?}????????kI?=????:G?y????????????:??KG??l v9~???S??O?D?????????x?,??x?????F?4%?y??d?????E??y???-]H?k?GK??#??3????q?^??X?\?????M????k?y??RWC?%??p?s?$?????>?T?Ar?o?5H{? +?????/?\BW???E?2?Y(lA}???!T7#}m1??????{??)a???????rCA?=??? ??s?? +?????B at u??????R?#S??y?I?P??;? 
J?kWj?????????-??s?SC????C?RB??t7C????^7?????]?ZP??%????????r???i??;l?y at 3??_?N?F*??p???ONq???J?H3]??Y???&E2 ??zvA #?L?K???4?: +?Bu?H?? ????wG?'5???1?M?z????W?????{!Y|????R_??$??d?j?(?SY?u*?2???a??Cw???(???Z6F?K 1u?G#O??8?"z? +w??.?Brp???[?kaqRX]??mj?? ??N??h?/??a? bB????b?<(?? ?!-?????I!? ?Y}Z;??3??????V ??,r??S?????P??UaY???-,???A+? ??v???f??^??X??VJy??ov??-??eQ??o???F??a?fe?'???#?"?h?Z??)4??'Pm^/???Qt?V???i=??*??-?T??|- ?A??}?:?+R?LK?`p??4?????HX?G???k?|6z???l? uD?????B????????M??c?Y?????>V[N??????!!???^'w???y????xm??Mez??)???:?2b???Fb?L??O ??G????=??l?*??t?,??? ?.?? ?*~=???*>???l???????w??~07??Zm_??????}??.?o1?? ??~|???????????w??t"@k&}?Q?@??V?2?a~D"?D ?#??a?????s T"?? ??q?V j:??a?%??7??_b4=????(}3/,?u?s?^dXP+??V????X?~0#??????F?6F??Y???6???9??O`1w?W?????%M _??l????4]?8????I????+R????????\??U?q4T!e?1R??&^X)m?Io????????????i???$G??F?$Z#???}X?K?dT H??"+ at D?}?m??B???O1??X??????? O???E ??????7?>D*3 l??V??1???B?g?U??FR6?@?v??&????E??9J,?^???[? +???????g??nf???njN$V??x?KoKb???R????????;B?H?.c??g?D:?f;?$??e?-k at l??*U?]?Z? ??QC?z+?-K}??&????"????OT???)"???j2 at U??????4??L?????^G?K#?0???|???m-&?{??9???i??1??n????t?D??)P????b?li??? jt?M??X;?P!????Y??V??}? *>???0F??sZ?P?J?X#=??/?Q+6?????i'I??j?]?v 4??????p{??b??x?`???uw$?????o'??????? ?$?g??`?`?q??)??~y??| +w?Wv3iW?D???s +??+.#??k48E??X?{8K?????9LiW?\???I???x???? ?p??#4???? + .?2?uh?????q??;???}? ??9?S?X??(!?;?t??&??c??~?%??C"wA ???5???Xg lu???] ??-??I&:B=?k????????u!?????c??d?.??:)???}K????*;#??j(GBT???a??Cw???(??@oE?K r?G#O??8?"z? +w??.r?????h??????E???hM?? ?t????rI???}????A)?? ?pq!R?P?5???2?Bq I??j???Twp??j??Q?l????0AY???aqX?J?????x?t?/Q??g?????f?ZE?Ky?HF?T?7?%??/-L???kA: `?!??&?'??]"???h?ZC?0 ?l??f??TC???S"s?(y????T? ??WE????????6??????w????^??r??L??:????{?5aQ??. ????L?-?H$?fDJ??*?d?z???w???&???????r6??tZP??u4e??uwMm????4??4=?M????e{=A,??b!??a????3X??'?????W???????x???????f?T?}???p??#????C?5???????,??y???~9?M'B?fKu?:;????g?f??z?E?%??H???&Y<M*V?Kd?0?d'>???T???? Chc?o????lz& ??Q?k`+?nJ;?9??? Wt{??:8K? ?W??Z?? 
!??????m?0???m^Di??s9c;??!???&???????l?+U???X7?Z-?d?=??P?????4RC?.??z>????!?> ?ka ?!??]?u?*($?. z???n?5??!`?%?/id??e??????{?x?????i?e.?q??tU?-{$o?[jo???? ??s?G?X!?O5???AM?)a_??B??d?fm????S??&NM?D5>?8???I +??y?+R????????d??U?%=UH?j??h???Z?@?j?????4????2$oOI?F?AT e?I?>?T??^?Q?U???(z(C?&??-??Ua?6c???Wu? ?i?`?????,z???????8 k"H5?|????v??e??B?????AShx?-??\?Wg??z?? C??E_??6???+0!9,??th???? ?OG???g??? ????Lr$??(??S?d??8??? ???+ ???(e??@AI??I???[1)?????QjJ?l??-9,"N1???0???A43?lI??X??bc?}?6J??W???(?? +?|?6???_?O????[A?8YL]?C?\?????t???#?:@k??b[???z^??????????L????$??avA??jJu????? L????5[2{Y?J i??5f?w??e8Z?????>>/m?U?v????????of?K??"??????U-???6??????O??????G???e:B%???;??gO???O??E? Ej?M1?K?[???s????????0U?k4?????u???M?$as3Jpu? ?????-????8L?e?i?sB?j- +B?;u{?nj?Q??$????????h??j??????vm6??.v?p??=??9???j??x???xx???u?U??*:tjV???? +??6?)?I X???d;?? -T???FX? ?H??B0???D????Z ?????????}y????????E?CU??????N?n???)5????f? ??s?G?X!1O5???Au?!a???B???? ?? ?4???[????-}^q(x/?.????8?)??O???i2??.??r {?#%??n???"??j?z???M|v?+Mrs=jHr??]?J(??? ?Z{?R??z{X??R???n???B??????????? ?Q?J??????n7?C ?9?7x??u??Q?G??4Wi?jH????????C?u????m?f ??8??S?(???/gFM ?z??]??&_? e??????;O?????n?>??F TN??sv%?p??Wu??& !?!????F,??????Q????v??????-rm??/-??D.S?? (,?%????????? ??YR?????;??c????F? +???:?5`??????~3??4???:vg?????u??G5?gB?%??kU?G?;?-rQ? ??#????G?? +???-??S???w ywU|???S?)? +-?????o??y ?L;??U???P?bW?????r?Vw?C~?4D???u\?3K??C????3?w?????e???[????.??4x???.*W???A?@6?'8c?????+?LIm??G?h??p?????L%V?O?????a?z??5:?P?4cW||????$|???vpe???$]?rJ??4?[!n5?????F???]?????D!Ut??????%??CQ???????GX6.C??l!???@K~L??7??P?)??:???!Bu_???t(b?m?N??R????+{??"?5????1???G ?$?????V?N????C?oa"??T???l?T9 ????y7 ?l????QzO???^?It?:#??????a????????!?V?w4????r??????O?(??'O???# -?Y???x?????IdU??df??=?|Ci%??? ?`x??[?N#m5 ?!i?(!g?D??&?D??B???Ssuq ???4?P??N??d?aHp?S@?"?_?]??????G ?????^??j??6#??????0|5???J2??=?J??e4H??#?????6?\?g?>JM?? ?????m??????(?CE?/F???_mj???e??????3??Q??A? ?](o??(~?$???9?7??09?]???>I?_??2=? 
?-?????4?#??7$?95?Bf.?&??&????}?}2G J????kb8 ??gc5?d +z???)?K?Vh?????????&Y??r??l??n???,?M?z?;??=?|??? `Y?o:????????MN????]C?gv???7{???????????p?????{LE\??W?%?U!???>}???S??Q?Q%t????? ????"E?(? ???#?/9w??E???)??i????(!0D??]??X4ZI?4b??p?)+?^ ???c*??????j?m?????Z?V????? =;???????n3&????o?)}?I?? ??c?????) 1a?? ?M????????Z$?2??Hn??&- ??r4+c8$=I@??,I5I???+'pXN?#??mQ?TMsaF???i +?????K9?(??Y??o????FvY?O?B?@z?Cc#?RwB?h??0?U/?B?|??Y}m8-??6?R?hb???????D??g????G?m?t?|?%??% ??l????`:?~?? /c{?kH???u&.???????(??i$?????Z??,?`??BnZ???A$?qS??????+??N ???4?Bv"????N6c??B?.A?5???M?????(9??j8?S??&W?F??"??3$?ZNI????}?D?-?*?v?h???%?8P?i?]>_?=???5U?SO?QV?V?FX???#V?\???mY?X8(??? 8????.c:?Gk?- I4??/??o,?4?????>? ??(?"????%C?{`?? +???y???E???_6?r?X??*j???"W?q?%H?????;?T=?iOl?RQ??=#\IO??-???K???a?r????b?24p??%r5?{G???~?_?????o#?????q~?;?G?????$??%2??3??F??:??A +? ?^?8?`?n#@R-??OH,"?%??????Y%0|????$n>??=qL???4????h?sj{??<-O? +?U!B???4?4?F?S??x?R# ?T ?j?r???????;????????o??+i?N~???? ???zW?wK?b?????r?" Z?ttt?I+H?k???Pq?71n?b|? ?:??_?>XY4M??|? ???????? ?@9?j??94?N*a????;|J?lz???????2Xz????????YA?%@?v;M?e ??bZ???J{|%Q??,???G:????w?9 +???{h????M?I???^Y!??r?d \ s??? ZD???;K??Eo -??B?V?]??????[ :?y?B????'???C +? ?????0???F?mc??k4????a?oYRg?{qwj?P? ?AJ????{E?????FY?i ?'?y?????????~D5b?7z???c]??3?????9?NN???_$?-? HI?6?W(2?v{?y?? ???\h?r?Or??^??r??JZ??J?a???????????Iq?jq=?)?:D?2QDY;??P Y?u?????]?*?PZ???D> G?????nw\??+L??,?T??% X??a?)V???f?;??nj??W?lR&??`e???-?a?B?5????????NMw$rh?KO??X?????]?U?(G? ??#?????5}\d3???|??????V????????????????1???u?5?R(??8?? 6??vhA-L?Btp~???Y??@???c0????!j ????'?HCTz+???)??,????.P q[??E?????????B?xt?????!???-???? ??(??aQ3rN?,?J5?E`??b?v??0?O??J?? ?f??x?-?|?^?t??nFJ??**&?? ?y????=&Ha?d???Z?9D?]???a+l?K?nH?????r&?o??_?f u?'n?;??? X at M*u?z;?|?}n?o}lt????`????2??w2???????????n!???L??j??B??<,?T????? ???E???`L?u?v???cF???H2?????>u?"?????4.$N???MH"?1????4X?? 
0??V???????????{]??eH4X???k%?\?w???7?*????????bwe??}?i}2?+?x?3???K'?vs??????G?????Fp????E????????????_???(???W???G_??M?????~04??cx?g,???F?? +spkf?p?,{UR????F ?5K?3???3??|?:??==????w#14g??aSs%?VCN={?C[??4Q:Z????t??WPK_WP??4????^?l???U??3?????????^??o?UE?/?ed ??4???f?7t?%o???}?S??V?hd???l?R?g?G???:B>???3??.???W?T;???K?? +?p???b??%?!????$????2|u?? ?{?????5h*q??Y Z-f{?r???p?????????^r?+&?j??,????h&???U?%?8??4??????-u.?????A8>(1?-h?????#?^??60???;???'??Y>OD{k|?i?l?mx????????R??3*{?el??????#????????d?ep??????B??`??\ P#MS?$????n????*UuZ????T~3?"b$F?4? R?J'??b??u?6?u[?&m?????? &TF? fG^c? +? VA?6???z~???C??i?C??????x??|??????z w?/2???o??Y??????+?a?g*???=?t?%?@???:F????????N"?'??X]2h???Z??\???)??OC?=???H?V??Uf?????J?6??uVs???f2Y?? ?????ELW???7??;?E?:&??fC??e?y???@3?q?=?_ z????.?/?- E-??3Fu[???e?X??j>73?????.?3$=? ??l?|??F?.??e??f?>??d??JxC??B?????j???l]z?5G?.dIsOC??Z????G?Q??M???;'$t??e(??v?j?????????{?KX?C??$?v[??"+?Z? +z??r?????? ??@?v?)??r5???K?1?-???]???????zJ?#???(??E??0??X?(??Qb?!???(????e?9??m??cR????%??+???n7?^ ???.u?L???j'???T?N??\h%cT3??H?,}LrW?R?s?'?b"h?7??? +==\D?]Q??CGy??2 40.???S?(?V2?N?q(?\P??m???lVP?C?X?ey??Uq?me????p??Q#??SuH??} Ox??8n???????\$?,! ??I?T~?Z?Eb?}??g)A??A ???6-??,?i?????z?\???.5L??&?6B@????{?n?A?w?3?a??????h??;??? +?.??EY%U?K???J2?? ?? ?-?V??T?[?? ?x????K???V????)M???????9???i???Y???L??b??AI6??? ??<@Zd(5i???jl?C?? k?AXNI ?"?H2?g ?????*?D??d'??_???1_g}???#B?}?J???/??^P?%f=??^k?2??? ???9?-??K?????/ ?{(????????sQ??z? ???Yn??%>9h?dv??m??JY????.,-c????H???2?+K2H|???S?Iz2??:?Q??I*?XY?4?l(R&)??vT:i?y??u!???Pg?z?iE?? ?eE?O1? +*?a??#?1 E2?5?*x????R???????C&NyH4???P?0?/??*E +???[?????g??????????_?f???N_????HeDI?w??????|???9? +??9<`?a?b???8"?O??b.`D#LK?*?0b ?xX??U???.Zj? [w???M???s].9??F??)?? +???Z? +??\ t@??????`P?????fn?N?v?V +?"?????Q?????M?x???]?1a ??=?5O????NEv??UGb??p?U??Is?;N???Ns?????c?0???????????Pj?? ????B?M? i?w?????? ???W?????????A??rb?g?q?oG2??6???W????7w?????a?_ ??w?@?'#???[????????SfUp?#z????????????ysG??'.?O???g?? 
???????|?a?Ft?_0Cw??lD??54e??,,A?Jt?? I???M????O}-??v???????g?t??5k?tH?<s9?????u ??k?????????x(???@???t8?f??6? +?t?$?`j?~M?A??????Hbv!T???? Y"}?$????? Z?wV?x?Na???v ???J(a??xGUw?????~?"AIj??????rFz?R+?^<#U??k>??F???}??3??3 1~??ncqF1a?:S?YM?`??>,??Qs?\(?e=??MCF ??^???]?????5pV?`V?e??hcu? ;X???n?I??l??zY???fg ?8?/?[??>%B?US???e??j?nM ??@??H?"?J T?????%M?UQG^?4??0?j7??m_????Oz????^Qq2?+??r??????xb??? ????Yk? +??y??;??i [=2(Ea?}}0??????wB??????S??4???,H9???Vb^?f?9q?????J?if?U??'H^nF?S????f\?~^??}???Wm0??U>??H,?????c???LV????4??n?"z??oThfC???9?n=??3l???7?W?O{?{????lc?????????:,??????????v?p??~P??D?> 1?X? +???????+??d?{_s??s#??????uZ??4?!?P??,???E?4?o??V*?? ?D?r???Afb????tFV?????!M?P??\???T???Fy?xZ??F????O!z???I?V3$?C?%$_@=????V? ? +vlm?????`?n?{???a4E-??Su +3????)?P???V/i??????g?\r%??(:?U?J????CCA-?ai?. -??=7?Y]-x`?<4?Q?I??????~ +??$???e#?}1P?R+?^?????5?????3?*????WW_?y?m????E???v?I???n?2O????|-?[?1???O?_n?????8VGH?GIt?,#??S?'??g??&H???????O????o_??/??2B?V??j???=e_????????B??2??[*6/??hC??????????4k7?u:\???e?f??)?Sp^???z#?? ?|???x?? ??u#@????a?S????lE??/?=??~%??(?Y]????1'??y?1??N????a1?Z&??I?f$??i????\?5Z\D!?J??6`i??ck? ??ay)2?n??iw7?h\???xF?????? ??%W0^?@??? +??z??;??&s??1???BL +?s?B?p?Y?$??,?ar??j|??4 +%?;?n?SA h ????!??-Y?????q 5 x5?uw???:e?w?G??z?5V???Y??V?!`Q)???d????r ???+0?:??s?? h ???`???r?*"K?>????7??p???????t`bi?????Z$??4?4?H?Y.???k??}??t? [??Bi{_??}??k??/y?? V???????#????#???G?u/?w?OwF???|?|??????>??G~????#?^>????/6??????H??n9&|??????b?-?NPl#?U?~-???????`#uE?+%?????:?????:???k0:???!?J??WL??vWY?2?????"WRE???\??$N?? r"?a?a???y +r:"X??]?rt????Y6J=??2?~??0???p#]????P??G??+ ?? +]?3?}"??'?Qm8 +? ?Q??t?j5?ReKB?????[???D??f???k(K???6o????M ?? +*R:?????@fd?????u??!?n??Zb???l??y7?I pr??d?????4?R?m??j8$??t*?`???T?????*??z?!EPAj??^???U2?t+V??~?????9t?k?l'Q?E?g +@??W]????C??bp1B? '?&q??M??]"A??E???h?,??? ???v?%j?&e?L??l?m??i??k??`?v?????? ????*?|?``????J%??]?l +UNTe4?G5tCp?{??XS?????U=}H@?Y?nw0cT???T??}?????D?>"@??!???l???? X???M???????}???Or?e???Z3?????????V????f? 
~PI?#?I???Qz?vQ?????,?j???rE?x#?RK?3????? ??,?? +???{??_??`0>stream +H????^????g?t???%D???y??( + +d? ?????????P?????????@??_????B!?????B?-o?? m??B!???i??P?;?+B!?!?B????w?f??????4?`???1?O?C!???O'?~7OT??? ?2?9?A??[f!??????Q?XpX???f9?T33?2d??!?B???-?????i?m?4n?,5??I?1c?!?B?m?-??j5d?Mhf?G3n?????FC?r7@!??gZy^?!??????;i???i6A??????2??.}?BmK6s6?5+???YG? r??9#4cc?-?(?7???s2B!????O?o??\???j??a?,6B?5?Y,iFbFQFB?0?"?k#?B?????????1%kj?|??7B?g83g?,5cc?)# # ??Z!??xj?o?k?j5?i?4d?2??4?/?u???`?0g\??1eb??f???B!???}???u??!???j??%H?????0?x??Y33c???),????? )?^??jU?B?e%?By05??????Z? hC???:??4??????i> ??~D)/??f?4?25???b?X??#?B??9c_(?pX6?5???X??H3???y?_??????3?1qfhY??A?????) +???l6{?]#?B???????????l?55?Mk?iz=??????,?+???yP??2gFCk?7{???if? ?A?LV???Rt!? ?B?s????u%|??!?H?j???????S?|^%??x????b????f??u?L?^W??3e$d?_??t*?J"?B??????iA? ?F??PS??i4??w{=?oYL?)??E#H?:?qy???pf?8?7{????4?L?Z)+??e?2 VL"???B!?H?????!??)????J???4??x2????h{f>?t?????????K???L&?R?!??dN?1R0G?!?B?????D??Q?H??i??B??H?5??5??Y4D???*i^?3?3?3w??d<Y?3?J?X?eI3J3gQ??LP@??:ws}y?I%m??(??d?(?o??o!??d????|R6?55???&z?'???$M?Ro?4?k??D3yY?????vd ????5g2?d";? +??????}G2t?{?]?By????W?a??J????#g?s?4?J?????a????D3??)?=a??Y3????"???w!??h??W???a???i"??y\?&?'??-M????fJ???H4?3 ?????<3??????f?Z.?8??f??f??????o?BmCb??okX5??2?Q ?H?H?/??s?R?Zoj?0??v?R?Q??"?2????_B!?????uh??a??a???4~ xb?????? ??Ro??$????R4?4/??A?4????W??{}?I%??h?????qf?0?n? ?Gg?!?Bm nk?*???'??|L?@0t????T??&W(U?h ?h?h~4+???g,?????? +????tr????>i?????????*a??B??h?7?6?[?4.??F??x2sy???*?F???? ??W4O=?n5j?R>{}?I%?c??I(?????K36f??Bh?s\C?qL#I??Hs8> Gb?D??DS|,????3???g?f?Z. +?$??h?4 ??3{{???:?y?Fbf?2G!??v???i?4?$i>???i$z?H]\?????[4?V4.?|^??L!wsu?N?c???c?qfGr??????B!???G????q?f???????x*sy?+,Es?F4??e?/2??q?n6????h?Z?????Z?Z?????R0?3D?YF?? 3?@????????l??X???.?;zk\?r?&?L?"?????f?4?0 ???2?0??B!???{p??f?#?P?Xm?F6????l?X?r?f???q&?KE]]4O{????=?M'c?p? ?4??mV?3T3:g??_B???????F7??46?????gE3?de???????3=?3?r1?y& +?^???3?f.?j??"?B??:o?4 %??j?;]_???IE???E?9??A??3 q"??ih?)?2?? ?y]N?K?@3!?z?y???4&??fw??? ??l? 
?????h??+??h4?UU??3S??????J?p?I??? ?z\??b?%g??B!???~6? i,V??????h?G??????X?N?7p?%??b???h?? ?%YQ?ds????n7DUy1~?m5j?r1?N?????i?ZL ??%6????R???tz????f?R 9???h??????,??8????C????T,b}???????GB?????,&?A!??_??0fM4^?u8?L???w?f?7?'?\Z?TB???@?|?i?U???8??V??Z*dS?????j6]?8?i?!?B?5?&? *?[M??h. W&??????P4??)?V????????T?5?h?y4?hY????????R!??GC????[??+???3 B!?>9 +]4c?Xn??:Mer? +Wou???t????J????y?@C??R??l2??u????$??k???8??7x!?B???K??d???n??S??m???? xA?KKE]?????,h?g6d?*Ki& +???j?WK?l* ?>??n53F??x!?BEG?\Q?8??u8???J????G??l!+?J6?4?D???R??l:??u?R?e?0??7?u?????Q??" ???bDA at D??.$????????TY????c???7?wS?"??Y~>z4???_?RLIf?y??|?h?{}4?/7?e?v?Wwhv?m???|2??v?~_??.????BA?????????'?G??2???A???xJ?? +?J???????|??lmg?w??7CCfO?f?Y/fcC?w[?J??e?T\?"??Ya???g??x????r~A K???d????}??? ???j???????h^????wl?\/???p?k?6??[-?N(1), + |????r~A K???d????Z??y~1?th6??8????84??a?????xy???w??|6?Pd? ????g?s???!>?C??y"?^j7?j??? ???|?6-?OCs8??2???t4???z?|?]???y4 +x?G |?????~A K??T?W?b?????uc2[????????????9??\-fC?w[?Z?X?g3IE?????XB?3??NC?????S???v[?o?? ?>?;4^??O?F??d&?/?j?n_7&????Z???C?s?f??M ??m5k?b!??$Y?C?s,a????D???Cs????H??????????z??F?|???T\??E???C_???/?!IV??l?P??Z??nLf??????o???-s??OGz????U???l&??RHx?%?;4??=??X> Eb?????v[?o?:?/???fk9???Cs<v???????????]?:?M'?? +x?G?3?th????_??O?9??\?7???p???F?t???T\??th8?0?3??/??\@Eb?????.??Z??nLf??fk;;z4?_???,OC?k7??R?*?&Y +??>4?]????? b8*??jN?)U???`8> ?e?chv;{k????H???????u>?I(1)$x?G?3 |????? MH?????*??yl????/7&??ih^ ??ch?O??;4j?M? G?????/?9;c?????&?????w??S???&?????<4?????l?????C?z???h9:4???(?14????x???B(S?????O?C???G?h????64??ll???V?Zv?&??????{??????14b$z?H?9??\{hu??1?-V???~?mh???????F?ih?? ?"???!,???$?S???6Z???p|??Cc??Y????u????jR???(????ah?+???$+th? +?j???s?f??Cc?oh?????;??\/????v?j-????Nh????] w?????Vu?X`'??vS?????????875>???????PW]U^ZR?N&Ey?H&?GSUS??????;0<>5??????wp|z~qus{M?4???Cs ?Anh????loil??? ?&eh???????????1????`h2??[?G?g????:4g'G?[????????hh? P?????>??????????Z04?G'??\????????C? ?d04=?m???T?rCSQUS??????7869??Y?????????X?,C3? 
MMUE44 C?Q"?L????klj?????Y???Csx|?????fbt??;??hh???? +?g???,??????o?#3???????Cs?????????ff?C????M???/?L?CS???hh????????C????}uy~fbd??? +????w? ?O?/??GCs ??Cs ?????8?44@~?CS\?84]??L?/??on???LGC???rC@44????? ?-m]?????-?74???7WOC? ??@ow?? +'????? ?f,74??\?54??M???????????????Q44w? ?z84???t??^MO????l84[???????24?????C???#C??????T? ?`h?????y??`h???????Y?Mnh* P???? M?????????????`774CCS M*;4_?%8?d* M]04???/???o??\44m-??(?_???op?ahv h? PX/???5;4??944????2C???C??|h? ?y?????Y?????i?eh?O? M?g?n?//?N??w?6?2 ???C}=???_?k?*?J???????(?J?KJ?+?k[??? ????/?|???wp|r~yu}{w?3\?w????????L N {??=C???b?????????))N' ?oE?dnh??????R44G??C???b???gh??34@? ?>24??w???t~ghn?? ?y ? {??=C???b?????C??????`h??v~|_[?????????PW]Y^V?N%?????O????K?*?j???C??<65???????trvqyus{w.??>'C???b???gh??34@? ?>>4g????.????RCB84???4?u?DC??????>?C???b???gh??34@? ? {??=C???b???gh??34@? ? {????????????M}]uE84???? o^M???lDCsnh?????gh??34@? ??????14@?}dh???non?.??N?v?7?W?????z????k?*?J??d???Y?h?????5? M??}C?????k?v??N?/.???? ?????_?=C|??=C?????????????)C??????hhZ P0??????&ah?|{?????Y????????????M&????4?MI44?????< MEUM??fgkc-?0 Mog845????IC????iln???~??7??*??ph????????"?T04_ ?O????qh???2????G'????:4GOC3??????hh?By54???Yyohn??99:???\?,?Mg??=????I ???I?CSVYU??????m`d|f?qh???n}t??P>@B??E`?-?^,??? ?????$?$N?W-h?Z??&????W?B?????z?? M?H???[?7?5h2=h???4G-h??=?!h?K-h:$h??Y!???????A?e0?L4??z??? ??J??!A3[?????As A3'A?m7*R1'$ca??? ???{2I????py?????J?-h??????Q??a?Y?????s?*=???x? x?N;kA????4~.K +??D??K?f????4?k?,??AWn?J" ???????A????!ACiAcw??.OiASm>w_F??j?????o?????zY?g?X?-hX??M?h??1?{?A?XX???#A?????R?)kA??????9???b6?I?T??Y! }???2f?dD????Ac%A? ?H?d?b???{??{??? )=h???l2?w???T? ?X??{?AC!h?~ F???V _0?2?L?????f8)?oAsV???f5??^??????K'????v?????h@????AcfX??????I??S??????-???Q ?g~ +? ? ??t=h? ks? +???a3???;1????A??s?XR??J???'??z?????????aOn?+O?????9???????(4Z??pCZ??&?f,V??? p>??=J??s?e4??g>??????z9? ??v?"sB2????Z?2?????????4f??????T&/?jM?;O???pT???Z4?????j>?t???????h(?#Ace?4 ???3$h(-hl????c ![|??[ro?#h.??Y9v??b:??V?,2B"????? u  ????gL?XX??? 
p?xR??J???'??z?;????????4????i5*R1'$?.?q;????)??h??Q???zF3?ZI???(???E??x??????f??????yU9???r>?t?U?1?I??P?G???2??A????JC?-hl????c !S????????fw8*?O???zV???f???=?U+??????9?????3? ?nG?=hh?b?;???'?\Q??[??p?t?f?,??X8???6?1S?4(?!=h(?lam???E?I![x???ro0??W??4?[??~??U??f?Y???A_n?+R1+$???%Ac??i??????{?D?????n_ ?S???T??;??d?$As?,h^?a?Y/g?Q??????|?????>??nc3?n????Hz?am?? ?B? ?kM??2????????Y?|4?L?f???????'7k??BVH?#???r?X??F??M}!Ac4??a?v??E??t?(U?m???L???N U?{????zQU4??f??M?N?^?s? }n???2f ???3?g??????\OT]??<){??^k??? ?("?????????'e?>??w]?r{?H,?)+?vo8f?? -e?-4?? +?Jx?}???z?X??b?P??v?6?b?h?/?Csum?Xlv??? ???T?P?>6???r??\??g> ?1??~?%?Y?K??????m>??w?t<?=.m?ZL&????g??F;??@(Kf +??Z???Yn.?KY!?9~?????w;M?(?R??d??EB??I?(??t}???_????d?X)??????x*[(V?O???a9^?V??????*??$?3??;??j?.??G?!????)??d???h?/8|??d?P6???B?X2??/???`?Lg Q????mw??o?y??ny).??d??En^???S??? +?????Cc|?J??? G??\?X???{?g??i%+u????*4[UU?+I?9??;??j?.?ND?!?>??8??L?????D>c??no ?%3??r???F?t?%y??????????Q????s????jc"???N?7?????]?Zo?{?g?#?Y??F??g? ?1u??+I?9??;??Z?>?I?"????t?)??d????3y??????????x*[(V??Vo0?Lg Q????nIh>??q4zh4UY?Kq1?&?A??T?? +?t"6FC?( Fg?????????'3??r???????i%+U?????F??ny% ??eF?n??P.2??m8????????`4p??py|?P$????J??S?7O??B???????????B?????*???9?>{?f?Z??e??H(???4e?h?|????c???????T?P?<4Z???ag? ?de?j??~??g???G?n??J??? ??F?R,???h8????????8|??????d&_,???????? q)??3??g? ????F^??p,3?w??Z?X?$?d4??hL ??'?????3??r???? ???????lT?|f??g?????QUe-K?b>?????S?Z??e??????CFc?, ????????????F?;1,G>?4>?%?y?14/zh4u??+I?g,3t[?z?T???H(???08?3>??n"?D:W(V?N??r??(?kEU5???w?y?UU??Rx????i6*?B6???C???x??K????O?"?Ie ?r???? ???????lTmKB??oC??e????|:{??c?|?????h??????v??????z?L8Oe???j?????#?Y?????g~ +???A?????D????h?m=???h???M?OFC?1??\a4?:s?w?L>c;|??&?1>?hu?C??9/.WkM????}????????h??0?Xf????j?.?I?o?Fc%?!??Di??:s??????3???????t??1??xAZ????lO??.4???j???$?3n2?;?F?\,???0??]?>C:c??~??L?|??hvz??dJ>#?Q???~??kh^G#??L?hz???1???h???4????3?g? E?i??|&x9~?L>?? ? ??qy??????Q??,Ea???h?m????? 
e9??8?~p???X)M>???g??gF??H?3??3??MU?h$?7F???&?67????Ai?????3e'?q{|??L"????5>3??gHh??3?????Y?V????h??h??F??GC??1Js????u??????????^?3??D*?????3??3???????Q???d4????h??0???r?v??????o?6?w?t???|??????p4?Hg?????????? ?o}y??q>?- ?f??W???s,3?:???J?>?M'b?7????r:?h??n}????Q????8??`c??????Q?@t??=??????????\???????u????????}??O????!?B?3E?L?'?????NN?3?g??3?`8?He???j]j??@?&?l?\o?????'?3???? ?????i????????D???????iB!??@3{?|???????u??x}?`8?He??????=?o at c?b>?5u?c???K?h|^???u~?Hsb???4@ B!????k?X??|z~??t{n??`(?m?K??????=i>s??p??y??B9?7?2&f???|zznq? Gb?T&W(1??e?H?y-??!??%?R>?N&b?P??????i??4{?????1e?B??!q?o ?p??4sv~??$?x???`(?'??|??Rk4?3?!yfz??E??Eso?f,D?l?*w?|6??E?A?? ???2?@ ??d??nB!??^?????K3?3n???8???D*?+????????`8???x?A?B43?:P?NK?W?$?4?&2H?f?1Mc???` ???W?A!???2>??????????8????g???]????r?7P??>?/VG??%?gK4[C4?:?u;m&?R!?M%???A?k???4 5\5?5&lB!?;????? ????K7q?K? +???3?r?.???????x???h&$??"???Z?\??2?D<*H??x??+7 C S g???!?r^?w??Q?Y?Y??????????q????r?7PG?d:[?y???g^???D?^.f??6?n?%????b>k?&????x??4?jL?0??>"?B???????3??)Y?c?Jh?????H,?8?+??Z]jud??P??N?Y??[???????E??5u?S?v?A?)i?D?X$L???L?Pc?????i?B98??????e8f?? ????w?j]j?e?7PG?D????f?}8?gL??Es???W??L?h???t;m?^??K?4?p?4 G W????????? B!?|q??2??K? a?4skh&K$S;g?J?jc}:[p???3??T?F??????E?L*???"?i8jH5?5?5L6?s?B97q????2?e3?[???x"??f??R?8?h?;???#m?????z?=?gE????WK&???????nJ?*????e??D"??B????P?T#XC]?B!???:a& +q?$S?lNp?&5[??^8????? ????p4K0?dr??L?nqFe?????z??7?????fkx????9??!?-?f?H3??!#??nI?Z?????e3i??b sM0 ?? ?B????0?0?0???Ig??f*??$??rWpF??Y?Vk?3???k?i?? ?f>??v?4??0M1??ej?jb?hD?FD!??S3?>G"$?L??C3 ???g??=g???f???#{?F!???~?]?V??l?O?4*'M????4?b?P??!?l?lb\7?"!?rD?{7?.$C?Ig,??+U??f?M???????????s?&?4??4??ji?fD??q?4%????????&?N??l??nB!?????R)? ? ? ?L?V?K\3????Cudpf?\? ?p??FC"MO????a?Z?????????B7!?r^????3????a?i?r?4? ??6????\xf?a?y?y????N??^???4sNm? +?(????X?]C?)s??*!?B????2s I?(#,?0?!?0? I3#??>?q??f????{?5i?4?{????i?A??(??P c ? ? ??B!?f~?? 
?p?0???5C?!?0?l-??~5g^??i?ifS?4jzB5???M??!?B????$C?a??cF35cq?????U?yA??k??M#P?Tc??dC??Z!?rX??w8c?d8e?ef^j?s?a?????3???????n???F??`?e?? ?B??Y?? #(3T 3??S3+???q??z??a?Y ?X?k?5?5??G!?????;?I?(?-ca???f{o?????C????Hc??P3#?p?p?0???mXC?B95q???[G? ?@EW?V)??H???????GUUa???????k??????3???;g?I???v?? Q?UsrU#??7?6Y|Mi??|????q=#5S(g2I?f???)k?k?m?h?M???OI???????j?T???f?{?45?5?5???w-?V>,?+?2?2Q?H?????;g?I?f??????F????E?&?@K??????j?X?H??????????U?a??O???R???3????f???*i\??Q?U#]???/??e????E???5?k???J??OZf#fj???G?u??M???????c?2????f???[?P:[???8)p?((]+???ph??OJ? T?t????o?+???G?:?? +endstream endobj 27 0 obj <>/Filter/FlateDecode/Height 990/Length 72410/Name/X/Subtype/Image/Type/XObject/Width 1038>>stream +H???o????g??]?-???U????-??? *X(?h??6?M?R?*??I????6???b?\+?_?-????6???Z}Q?eI)????b?????s?}????{?rF????f???g?<{8?!?B!?BHy?0 +>I???T???]?&I??PM????]?XL?0 +\?k?`?Q??]??8????@5??a?|???$????&P &q.?5?j0??(p?? T?IF??wM?L?0 +\?k?`?Q??]??8????@5??a?|???$????&P &q.?5?j0??(p?? T?IF??wM?L?0 +\?k?`?Q??]??8????x?Is??ulD??&?w??i?0 +\?k?I'?ik???U??(?5>?s???(p?? ?'?4???S?FT]j?|??P??&????&???_L?=?[?fETQ-??]???MD=#=????iN[??P???v??uV??a?|?-?M?/&? ??-s?"???|??K???{.??&???|?&q.?{ t????"???w????*?E????????? u??jg=_g?IF?????$??L?????????p????"?????"???u?I??????z:O(???3????3???_?v??uV??a?|?-?M???4^8?|!O?K? ????+"j????*??j?9??,???9m?gH=??V?????0??(p??[?*d??q????????}??Jz7W??x??k??]K?Is?r????D??????$???{o????V???7D????s??Y???5b?????9m?gH]j??Y??Ya?Q????@U?p+???"?Q??9??,tj??|?Re????3?.5Q?????0??(p??[?*d??q?????Z?????????(? +k?????_?%?#????O{????????????dsf?K??M??e???D???????\[??l?????u"Y??fE?c???r?%???=??S???m#????W??L?[??RW??:?W??????jjg+??n?-????wfwn??Z?????e????3???z?~?p?L??/?z??/?z??/?z???????3?-?d??v7]??????Or ?V?^??&?q?r??U??N???Q??}6U!???{?n ??v??t???ge3Z?|V6??ge3Z;???j_????~??:??Rs?"?????C???????????b??k?X??vM&????}?+Gs??+C????{5???lP???T?G?;?;?Nz???Kf???GT??????d?r?{8}&U??E=Y??E=Y??E=?n_??k?????V?R]??????xNk?'??d??R?8? 
+9r?*?^??k??(p?>?????V??C7DT;ku: ?Z +????h>+??????????c?/?k??cm?HV??Y??X????v?u?}????i?d???q,?U?&?A?V??????N???zf??????J6??iK???????['?V??%??A?#???Lf?d?^??=?>?*????,????,????l?/e?????LfK+Y???M??ws????w?c??????Y????g?N?????t??7???d?Ik???G??qU1??k?~?)'??j ??|????"???+???f?.$K???t???????r9?????-I6??????????d?z??=???W?w????????Z???,w?I?_-??_????B???7D???\?'?5c?S???o?HD?V?,???$K???1~.oX}???P_?dY_?dY_?dY_???????Y???r?U????l?????>???????z???a? +??(;+??/ZG??\??Z???bf/4??w???FT4R?????]a?/l? +?q?????VW?C????k?57?W)?O????Y!?B?_ d?!????H??;o+?- ?^?|f?g???'Q?K?n`?I???4y???_}?2?????'?B!?b?l9d~???;???,??\2?3[F?/??k??g?2Z?$??????b????)3??h??{?5-?&H????U????o????t+h!??io???^?e]??r????? ??%??9 ??????K????K_RO???????l?R?????C?ZC?g?K??Y??b?wE?}?????SH~@?U??S&???B!???????_H$???\?@T>?6?oz2?z???K?????????u?^H??U?g?3*?>???e?????n??%@?~?1?? ???c/?@??Sz??c?G??}o*2s??}???,{D?YH?????I????????~X??Xs6d?U????Xj=?$H#?q}W8???l????Uu??j??J?|????B!?b?B?2?? i????r?K??g>2??>??????\u???;g???e?????n??%@?~?1?? ???c/?@??Sz??c?G??}o*2s??}???,??"?Bjf?-nO??%???/}???.????!s??.?D??R?$A?????????`??O????'Uc?U??S&???B!???????_H$???\?@T>???\?Y/??f???????9??/s???w#? ?/????Y_\?e?{?ZU????;=h ?{S??s????dYAR3?nq{??/?'m}??Vv?%?? ?kw$?6?Z ??~\?N????<??vU>???R&?2?~??B!?X??-??o?B q?????????????o2??|??o???g?(3????s@?~?1???)f??C?K???*??7vo?t??s??^?s?????=???????t?????????l??#???1 ??m"???=???]a????????Y?6?%U???R&?2?~??B!?X??-??o?B q????????????z??6??{???}z?2????c??S??#?rX?6?Y?T??F??L??9????u=g??????.}I=i?K???? ?????:b?{????&r???s?]??????????%m??QRu?*e?)??gO!?B?? +?r???/?w~?.Y *???h.????j3?G?9??*?h??;f}1??h?g4?? @??w??? !?????b?9$?U??c??b??5??d?1f?55^?s?????=??????????? ???;???+????L?\k"?????????b?9$?U???G?9?????}??sf?5? ???E_Y?k?????O????s????U????*??~'~???d?.$!f?3?oqg2?\?L??eE??TYc????z???K~????k0?????z? ???????zn???????/j???\!m??w?????h?@?????]?"????d??|6-J???????H?W-??W???ql??????Ee?Pu{?1???????;5????hjS??\{? }.????? n^??#8? +?1$?v! 1???}?;?Q?Rf2l.?s?+N(???\??s?U?}}?( ?9WV?ug?b:??????n???~d???@l at J??w?(? 
+i[??A""""???#??1????y?V?g?????Y-?M?h??z???|??? ???j??[T U?????++{??Ss~J???6e???Q?????????<0 ??? =??s?????]c?3?????3e.e&???>7???????g??>?\?????"??see_wv) ????? +o????G?]???T?{w?R?????$""""Zh?=????3?????ke??|6-J???????H?W-??W???ql??????Ee?Pu{?1???????;5????hjS??\{? }.????? n^??#8? +??\?5v! 1???}?;?Q?Rf2l.?s?+N(???\??s?U?}}?( ?9WV?ug?b:??????n???~d???@l at J??w?(? +i[??A""""???#??1????y?V?g?????Y-?M?h5??lbV????x????vO??W??a?X??D??w??????5?H?z]???xh?s???N_??{??!???????E?W?????Bb&=??w&????d?\V????6=6????v v???jI??z???l??b|??????Q?u???4??E??"??M?y`]??c?8???LE??J?B?V?w?????h?A?????]?"????e?% ??FF?Rg?|65?U??v????Y%&>?i??"?=?g_?o??b1??/Q?????++{?<p?^?=?:??\o?q???x???f?4???n;T??????????I??????(s)36???6?M??b??y?]???d??ZR?????g?1?????X??????k?}]?z&M?{??H4mSk?F????/??????SQ9??R?????$""""Zh?=???{3??????jY at k ?g?????Y-?M?h5??lbV????x????vO??W??a?X?9?K??y??/???^#?\??eO????;??!@???5???2M.{??N?{??+??.$!f?3?oqg2?\?L??e????j?c?X|p^o?`?/??????g???a?6??: ????i???e_???I??^?}+M?????e?;????|q???TT?-??+?m?z?????t?x???? ??/9?Z?Z??ld?(uV?gS3Z`???W?.???w?Z1?p??=??Z??????_????F ?"???????Y$v?????@??2?????&?~???k?Bb&=??w&????d?\?????V????u???w?1v???a~V?{?E?v~??????L?x=???P??????????I\?-??+?m?z?????t?x???? ??/9?Z?Z??ld?(uV?g?2????{????*{?????A??^?????=???E_Y?k???+R ;?xZ??Eb???(>???+??;|?hR????+??.$!f?3?oqg2?\?L??eh???j??? ?]?M?xw cg|?&??a??gZ?i?'?o~@m??4??C??E??Z? +?x??????J?B?V?w?????h?A????]?"????e?% ??FF?Rg?|V-?i???W?.???w?Z1?p??=??Z??????_????F ?"???????Y$v?????@??2?????&?~???k?Bb&=??w&????d?\?????V????u???w?1v???a~V?{?E?v~??????L?x=???P??????????I\?-??+?m?z???????haq?R????uMDDDDDDD4??G)WH???&"""""""????+?ms]??Q???????????????(? +i?\?DDDDDDDD??{?r??m?k"""""""???9?\!m??????????h~p?D)WH???&"""""""??#Q???~??6???V??\??????+???-1????&??4???Y?9rX[_??W>??L*???D??kd?i?dW??w??? +=??Cp???? 
??h?????N????????\??yueR?5<$b?^#?Nk'?:??cG?0V????;7???^?D??~ M?wjg?????????+????!???tZ;????;???B?f?????l??'/?i??S?8??)???~P?[??U)??X????j+????;?E?v/?{??B??&?;5???????q?????_??@??%????????xY???]?l??'/?i??S?[?/???@??%q?w???)??w(?{????4????????Jq?W????????????;?=??? ?@?l?????Ky?8?+bI\????s??????wE??~ M?wjvk???R??F]k|??}zUW??? %?{/??lQ*??9mf* p?? ?@?????????Jq?W?u??????Uy\???'?$???h?E???4????^????w??hm??W??"??k??O???J~?>?$y??G?-J??0?9?,?$??B???$?#?Gk???r??k????s}Jy??n?Q???|[B/?ij?7%6??????KD?Q?Z/??)?????F]????m !????Q????6_?+w/]?F]k??|z at y|???Z???[c]?F??????>?zvH n???l?RU?m[????cG3?N???-bm?c)???b??k?w? ??Lw7??G???f???f4??L??%c?^??U??by_>=?{k?|fia~:?yp???????^???m?##?]Q??E??qK1}%?M????Y?ft?????h&??f???:y??#????s??{vw-??M?f???K]?vn????????(??"??]]????fr??|3???~r??f4?Iw?d?? ??9?????8???????? ?>H ???|=?8??[????m????73?t?l? 7??? 3?0?????????Kz?V?????[??b??n?????~?a?f??6??T???oI??J?U?{wK?T?0? ?}3?0?O7?0?L??fc??j???-??Z???{?n???f?a?o?f???f?iv?l??S?s??%?[+?Vy??-qS1?L7???p? ??0? 3?N???z?q????wk%?*???%n*f?????n???f?a??i?1_O5?y???n?d[??w??M? 3?0?7? 3?t? 3?4;m6????9??????l????????a?f?f?a??n?a??f???|=?8??[????m????73?t?l? 7??? 3?0?????????Kz?V?????[??b??n?????~?a?f??6??T???oI??J?U?{wK?T?0? ?}3?0?O7?0?L??fc??j???-??Z???{?n???f?a?o?f???f?iv?l??S?s??%?[+?Vy??-qS1?L7???p? ??0? 3?N???z?q????wk%?*???%n*f?????n???f?a??i?1_O5?y???n?d[??w??M? 3?0?7? 3?t? 3?4;m6????9??????l????????a?f?f?a??n?a??f???|=?8??[????m????73?t?l? 7??? 3?0?????????Kz?V?????[??b??n?????~?a?f??6??T???oI??J?U?{wK?T?0? ?}3?0?O7?0?L??fc??j???-??Z???{?n???f?a?o?f???f?iv?l??S?s??%?[+?Vy??-qS1?L7???p? ??0? 3?N???z?q????wk%?*???%n*f?????n???f?a??i?1_O5?y???n?d[??w??M? 3?0?7? 3?t? 
3?4;m6????9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f?????j???-??Z???{?n???f?a?o?f???f?iv?lL???9??????l????????a?f?f?a??n?a??f??????_??%?[+?Vy??-qS1?L7???p? ??0? 3?N???=???Kz?V?????[??b??n?????~?a?f??6{?y???n?d[??w??M? 3?0?7? 3?t? 3?4;m6&?T???-??Z???{?n???f?a?o?f???f?iv?lL?????[????m????73?t?l? 7??? 3?0??????S?????wk%?*???%n*f?????n???f?a??i?1?????oI??J?U?{wK?T?0? ?}3?0?O7?0?L??fcbO?/??????l????????a?f?f?a??n?a??f??????_??%?[+?Vy??-qS1?L7???p? ??0? 3?N???=???Kz?V?????[??b??n?????~?a?f??6{?y???n?d[??w??M? 3?0?7? 3?t? 3?4;m6&?T???-??Z???{?n???f?a?o?f???f?iv?lL?????[????m????73?t?l? 7??? 3?0??????S?????wk%?*???%n*f?????n???f?a??i?1?????oI??J?U?{wK?T?0? ?}3?0?O7?0?L??fcbO?/??????l????????a?f?f?a??n?a??f??????_??%?[+?V???????>???}`????l????(n(c:????c?\???2E??a6???EV? ???_&???????wR?2H?????????o'????????xS? ???f???????? ??LLq7??????gK8??????w?o*?????,z3tC3?????)?f??]?>??l '?c?Z???[?ME34?Eo?nh?~?B34C31???[???G??-O??????e???N????|?|)??^?????w?o*?????,z3tC3?????)?f??]?>??Ly?e?5??/???[wj????3?Ka?????X?????xS? ???f???????? ??LLq7??????f??fz?????R?U ?.??{??? ?&?ejax??n?7??L\h????? ?? ??w3o??n?k???|???z?x?T??Jx???L- ?{?-????? ??7C74C?q??????n?-??m9??L9?I3????????l?T ??p???1??e%_?? +v? 
??:?c?Z???[?ME34?Eo?nh?~?B34C31???[???r??r?yq????????????????8??e?zz?V???g*EC%?:?c?Z??? o)?n?A??#?j?9(m?I?V5|{+?????D2??j??o?\a{?>?yq????????????????8??e?zz?V???g*EC%?:?c?Z??????????92?&???V??kU?????KNOJ$3?1???????????I;??+?????_z*???w?\?????i?7??y?R4T???9??E??@8o)?n?A??#gn\g?6VzIfX?|??_?}?Ai+M??;L???#????Ks??z?j??o_??V{?=a????H??6(0&????????HCw$????C????[???Y??xG7??54C????????n?-??m9??s???????J/? k??S?k?o8(m?I{? ~??$???vi?S?U _????j???'Lu?????????8?p?V?c?Zi??Dx?A??ME34?Eo?nh?~?B34C31???[???rP;????????^??$_???f?pP?J?8??#?H?5????=?^?????=??^?gO????!??? ????qf????2?0??????"???fh&.4?? ?? ???fh?fb????W???N??a?3ifoc??$??D???8?^&?'????v\????,?U???)xSiF3?hV??hF??hF3??d?YqZy??&?{???+m???O?9?%?????5o*?h&??7???'?hF3?l7+N+?R?ds??{??z1?)6g???$VU\????M????Y?ft?????h&??f?i?Q??l???~??]/???? 8\?????{???4??\4???n4??\4??d???8??m??w?A??????$VU\????M????Y?ft?????h&??f? ?(?M?v?????v???MC?????6??H??8?v\????$VU\????M????Y?ft?????h&??f? ?(?M?v?????v???MC?????6??H??8?v\????$VU\????M????Y?ft?????h&??f? ?(?M?v?????v???MC?????6??H??8?v\????$VU\????M????Y?ft?????h&??f? ?(?M6??N;(m????2???j5??Z?Ty?????W6g???$VU\????M????Y?ft?????h&??f? ?(?M6??N;(m????2???j5??Z?Ty?????W6g???$VU\????M????Y?ft?????h&??f? ?(?M6??N;(m????2???j5??Z?Ty?????W6g???$VU\????M????Y?ft?????h&??f? ?(?M6?Hs??r>?:????n??f?P3?c??????x?{??:????n??f?P3?c??????x?{?????x????Km? ?W3`?,?U???)xSiF3?hV??hF??hF3??d?Yq?-z??f???/?????4?"Ww? ?{?#??h?f|?Y?f??f?????????????R-??X? Ruu%???)]??????????ft??LoFg????<\?]'L?E?p?????f?????_h?fh???fB;?CK??Y? R?s%????????Wv$?u?;??y??=??=?@j_???Kj???????? 1?.y?fh????????? ??C34C3?t7??????y?,?????????JJ??+;???:?^?????t?o&??#??y`?A'pf1???g*??_h???????fh?f?U*???{?|?N??bz???T4C3??,{3tC3?? ?? ?x??,^??-???e?+;?h???:vm? 0n?,lk?e4Y[? ?t? Y??n%? ??A??C!Cc?@?C?nY???ON?9????2??o??{?=???5?NZ?A???q?.r?{???3?f4??f???F3??E3??L????5??2???[6./T??X-???t? ??????t?+??s???4??\4?oF7??O.???f2?nV?a???-??r4???n?[??}?x????y??M'=#???4???K??? 
+?T??L.??7???'?hF3?B7???????N9?[w7????>m?l?c??p:]??%??ug*?h&?????f???f4??L???+??2???G???????a?????5??? ?=???q?????s????\??????L?????3???~r??f4?)t?z?UV&?????b?z}|?2????????a?[?A?g?s9n??_?|?!???t?+??s???4??\4?oF7??O.???~?D??q?s??6???K??@?B?ba???mi/??TD??"???A???-??MB:?/"????????????3?? ???y?????????Y??B?7U:??V?.?w?s?. ??m?]?m./?_??{?? wy?;???/4K? ?? ??B34C3?t7??:???9G?e!?w?s?pgk?g???AZ???>???8??8??sE???]^?NE34? ??7C74C??? ???'???e??o?t??vYH???9??Z?Yc?j?V?.??l./?t??.?\?$?s??S? ??B???? ???/4C34?Iw?hY???*?s?]?~?=Gw?~??????K?3???C1?{?? wy?;???/4K? ?? ??B34C3?t7??:?????x???@???1b???????~?zx?H???? ??h?f|?Y?f??f????????Y????MU?f?????t???U?%??^?k???sE???]^?NE34? ??7C74C??? ???'???e??o??53?l-???w?X?2.????_{???+?d}??w*??_h???????fh?fwy?;???/4K? ?? ??B34C3?t7??:???z???+?\?$?s??S? ??B???? ???/4C34?Iw?hY?????gK8??I?>wy?;???/4K? ?? ??B34C3?t7??:???z???+?\?$?s??S? ??B???? ???/4C34?Iw?hY?????gK8??I?>wy?;???/4K? ?? ??B34C3?t7??:???z???+?\?$?s??S? ??B???? ???/4C34?Iw?hY?????g????? ?????y?|y????[I???TU?wl|Hv?W'H???280`?t4 $?????|? 0b:??+?d}??w*??_h???????fh?f?  1M?I?>wy?;???/4K? ?? ??B34C3?t7??:???1??,$u?#????{?C?0????6K?={Z ???????????+?I?????N????' ?j???????i#??M??"I??./p?????f?????_h?fh???f??B?7U:?Gb????}?T?t~?q(??6?f??gO ????|^y1??v?2I????? B?????AZ??t????=3m?t? R*J:??8 ?]{k?????r???A>???}{?r??}??????{kz? ?f]:???????6b:??+?d}??w*??_h???????fh?f+ ra?????z?wy?;???/4K? ?? ??B34C3?t7??p???J??HZ??t???@??0???9??+]???9{?`???R?K????c????m??????O???A?\='}??? O?7???6???$Y??????fh???o?nh?~|???O??Ek8t|S?c~$???t???@?:??8,????X??R???o?y?W???u???{[????L?o???d??i?t? ???t????ev"???O?lv?8S????Q?LE34S??? ?? ???fh?fj???:??ii"?G ??R>&??{#s {??E?lo???????q?,Gg??????6B>g6?m???'??????ng??????&???N????Q?LE34S??? ?? ???fh?fj???:??ii"?G ??R>&?b6?_?\?^?s?&???????9?d? ??YF??w?l??????i[j??I?v%?#???y???8??h#?S???k8S? ???f?7C74C?u?????d7??bZ?H???~??O?I????2????\???f?y+9lN.7?rt??????k#?sfc??Zxz??]????vf?F?g/??u??????? 
??p???pc?8?F????????hy??U????wjg?;#A???QG.7ja]???8?}?{??oih?f??y???kh?~?C34C35?nVz??4??G7M :???18??o???E??vf??t???|??Q.fc??KgD|6tk??????]??w>f??W??M???W?67????????{?F?d???u?3??T?T4C3u?Y??? ??O]h?fh?&???C???&r??????AG0??=????5????????N#?????:??l??u?????nM8?ZZ??K?????l?}??\?????????;?3??? w???#?????qF???*???fh?.4?????? ?? ??$?Yu???Dn??!???O?I???? +???,EB??l? ?`??l^?A???_?%????????H????m?O???????)?:???q+5F?????|G?^_?B?]???????????p???:r??Q ?????{Pe8S? ???f?7C74C?u?????d7??bZ?????Q}???&?T??`???8ogsm???Dg???'3???\??D?N1[ks3??;?s\????g?O?q/??8??Zyl? ????l?K??Z??w?g*???????f??f??.4C34S??f??WLK??G?>e?o?B*??H0?gD????6l?j ??mm???inu.?D?w???????? ?9.twq???????g?o?<6O???~Fd6?%?F-^??F?3??L]hV3tC3?S????Iv???+???d?#E?2??I!??$?3"???\?y????6???4?:m"??S????????????8????m???3??V?'??@?#2???q???]????fh?.4?????? ?? ??$?Yu???D2???z??l?t??Y???T?O?wL r???\,???x???T4C3u?Y??? ??O]h?fh?&???C???&?y??[.fc?????R$?(?~"?crh??5_?z?`?d????}?(p???? ??o?nh?~?B34C35?nVz??4??{???r1#??vF?"!G!????C?\??2??%?F-^??F?3??L]hV3tC3?S????Iv???+???d?#?????0?Y???T?O???p}?+?1??? #2??%?F-^??F?3??L]hV3tC3?S???,?tn???&???qkv??nVz??4??{???R>&?3K????J??????q?b6f?dD?z?`?d????}?(p???? ??o?nh?~?B35?E????1????8n???????bZ?H?=R\o)????H?QH%?D?????r1?M 2"s=p?\2n????k8S? ???f?7C74C?u????"L?v??k??_?f???f??U1-M$???k?J?t?d??????V[!???????0??2?G?K??Z??w?g*???????f??f??.4S?????o?=?#_?f???f??U1-M$???k?J?t?d??????V[!???????0??2?G?K????_??m3a???C????????? Y?mV??>t?v?%W1=wC???fhF??o?nh?~t??f???2?N?7lkf????a??t?#??nV?Y?????~8?z~???vqv?j??|Z{??9??y?]?D\??? v*??]h???????f4????;?????mt7??Q??L?;R??f????.?Y????????/?jg???^???????-?w??eI?UL??P`?????f?????]hF?Y?0? ??? ???Fw??a??t?#??nV?Y?????~8?z~???vqv?j??|Z{??9??y?]?D\??? v*??]h???????f4????;?????mt7??Q??L?;???J???????;??~???_?? +??b?????,????? +?T4C3???}3tC3?? ?h6?&?Awr?a[3??nV??"L]??w?????????5?w?<}?T???8;6??lK?`wYq?s7??h?ft?Y?f??f?G??l;L(???~??f????:GE??2?????+????k???y???&~qv*lV????;???$?*??n(?S? ??B???? ???.4??,v?P????m?l??Yu??0ue:???}????&g2?o?Z?j?????.??????yxY?D\??? 
v*??]h???????f4????;?????mt7??Q??L?;R??z9????L???mV?Y???CV;??????7[:?/?????????NE34? ??7C74C????f??aBt'???:vm??8>pj? [X????#+H?? +w??6? ????1?A?? s[?0F .?rgF???Sf4????y~?JM?????\k????3???W??#'7\:?s??YM??????a?\???n={e?p?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9?*?}??=rr???a;???T???Y<}???X8???W&??+??w????3???.4?? ?? ??B3??R?Iu?O?\k????3???W??#'7\:?s??YM??????a?\???n={e?p?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9?*?}???????t????????'?|????n=?w[w??Y???~wC???fhF??o?nh?~t??f???:?? [?5s??f?Uq??l???????f5??o_>I??v.?v?? ?????????? +?T4C3???3tC3?? ?h6K=&??>e?r??kL7k???s_ek??|'7?? ???I:?s??[?????mE??.???P`?????f?????]hF?Y?1???)??k?\c?YsFU??*??%??????? +?T4C3???3tC3?? ?h6K=&??>e?r??kL7k???s_e{?wY???~wC???fhF??o?nh?~t??f???:?? [?5s??f?Uq??l????"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9?*?}???????????? ??b?^??o+??w????3???.4?? ?? ??B3??R?Iu?O?\k????3???W??)???I?=????_.??e???"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9?*?}??=?v2????\8nVSa?\?????rE??.???P`?????f?????]hF?Y?1???)??k?\c?YsFU??*?{??d,???p????~????;???,?]l????LE34? ??7C74C????f??cR?S?-????t????8?U&????X:?s??YM??r1~/?w??Y???~wC???fhF??o?nh?~t??f???:?? [?5s??f?Uq??L?????q?? +??[?5?,?]l????LE34? ??7C74C????f??cR?S?-????t????8?U&??????YM??-?E??.???P`?????f?????]hF?Y?1???)??k?\c?YsFU??*?{????t??????~?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9?*?}??=2???N??L??p&??uA?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9A????\_I??z&Ln8l???O?%???w7??h?ft?Y?f??f?G??l?zL?#}???Z3??n????s_er? ????n=&7? ?w]??????? +?T4C3???3tC3?? ?h6K=&??>e?r??kL7kNP?????S???????a;6??????l??Vd ?b?? f*??]h???????f4????H?2l???5??5'???W??)????o???? ????W???R??o+??w????3???.4?? ?? ??B3??R?Iu?O?\k????Tq??L?????t>l?]~|??j?\?/e?p?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9A????y;K??v???????????R&??+??w????3???.4?? ?? ??B3??R?Iu?O?\k????Tq??L?????t>l?]~|??j?\?/e?p?"Kx??n(0S? ??B???? ???.4??,??TG??a??f?1??9A??????? ???\_1??? ?Y???~wC???fhF??o?nh?~t??f???:?? [?5s??f? ?8?U&?????|?????? ?w]??????? +?T4C3???3tC3?? 
?h6K=&??>e?r??kL7kNP???2?G>?>H??v.?WL????>E??.???P`?????f?????]hF?Y?1???)??k?\c?Ys?*?}??=R??i?? ??l???O?%???w7??h?ft?Y?f??f?G??l?zL?#}???Z3??n????s_er?T?{??gBp}???.?Sd ?b?? f*??]h???????f4????H?2l???5??5'???W??#???v??\_1??? ?Y???~wC???fhF??o?nh?~t??f???:?? [?5s??f? ?8?U&??? ?N??L?????????????rE??.&~?:g???Mrq>????t7??????f?G??l?zL??m?\k????Tq??L???.?v??????G???????;???,?]L??u?T?????|p????n???!?o ???>4??,??????2l???5??5'???W??#????|??u1y?\?%?????????c??\?. at 343????1$??????f4???t?_[?-????t??U??*?{?_?!???.&??+??w1?[?9SUwl?????????m&????????????&4 ]2h+?.E?B?7]?Az?;?f?`?04s?Y? ???? +{??qC??yuR??????W??r????SM??;?? ??????)???????l?yB?y????f?P7k?E??i?;e???????????o.????w?b>???^W??B1?6?TS$????4C3?nc?cJ0kh?~?C37?e??l?k????7???gQ'?o?N,???C?????v?? ????????Dk?o???y*?P???=????q?????????????????f?'4???*?qk? u??Y?I????SK?}?i???????B???{?,?3???ue??!?nsO5E"?p?8 at 34??6?:???f??>4s?Y? ???? +{??qC??yd????,?v:lW???1V??h????????E? ?ve??!?nsO5E"?p?8 at 34??6?:???f??>4s?Y? ???? +{??qC??yd????,?v:lW???1V??h????????E? ?ve??!?nsO5E"?p?8 at 34??6?:???f??>4s?Y? ???? +{??qC??yd????i~???_+?R +e??????/?3?P~???S9?b?m???H???h?f???^??`?? ???fn6?Db?z????B?=??2O??Y????"w8???Qw{S?YC3?s???,??f?\[?=n???n?4s?Y? ???? +{??qC??yd????i~???_+?????]?~n??)?R +ew??P^v?q?????????_4?????? ??2O??Y????"w8???Qw{S?YC3?s???,??f?\[?=n???n?t1h?p*m? ?{XahY +??T?X?'V+?????C?7??o?s????p?fhF???>???f??>4s?Y? ?????z??qC??]A?????????,B1?&?T],????4C3?n??1&?54C??????rO(&??u?????f? +???u????WUd??Y7y??b?t8???Qw???1???????l?{B1y????f?P7kW?????????" ?P???3U????|???????}? f ???}h?f????{m??5???Y??&-}???%?UY0?b?M???X$??h?f??l?cL0kh?~?C37???PL?k????7???6i??l?-?????!?n?L??"?p<8 at 34??f{c?YC3?s???,??b?^[G=n???n???IK_g?m |UE ??u?g?.I??????u7??????????f?'???:?qk? u?v?MZ?:?oK??*?`???p?fhF???>???f??>4s?Y? 
?????z??qC??]a?????yqv?????}???q?l7???$ ?D?=?????!?n?L??"?p<8 at 34??f{c?YC3?s???,??b?^[G=n???n???IK_G???8;QnW???>??8V???|gB??xXUd??Y7y??b?t8???Qw???1???????l?{B1y????f?P7kW?????|G???v??K?0????b6U?n??;???WY0?b?M???X$??h?f??l?cL0kh?~?C37???PL?k????7???6i??(??????????$ ??????M??????7??UE ??u?g?.I??????u7??????????f?'???:?qk? u?v?MZ?:?w??q?l7??4 !??o,fS??fy?C? ?xU?C(f?????E??x>p?fhF???>???f??>4s?Y? ?????z??qC??]a????????fy.?????o??`???DH???I?w?wm}????_*?Z~&?s??W????0??????d????????????O/????}$}?????hRY0?b?M???X$??h?f??l?cL0kh?~?C37???PL?k????7???6i??(??C?n?????:?~???v.?N???????9?????g?Y??????????N?F<{*2??????c?,??(?wtJD-???#b??????????a3???G+???????a3k6K?c?{m)X#[3??nV=z*1????X?? +??? ?r1??B&??n????r?u???+ ?????L???d/?vy???s??fK??R???603!???w??`?P"?h???k??=????N??A? ???f?8Z ?5l????Y?Y?F#?kK????Ft???S??N????z?]P???l?????2)?v?e?(????\?^?'???e^.%{???K??~? 0[Zn?JMM?????_?? FC??FK's ?} ???5??HMM?Q??|???7??~0&???n?2?A???S??9???: ??M]?NB?h???zD?u#?T?B?:?`3l&????h%?????>lf?fIa?|?-kdk&?????S??f}32)?9?>??P???^??&?(mq??v??gj??? ??s7r???\???????PX?O???.g'?D??? O="???{*O!a???? ?6???q?\k? ?Y6?f???0F????5?5???f??[??N^?? ???~????f?a/RSm??8?v???3?LE? h????yP.????b~|(?N??s??A???P"?h???k??=????N??A? ???f?8Z ?5l????Y?Y?F#?kK????Ft?????T'?Y???L?`?????T?????????G?????? X?#??????8?\???K}g ???--7 +f?G %? ?O???n???SHX'?? l??Dw3{??6?~?????,i#?????`?l?d#?Y??Vb????o?B&?0???????Ys?????v???G?n?h??,L???s??|l?e?????DC??????????'O?Z7rO?)$??q?6?f???=?V?k ?a???f?l??????R?F?f???zx+1??k?7c!???c??c????9????d;????#`?{?t?&?h???b>6??a?R?Y??fK?????QB?h???'b??????????a3???G+???????a3k6K?c?{m)X#[3??nV=???????~?n?????????:?s?iarL???(`?{??W?????4??r1??;???????;^.%{)#???????}??????g"??B!???A??^??j??sD?R=????????W?n?????????:?s?iarL???(`?{??W?????4??r1??;???????;^.%{)#?????????Syy?3???B!?????A??^??j??sD?R=????????W?n?????????:?s?iarL???(`?{??W?????4??r1??;???????;^.%{)#?????????Syy?3???B!?????A??^??j??sD?R=????????WC?}L!???F~cf$????8%r;?????^??z????~s???M??8N??+??? 
^[??????????]<???)#????d??hh@???z?????m 4??{*O!a]?????wE?v?!j???n?]????T?~??m?rE?6???>?_?]??????~#???? ???o?J????3???OC??k?3@?qw??eI????;?????????#??????O???|?>????qL???5??GLXB??Y?y??La(?p3n??-?u ?5n???????Y%!????N1D?5???k?Wr?*??????_?H?f?|???????y|???o???????[????z?s~?iHR?cmv?=?n??,????u??X??>?{$y??T?????????{?7???[?f?? KH5?:?T?)????n????E??A?????_?q?x6?$??\?)??5???w??JNR???QJ?F??{w? s?ho? ?????????y\???7/???????2< N???On???I?-;?+?`cq??0??5??c?u??*SX?????aw?z??g??q???f?lVI????S QkF v3??????????Q???????8??.H??y?3F???pWGF????????!imvF?? ??????????1p?o???"?X?7?n?&n?6???u?g??V???@7?f???^? ?Y?f?/??YP???v???1Hx?????n?f??:?k;??f?`7???^?I?0?#??Q0??????|?`????[??~0f]???2??x>P???v???1Hx?????n?f??:?k;??f?`7???^?I?0?#??Q0??????|?`????[??~0f]???2??x>P???v???1Hx?????n?f??:?k;??f?`7???^?I?0?#??Q0??????|?`????[??~0f]???2??x>P???v???1Hx?????n?f??:?k;??f?`7???^?I??z??Z]?;??Y????n?Q?E?v:?f?`???3Ue ++@?|???q3?nQ?c???q3????`V??^|?5?$??\?)??5???we{%'????rku ?Tsg?>?_???F??????m??1?:?T?)????n????E??A?????_?a4??YQ??{?????@?sm?????f?????? +?=r:??f?,?m?????S?v?S?I??????|?{`g??l??Y?y??La(?p3n??-?u ?5n????Y=%??g??d?Z?JB??:??f?`7??l??$U??????6e?ho? ?w-?????jNR??^?????;;?f?`???3Ue ++@?|???q3?nQ?c???q3???)t>??&C??U?l?)??5???we{%'??|????@??(KG{????k????Ts?????5????5??c?u??*SX?????aw?z??g??q???hVO ???5????@g?N1D?5???+?+9I?{???h?/K?g??zv?w?S?I????9p\??|?{`g??l??Y?y??La(?p3n??-?u ?5n????Y=%??g??d?Z?JB??:??f?`7??l??$U???[?K?}??,??y???%??N5'?????q}??a????[??~0f]???2??x>P???v???1Hx??????f??:??Z?!j?* t6?C??Q????????Ta?Gn?.??????~???g?|?;????__???]???vvn?6???u?g??V???@7?f???^? ?Y?f?/?0??S?|FkM??5?$???S QkF v3???JNR???????? ????}??,????Q?m6??|??~????m??1?:?T?)????n????E??A?????_?a4??????? QkVI??Y?????f?????? +?=rku ?m$?3?G?`YR??S???l?}??!$?????5??c?u??*SX?????aw?z??g??q???hVO ???5????@g?N1D?5???+?+9I?{???8??.H?gd???????O?FA??(??CH??a?sk?????P??x???L?O?F???v:?f?`???3Ue ++@?|???q3?nQ?c???q3???)t>??&C??U?l?)??5???we{%'?????^??&u???$??m$??J?T{`?sk?????-???n???? ?S?k??q???h?9???pj?b $??JO????f?5???? u????? 
+??v%???}?p!?????C??my9]0??????!hQ?p3n??m???^k????F??1H&?h??Pk[ ?lVz +?f?`7??y??n??e?|????N???w?^Jz????}[?DN??nr??|Z????aw?9???7?~?????#?L?? +W??,?@2???j???nV_?*Lm??g???r!???I?^???fC?7?????z)???~n?my9]0??????!hQ?p3n??m???^k????F???@2?G+\?Z???d??S?5??Y}??0?u?????S???H?????& ????wy9]0??????!hQ?p3n??m???^k????F?l??=Z?*???H&???B?5????R}??0?u?????S???H?????& ????wy9]0?-?3U?C??2??f? ????1%??????f? $?{??U?5?-?L6+=?Z3j0?}????Waj?0???g?????????M40???$r?`?[&g????Ee>P???v???cJx?q3?7~??H&?h??Pk[ ?lVz +?f?`4??K?5??????x?\Hw?W??n????o?A??I]????u??~?'O"? ??er??|ZT?5???aw?9???7?~???,??dr?V? +?f???f??PkF F??????U???C??? ???????mW]???5????#?????????o`??I?t?x?L?T?A??|???q3?nC????Z?f?o?0?e3?L?? +W??,?@2???j???h?;?"??? +S[w?3???.????????o????Sa???????????7????????~?'O"? ??er??|ZT?5???aw?9???7?~???,??dr?V? +?f???f??PkF F????T_?*Lm?????>~??n????y"l??O??n??? h??????????? ??P???v???cJx?q3?7~??H&?h??Pk[ ?lVz +?f?`4{ ?K?5????????mW???o?4???M?????+???? `?{Y?y9]0?-?3U?C??2??f? ????1%??????f? $?{??U?5?-?L6+=?Z3j0??? ???U???C????Jx?????c????A??pw{%?{??s/?'O"? ??er??|ZT?5???aw?9???7?~???,??dr?V? +?f???f??PkF F?p!??? +S[w?3?~?] ???y?x???7Y ?{?n??w?^??y?e??I?t?x?L?T?A??|???q3?nC????Z?f?o?0?e3?L?? +W??,?@2???j???h?.???Waj?uf???+????}??f??=??C?{Y?y9]0?-?3U?C??2t?? +??????J??Q???u?9???7?~???,??dr?V? +?f???f??PkF F?p!??? +S[w?3?~?] ?g??x???7?? ?9_.@??????????n???:??????W??<7?'?Q???n?????1%??????f? $?{??U?5?-?L6+=?Z3j0??? ???U???C????J?<;??c???Y/q??r?P?^?O?DN?w??L????????????9????fp?~??~?)???????a4?f ????B?Yl?d?Y?)??Q???\H??? +S[w?3?~?] ?g??x???7????>????$r?`?[&g????Ee>`4?g?fp?~??~?)???????a4?f ??o?B?Yl?d?Y?)??Q??L?????*Lm??????v%\??B??????_??????????n???:?????L?A??Q???u?9???7?~???,??dr?Q? +?f???f??PkF F3}?^_??0?u?:3?w??pyv +???{?^|m?3/?/O"? ??er??|ZT?F3}QkF7??m???^k????F?l???F?*???H&???B?5??={}=????????mW???)4????z???????????$r?`?[&g????Ee>`4?g?fp?~??~?)???????a4?f ??o?B?Yl?d?Y?)??Q??L?????*Lm??????v%|????q?^ty?????c??r??????????n???:?????L?A??Q???u?9???7?~????_????V? ??V???>?m?#?,??|?4rA$?BH?*? R?? 
H?9??5B???r?,}??????5?S +4???`h????fK?????h&w??y???I?ug????~???????????????????????????6?bq?4g?TQ?C??? Bk??????=???5o???????R??o?Ck?%h6[J?? ?E3??o?cm?L??;?_??????????????/G??????????g?w???????9S????|?h&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{k3d?g??????g?w???????9S????|?h&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{k3d?g??????g?w???????9S????|?h&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{k3d?g??????g?w???????9S????|?h&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{k3dR????5?m~??li?T??z?2,???? ?7;?-?{??k???]?E?1?@s?? ???K(?l?? ??frg???? ??n?pM}??8[?3U??????fr?5C???u??w???7?~?g?lL)??7???5? +4?-%Ck?????????6C&??[:\S??G,???L?*?w(??????b39???!?f???~?;?o??y???h6?h?s???u ?????5Cc?L????X?!?b?-??o?#gKs?J?;?????IL???Ah?x?s?b??????????Y4S +4???`h????fK?????h&w??y???I??????????9S????|xi?$??L? ?f???n???N?[?f???,??)???\0?f]B?f??dh??X4?;???4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{k3dR????5?m~??li?T??z?2^?=?)6?3?ov?[?????????>?fcJ???1 ?Y?P??l)Z34????=??2)??????6?bq?4g?TQ?C?/??????? ?7;?-?{??k???]?E?1?@s?? ???K(?l?? ??frg???? ??n?pM}??8[?3U???????fOb??? Bk??????=???5o???????R??o?Ck?%h6[J?? ?E3??o?cm?L?}?t?????X?-??*UT?P??K?'1?fr?5C???u??w???7?~?g?lL)??7???5? +4?-%Ck?????????6C&??[:\S??G,???L?*?w(??????b39???!?f???~?;?o??y???h6?h?s???u ?????5Cc?L????X?!?b?-??o?#gKs?J?;?????IL???Ah?x?s?b??????????Y4S +4???`h????fK?????h&w??y???I??????????9S????|xi?$??L? ?f???n???N?[?f???,??)???\0?f]B?f??dh??X4?;???4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>?4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>?4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>?4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>?4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>?4{Sl&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>H?h6?3?ov?[?????????>?fcJ???1 ?Y?P??l)Z34????=ok3dR????5?m~??li?T??z?2??w +4???? ?7;?-?{??k???]?E?1?@s?? ???K(?l?? ??frg????2)??????6?bq?4g?TQ?C???;??? 
Bk??????=???5o???????R??o?Ck?%h6[J?? ?E3??o??? ??n?pM}??8[?3U?????????fr?5C???u??w???7?~?g?lL)??7???5? +4?-%Ck????????mm?L?}?t?????X?-??*UT?P????N?f39???!?f???~?;?o??y???h6?h?s???u ?????5Cc?L?????6C&??[:\S??G,???L?*?w(?A?x?@???Ah?x?s?b??????????Y4S +4???`h????fK?????h&w??y[?!?b?-??o?#gKs?J?;?? }?S??L? ?f???n???N?[?f???,??)???\0?f]B?f??dh??X4?;??????I??????????9S????|?>?)?l&gZ3??\???q'??y3?w}????}c.Z?.?@??R2?fh,???}{??f???wK?k?????????RE?e>H?h6?3????!?0 ?? + ?#?$\?? Ar?D`??O]???/]?????lh?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s??n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s{7eR??????}~??ni?T????2??%??L? ?f???n???%~????{??fsJL????Z?!!??l??fh,??7?????2)????Z?>?cq?4g?TSsA????l&gZ3??Y?????k?????E?9%???X?????l?UZ34????\??M??o?p-c????[?3U??????h?i6?3?o??[?}|??5o????????|o?e??lH?i6????f??~?b??L???t?????X?-??*??\P??t??4???? ?7{?-?>????7?~?g?lN?i?7?2 at k6$?4?mU?? ?E3?f?W?wS&??[:\???w,???L?jj.(?A:Zb??? Bk???=?{_?g??y???h6??4?k?5b???*@k????|?????)?b?-?e??;wKs?J55?? -1?fr?5C???u???/???????Y4?Sb???? ?? 1?f[?5Cc?L???U???I????2?????9S??? ?|????f39???!?f???????Y?f???,??)1???Zh????f?? +????h&???*?n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s{7eR??????}~??ni?T????2??%??L? 
?f???n???%~????{??fsJL????Z?!!??l??fh,??7?????2)????Z?>?cq?4g?TSsA????l&gZ3??Y?????k?????E?9%???X?????l?UZ34????\??M??o?p-c????[?3U??????h?i6?3?o??[?}|??5o????????|o?e??lH?i6????f??~?b??L???t?????X?-??*??\P??t??4???? ?7{?-?>????7?~?g?lN?i?7?2 at k6$?4?mU?? ?E3?f?W?wS&??[:\???w,???L?jj.(?A:Zb??? Bk???=?{_?g??y???h6??4?k?5b???*@k????|?????)?b?-?e??;wKs?J55?? -1?fr?5C???u???/???????Y4?Sb???? ?? 1?f[?5Cc?L???U???I????2?????9S??? ?|????f39???!?f???????Y?f???,??)1???Zh????f?? +????h&???*?n???K?k????????RM?e>HGKL???Ah?x?g?b??K??y3??~?????{c-?fCBL??Vh??X4?o?s{7eR??????}~??ni?T????2??%??L? ?f???n???%~????{??fsJL????Z?!!??l??fh,??7?????2)????Z?>?cq?4g?TSsA????l&gZ3??Y?????k?????E?9%???X?????l?UZ34????\??M??o?p-c????[?3U??????h?i6?3?o??[?}|??5o????????|o?e??lH?i6????f??~?b??L???t?????X?-??*??\P??t??4???? ?7{?-?>????7?~?g?lN?i?7?2 at k6$?4?mU?? ?E3?f?W?wS&??[:\???w,???L?jj.(?A:Zb??? Bk???=?{_?g??y???h6??4?k?5b???*@k????|?????)?b?-?e??;wKs?J55?? -1?fr?5C???u???/???????Y4?Sb???? ?? 1?f[?5Cc?L???U???I????2?????9S??? ?|????f39??????c?]?,?????&),?c?I%.?m, ?*????d???5??M")H? ?) ????+????*f??g?S;|3???n???$v???????????}c??f]DL??R8h???h&???z???H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f?????? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&???z???H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f?????? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&???z???H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f?????? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&???z???H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f?????? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&???z???H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f????lm?D +??4???M????4g?TRu@??V?l&gZ3??Z???x;k??????fcLL??1?Z?."??l)?fh|4?w?}ek3$R????5?mz????9S????|????f39???!?f???????Y?f???|4cb???9w??u1?fK??5C??????+[?!?B?- ??o?3>?-??*?TP?????4???? 
?5??-?>????5?~??????o???????i6[ +????}_?? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&?????fH??oK?k?????Ks?J%U?? m51?fr?5C`??u ??'??f?????h6??4?s??5?"b????Ak??G3yg?W?6C"?~[\S??g|?[?3U*?:??i??i6?3?kv?[?}??;????)??????6=??????RI?e>H[ML???Ah?X?k?B??I??Y3?w>??11????;h????f??p?????L???????H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f????lm?D +??4???M????4g?TRu@??V?l&gZ3??Z???x;k??????fcLL??1?Z?."??l)?fh|4?w?}ek3$R????5?mz????9S????|????f39???!?f???????Y?f???|4cb???9w??u1?fK??5C??????+[?!?B?- ??o?3>?-??*?TP?????4???? ?5??-?>????5?~??????o???????i6[ +????}_?? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&?????fH??oK?k?????Ks?J%U?? m51?fr?5C`??u ??'??f?????h6??4?s??5?"b????Ak??G3yg?W?6C"?~[\S??g|?[?3U*?:??i??i6?3?kv?[?}??;????)??????6=??????RI?e>H[ML???Ah?X?k?B??I??Y3?w>??11????;h????f??p?????L???????H??????????L?J?(?A?jb??? Bk???]?zObg??Y????l??i?7??Ak?E?4?-??? ??f????lm?D +??4???M????4g?TRu@??V?l&gZ3??Z???x;k??????fcLL??1?Z?."??l)?fh|4?w?}ek3$R????5?mz????9S????|????f39???!?f???????Y?f???|4cb???9w??u1?fK??5C??????+[?!?B?- ??o?3>?-??*?TP?????4???? ?5??-?>????5?~??????o???????i6[ +????}_?? ??mipM}????oi?T????2??&??L? ?f???n???$v???????????}c??f]DL??R8h???h&?????fH??oK?k?????Ks?J%U?? m51?fr?5C`??u ??'??f?????h6??4?s??5?"b????Ak??G3yg?W?6C"?~[???}?~?v???o?}????????>??#z?G????w??_~????????~???o???????_???>??d?????Ks?J%U?? m51?fr?5C`??u ??'??f?????h6??4?s??5?"b????Ak??G3yg?W?6C"?x?????}?~?v???o?}????????>??#z?G????w??_??~??6???Xr?6i?M?g??,l;?h?^Z\?????xO??-???K?q???={i?E"u ??U?%???????y??C??P?2?????Y}??^G+???n??5??2^d?7?a?f)2??"n??Q?????G?7????G?M]?e??>?E?]?1oES??v??C?????/'?V!7?6?s???,*????`-*?-?3?2 j?2?J ??? ??????nn????gM7?????f/2????[????J?f??hF???#T??~J??#g??????g??????7???? @;]????l???h???F??9??? m??d?????J?q?T%??ftqk??nV_7???J?????~?OE?????b???Y???f???[3nT4?{??*?M?????????6???~????|:D3?+?d??????/?OF??O?]m?}R*?-?3?2 j?2?J ??? 
??????nn????gM7?????f/2????[????J?f??hF???#T??~J??#g?7m????+??5?t2?fW?\??=??_???????,*????T?[2g*e@??e>P?2?????Y}??^G+???n??5??2^d?7?a?f)2??"n??Q?????G?7????G?fWl??9mw????????S??????6???a?E?????????}????T??L?=???K?????n???i??i?z; +?????q???(??? ?l?????{??G???#T??~?Q??_????fv?@?]?nm?*?x??F?q?f? ??y?o????dp'Ts??nH?K????? ??Q?4M?4w?A?~?????{???id6?t???=j??{??*?M???o?/?L??m3;g???y??t]<{ +P??8{3??????^df????^??{??w??cB???-?Nk??????????Q??@Ub ?lFg?f?f?us{?D?k?????T4?xA???(????< ?lV?8?5?FE3?g??r??S2??d?m??+?Pow z?????Bn???D??<6j??? x????? +??B?t????A?p???"?????x??;9b????Z??4?F=?2g*e@??e>P?2?????Y}??^G+???n??5??2^d?7?a?f)2??"n??Q?????G?7????G?7?????G?????????A?~?_???????H????U?M[2??jG??????????{??????[????On??|kA???????~??F?????g?|?S?X?????XL??p???_????p???~v=?8??U???/??s?T*?t&e>Pc???A??I???us??#????????h???8?]dHkVz?8???!??46??;??.??(????????????}?d?????????????????????G?????????-??????d????~{?????????\-?? ??????zpq>k?"\?),^?????T?L?|?? g3:??5?@?m??zG?gM?i??????q?7??????q6?cCZ3il4?w?i]yP???W????>}?>????[??G????o???_}??.??????????|?xF?????`???3d?I[???L?HgR?5f?8??$???l[7??8=k?L????f?????E??f???Y??Ic???O?r????????M?G?>~?????????p?????????>?f???E?g?z?G_>yPc???A??I???us??#????????h???8?]dHkVz?8???!??46??;??.??(?oK%W[???L?HgR?5f?8??$???l[7??8=k?L????f?????E??f???Y??Ic???O?r???r??Tr?U???mq?T*?t&e>Pc???A??I???us??#????????h???8?]dHkVz?8???!??46??;??NC^????????p??o?s?R ?3)??3@??? ??Lm????}??5m????F???y??"CZ?????? i????????u?:?\?-?\mU?kl|[?3?J ?I????lFg??fh?m?\??H??i3??6?5> ??F??? ?f}lHk&??f??>??????m??j?"\c?????T?L?|?? g3:??5?@?m??zG?gM?i??????q?7??????q6?cCZ3il4?w?i???(?oK%W[???L?HgR?5f?8??$???l[7??8=k?L????f?????E??f???Y??Ic???O?4?u@?~[*???????8g*?@:?2?1???? i?$?f???????Y?f?o?l4k|@???.2?5+=@??????L???}Z?!????R??VE?????9S?????@? ?ftIk&?6????>?D??6?~?g?Y???ot?!?Y??l????f??hF???: yP???J??*?56?-??J%????j?q6?3HZ3 ???n??q$z????????}?? i?Jg?>6?5??F3zg??i???r??Tr?U???mq?T*?t&e>Pc???A??I???us??#????????h???8?]dHkVz?8???!??46??;??NC^????????p??o?s?R ?3)??3@??? 
??Lm????}??5m????F???y??"CZ?????? i????????u?:?\?-?\mU?kl|[?3?J ?I????lFg??fh?m?\??H??i3??6?5> ??F??? ?f}lHk&??f??>??????m??j?"\c?????T?L?|?? g3:??5?@?m??zG?gM?i??????q?7??????q6?cCZ3il4?w?i???(?oK%W[???L?HgR?5f?8??$???l[7??8=k?L????f?????E??f???Y??Ic???O?4?u@?~[*???????8g*?@:?2?1???? i?$?f???????Y?f?o?l4k|@???.2?5+=@??????L???}Z?!????R??VE?????9S?????@? ?ftIk&?6????>?D??6?~?g?Y???ot?!?Y??l????f??hF???: yP???J??*?56?-??J%????j?q6?3HZ3 ???n??q$z????????}?? i?Jg?>6?5??F3zg??i???r??Tr?U???mq?T*?t&e>Pc???A??I???us??#????????h???8?]dHkVz?8???!??46??;??NC^????????p??o?s?R ?3)??3@??? ??Lm????}??5m????F???y??"CZ?????? i????????u?:?\?-?\mU?kl|[?3?J ?I????lFg??fh?m?\??H??i3??6?5> ??F??? ?f}lHk&??f??>??????m??j?"\c?????T?L?|?? g3:??5?@?m??zG?gM?i??????q?7??????q6?cCZ3il4?w?i???(?oK%W[???L?HgR?5f?8??$???l[7??8=k?L????f???_?,?"M???HpwB?0?=????????`a???D v??????????:VW`f?=>T?/???e7??oL??????i6?s????}[???O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???)?????&=??li?T??r?2??"??L? ?f???n???Nl?Y3????h6??4?S??5k#b????Ak??G3yg??s??D +??4???I??8[?3U*????i??i6?3?kv?[????k???}>? 11????9h????f?9w?????L????\?>?B?- ??k?3>???L? +*w(?AZ+b??? Bk???]??;????5?~???fCLL??1eZ?6"??l??fh|4?w?m=??O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???)?????&=??li?T??r?2??"??L? ?f???n???Nl?Y3????h6??4?S??5k#b????Ak??G3yg??s??D +??4???I??8[?3U*????i??i6?3?kv?[????k???}>? 11????9h????f?9w?????L????\?>?B?- ??k?3>???L? +*w(?AZ+b??? Bk???]??;????5?~???fCLL??1eZ?6"??l??fh|4?w?m=??O??oK?k???????9S????|????f39???!?f?????;??f?????????oL??????i6?s????}[???)?????&=??li?T??r?2??"??L? ?f???n???Nl?Y3?~????????b?!&?y??2?Y?l6?Z34>??;???K?'R????5vMz??????RA?e>HkEL???Ah?X?k?B???^?f???????? 
U??;?fCLL??1eZ?6"??l??fh|4?w?m=??O??oK?k???????9S????|????f39???!?f?????;??f???E?w??A????w??????}c??fmDL???;h???h&???z.u?H?????5?gKs?J?;?? ?1?fr?5C`??u ?wb{??!???F;????????l6eZ?6"??l??fh|4?w?m=??O??oK?k?????????>T?P???V?4???? ?5??-?w???5k??????????fCLL???9h????f?9w?????L????\?>?B?- ??k?3>?????PP?C??Z?l&gZ3??Z???q'???b?_o???o?_? 11?fS??5k#b????Ak??G3yg??s??D +??4???I??8[???CA?e>HkEL???Ah?X?k?B????k??N??]???9h??????v????? b?!&??l??fmDL???;h???h&???z.u?H?????5?gK??}(????i??i6?3?kv?[????{????}????7?"?~??N???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s???;??w=????5C????7?i?7^?/?????f?)s???1?fs??5C?????o???}"?~[\c??g|?-?????r?2??"??L? ?f???n???N|???wb???oZ3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?1?fr?5C`??u ?w?s?}???g??Z3D>??A???z|Al6??4?M??????i6?s????}[???)?????&=??li???;?? ?????cK? ?? &7?3`.?T?D??'??14X?}C????????h-????_??S??&K???Ah??f???~?'?9k|?24?? ?f?|??y?>o??"6?R?4?-????O??l??Z34>??3????v???w????io??l&wZ3???n???I|??? ?}?wD?"?x??!K??Z:h???h&??????1?B?-#\???W|????????????!K???Ah??f???~?'?9k|?24?? ?f?|??y?>o??"6?R?4?-????O??l??Z34>??3????v???w????io??l&wZ3???n???I|??? ?}?wD?"?x??!K??Z:h???h&??????c&??[F???????oi?YE? e?H{C?f3????!???u ?O?s???eh?3?#h?>???>~??/?8!????y????????l)?f}B?f??t?????L????}k?L +}??p?C?_?????~????~???,?fr?5C????=?????????g|G4??}???}?>M_?qB?9????/b?)%K??R8h???,?fk??5C???>/Filter/FlateDecode/Height 990/Length 71926/Name/X/Subtype/Image/Type/XObject/Width 1038>>stream +H???Oh?????l????b?C??T?]?(??#6*??.9?B)tEQ?=4??A?????? 
+??B ???!??????{J?Tk#h?????~????>??aL&?L~??????8??|???|$??b?X,??b?X????'_??? ?5?tG?x?L???%?????)7??#M?L???????Q?m????????O??????i?$}> i?MD??H?$???0??j?D?q??= '??`?S?;?w?lZ2I?H?r?;?$?$}>?o??-u?vz?I?|???????(??L??????~????r??_>???\3??D???4I2I?????rV?!X`v? ????????z>?mT?I?>?T}????? ??w?]2??3????e^J?I? ??)??????d*??u ???[?T?????Ps?g?????;? ???s??|??_???Nd?o??2? ?b?{???O???????;?? j??????L?Oc???i?S?!????i??3???9_9???????/?????z?Z??4?????.&????F(?Z?L????A? ???? ?jwW????h]S???F??&4:#k??6~????5?nw?A?????f#? ??^y??^?=?e??gr~??$?%?]Z?????#??Mu?4?l??0 ?0 #??\????H?~???fs?U)yf??o?uT??V???b??y?? j?,?????gr~??$?%?]Z?????#??Mu?4?l??0 ?0 #??\????H?~???fs?U)yf??o?uT??V???b??y?? j?,?????gr~??$?%?]Z?????#??Mu?4?l??0 ?0 #?ZJ??????*@?}??H??????{?/?Z?? +??????????p$]?`?V??br&?g?X?a??*@???Dv?l??_????t??n(?6????sL?qa3`{}?????*!}???V??^????)??T3?5?eq?N?X? ?O^????7??e?g?,?C??T?? +??+???c.a?i&?y????9????j???S???????????9?v??>?a?a?Xk)???????yf????H??HO??T;?W?????kQ?????5?k ]Gl"?f6????AK:w?n8??t??{???k\? ?^????*?JH?E????UG4???z??eJ~6?Ly?lG@???*V~C??q?,??M?fY??+?2?~??v????9}?$? + #??K^?5[6PX?-??fy?:?j??????Y?a???h?????a?adC?d?&??zdr?????5FUyp?a|[??* u1?U75?gG}D?g????Q??=/?? 53y??????U???Z??I;/S?1???????]?a????|???-???????t?]1F?n??#I?;=????-?:(q???[??????.?kl@?o?S???,?}???L???`?T?}?!???? +?'W?`}$? ?n????????0???w?????l???????{??j?a?? ????|?9??E|???U??Y??? lq??$}2????M?=H?f&?)??x^]kT?J?,?]+??L????N$csO? +j????M]lI??c??vk?????C??8?R?[???R??X? mw?\c?????,??g?YN??G{?\c-????L???????,???9I??d!?aJ?c??????r9??3?3?\??0 ?0 #?%[5???s ??&????1?*???? ???nUI?.d??9;?#?<{8?L?L?S??????????Y~?VJy????=??H???f7 ??%Q5??a?aF6`?i-?;?X??` ?}???o???^??Z?@263?N?n?????:~?F?V0???Z??3@??{u?{?iH???f7?f??D??o[????7????M???D~6PP??o???? ^??wK5Sc93??&?klF?o?S???,?}?????56Z??(??-y???#J???Y?l?-8????}????9???+?^|^??????>???d|pM8?9;YoO?k? ????u??QK]7>??N????T.g?N???????r?}6???}v??????%D?S???????_K?????|s ?????Xy {?Q3 ????B??f^Ls?,??,?*?l??\??E??\? 
??5?u?#??v?Mv9r??)&k$t?)?????~2?j???W2??<???j=-?j9??????{????_G H??N3/??[?eY?eRI?~??+_ 9}??7?|??? RM}Y+H?fFO????/k:W:g?f?P?a?????)??m??al?6????F???r?#?pt!@??8??)<&g|z???T.g?N???????r?}6???}v??????%D???-?? [,?5VV?iV? ??LG?????f?:J@ +5w?y1???,??,??b?5???jy? {?g??*?????$??l?????pU?????|?#???8@?53?????jp?[????O??]+F?s????3dL??????~@?L??%??E?w???A???]???,g6{6?=!~6K6l??'??? '0Y???et?? ??Q???:??UCF?? ;E?e??f?V? ??w:L?h??&??,??o??>??r?>?SL?H????d??S?jV?wUJd????r?n??????Z??????L??m@?>W??m???????K?RY???7w?y>?nY?eY??,?L?xXM#2????W?kX??k???W???*'???Q?L???????g?]??gw?? ?_b????{???2????????{??4???3???f?3{E?)????5?????5?????.?}??e??&?????5???$;????_e???????s?Xki???g?3y??i??\?v????'w..?Ke?:???i????eY?eYV?{??????????]?aTU????,?vu?{??T????Q?????v4d/?Vz??og???1??;??Q/??=??T?????g?g@jp?U???1????=O??d?v=/???,?,??o??>??r?>?SL?H??w#??}?K&????$????A??=?'?7????????{????;{??V??W??? ???????bd???"t???????????f?????,??,?Y???;?]?_? ?????U???? ?vu?{??T????Q????:/?e; ???S? ????3?n???????\?????;_?T??4???7a????(g?d-y?????=??QK??G?g??^???8?\ ?? ?d????,??E?]$???.?? +EF?B(??[?"?-????^T???QJS?f?H`Y?0?TR/v???d?p??y??????L????Q??x]?9??|?3?|?wh?t?}??????|??n}*?*?W?>0Ae?r?}?ud?D?U????,?zM/???J?????;i????????Li???T??%?L*v??????2?Re?W&?~???????,??9Gf,???y?d?????????v?J?U?x???????{?2?#7{?[[??????9??????wMj?t??t???="???Z????^????;??d?w????oD????>f?w???`:??;??/?'???C???Uw??????,?w?????_(???t?~?????k???????????o?z?????????L???UM??g??k???{~o??;?b?|???A???Nk?[k??A[???"??}e???T?#g?g_GvO[?????"???????dZ?????;????h?????i?L??Y???b??[?)?/U?se???9.?~??????sd???H??K????????Zo???_?????L?h???+?;Rq?7???e??x[}?Q?OJ??~???L?Jg}?#???U?=n???|???N?????J???I?!???3????;jq?tCc??s??s????K X?W??????G=?Q?????2????n7?|K?c???_GZ???I??3xW???;??\.?oz??x??";?lRn???7?%?s*??5?Cg??r~!????BD=????)9???xG?n?2:/j?d?? 
&????????>q?{?1??g???-,???z?TM?4O?e???f&;?p???H?{T?KY???e???dy??92c?g$??%?x?Z5u?!?^?R????ffUi?-?????b u;5?vg??v?????v???D???3???r_=????w?W????7=C??;?8d??{f?=pq??2c??s??s????K X?W??????G=?Q?????2????nQ???j;???*u?????UM???Um?Z?=t?[-?"~^ +/D??????????w???-????Nj?`?i??~?{?t3?0??W?a??f?#?>U????9?!???p? 3_a???????{g???W?~??zGNa?n?f?M gY??qa?n?f?? +3????wT?k??9??m?#?0K7? 3?&????????0K7? 3?|?f?n?;*??~?y????S?????f??YV?s?C?????f?? 3m7???Z?w??g[??)???p? ? ??,??9?!???p? 3_a???????{??;G???w?f?f?a??rp????f?f?a???0?L??zG?????#???;r +?t3?0?l98??|?{?t3?0??W?a??f???^?????l?9?Y?n?a6??eu>?=?Y?n?a?+?0?v??Qy??{??{?????,? 7?0?@??:???,? 7?0?f?i?Y??????s?=?zGNa?n?f?M gY??qa?n?f?? +3????wT?k??9??m?#?0K7? 3?&????????0K7? 3?|?f?n?;*??~?y????S?????f??YV?s?C?????f?? 3m7???Z?w??g[??)?????5??X???Y??l????98??|?{?t3?0??W?a??f???^?????l?9?Y??7?F??v6?!?]????????????zM???f???o]?0?q[|?h?E??gO1@???~?j? V????/?z??HH???o?? ?????/~?P?b?&;? ??#oY?R??#????I??/?Y?M?G???N??^9]??z?? ?H?=?g??+?/??{l????@???????????E?L?l??????z?}?6???k??7?f????Ky?5 ]???u?WP??}?M]?? +7_??0????????:c.?5?M6???Y??Mv????[?????`k?????5?:3l?????ih?(??g?'?#)?$???;P?????R???????????????????b?-?9\??x~????_e?7????4{L?%????k?=?F}??l?????? ?Sf???d???????M v??qQm?? ??hdh9???u5????^?c??(%??L??'??c?0?B{[??t??H?A??Ak't??6]??z?*B,??????i?j+?N??????v{B;?rO????1?e ?~?-?????y?????????????-??r??u^??????.??}/n?`=%`vy(K&y??++y?t?`'k????Y?F??c?Y^W??>????:??Rb??4?h}??9 ?-????M???K?vB?>h?E????"??}???????F??>???????:c.?5?M6???Y??Mv????[?????`k?????5?:3l?????ih?(??g?'?#)?$???;P?????R???????????????????b?-?9\??x~????_e?7??????????. +7?? QKd?)o?n??+???d(jTv??B?? ?????{?????L?M?L??_????{n???:???k? ????I???gg:?nbM???6S?????W`[Q?P?????H=?A?y??P??M??I??2??.v?.{pJV?U?1QJ ???y? #3,[hoK'?`??^?x???N??m????TU????L ?9f+??4??0?%????#??'`{K^?dn_??P?_?X?pX??wcc]P????&^ ?z?}??k.??g?'?#)?$????????=3?3?s?3?3?=3=???[?s?????m?oe?g??9???F??(#G?S?9??`g???qPyx ?)?tJ&?1?K&y(K?<(jR???o?? ??c6z??.??L???3?1QJ ????02?"a????t?6?5???f? 
v0???l?R???#??9?]u??????7+??l?2??[?????#???CQ?eB?1?H5????^???r~????g`??S???|?G???@?q6v +??\n?/??X???'??}>x?-?????y?????????????-??r??u^???h|+?????)??6?&E9";???,g???????`?????S?iY?L?\??,???,????I?:pv????P?4???????+??s??pL?CF2?u.???H?Kos#?`??^?x???N??m????TU???Y?wd???4???U???V??-b?i?(@?Q??d? + ????????M=?F?RvO??=???????SP??r????:?=I?????l)???@???????????E?L?l??????zh?E????"??????#??L????&?r?????P?m?kL?GR?r?&?Wh??????-l?X5???{"?????$P~?????5???K?'??;?I?g??eK?=?zf?g???g?g?/zfzf?=?\?p????5???Fo`?ss??3?????Bl"{??{??~?:?>? ????/?8???5???}l?P?L?P??y +k??e?^~ ??I???@qu/?????9}F8&J?!????? ???Ke_#??K?g????p?? ???3??b?`????`I}??;????"?p2"4???{$?}??? ?%dR?]???q???w?{??????;????7?s8?????????/Y??lz2 ??tjwZ?/???`?f?u??`?S"`?DH?B???? ??????5?U???^???cl?mc??iB L(ne??$>?.,???f????/?zF7[?ov??=|v ???}VeI???????]???????_Z>?:?$;?????GCl?x???w,??^f=??K????r???7?}B?H??$?[3K????,???*????L??;???Ew?;?uo~?p????5?ey:???????`???2??d?k?'?d???=?????????2)?.???? +tg?;????tgj????r???9??xtkcm??w???????o: ?N?+? ??3??????d????5??0??Iu?l*?|?va?.-? Kb??~I?$7???2????5'g???-?%*jH +a?;????T???!?8??]Z*"??*S??XIn??e??5xN?wY>??U?;s???Mw?;S??Lw??????8????e???~6??;0:????d'?|??kN??1?[fKT????w&???????C?q????TD6,?U??%????H+???k????|~??@w??3???Lw????,?????q^???1??%??l. ??Z(n???`TT?}Lv??G ?][??????x???{\?3????tg?3???tg???????}?L:g???5??l. ??Z(n???`TT?}Lv??G ?][??????x???{\?3????tg?3???tg???????}?L:g???5??l. ??Z(n???`TT?}Lv??G ?][??????x???{\?3????tg?3???tg???????}?L:g???5??l. ??Z??+???u?#e?a?.@??^???????F??XD?????91RO????`?+?hf?YY??U??~?7}??Z????>?|??=??h???/????F???|v??O"?}x????(??6??}n???m??]?;?C?????f{?f?n?;.??????;?{???????=?CS????Y????K????M?o???n?=????}?>?????c??t??????iWc??[v???a6o?f??f?Y?y??x??w???l???????,M????y??gy????.????6???b???o????g?}?$r???yo???os????]??o?5??;???n?a?W?af?????????O???wD +?y3?0?lA?e????f?f?a??^a????????zw>????)???p? ?m??]?;?C?????f{?f?n?;.??????;?{G?0?7? 3???[v???a6o?f??f?Y?y??x??w???l???l? 7?0[?o?5??;???n?a?W?af?????????O???wD +?y3?0?lA?e????f?f??k???h?7k{/???f? 
3k7?????|????#R?????f ??-??w|?0?7?m_?/?E??Y?{y?e4?f?Y?y??x??w???l???l? 7?0[?o?5??;???n??}y-??????+-?Y?0????{??{??;?|g{??f?f?a???6x????!???p????V?^???h?2?f?n?;f?LDDti??????;???n?????????-?Y?0????{??????.????w|?0?7?m_??[?z][??e4?f?Y?y???3??y?c????f?f??kv~+Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p????V?^???h?2?f?n?;??f?f?a?????#R????????(Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p???|???5)ZF? a???????????n?aF??y??f?f??kv>?????-?Y?0????{G????? 7?0????wD +?y3??5??????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}??;????I?2?e3????wD ??,?p? 3??{G?0?7?m_??N??kkR??f? 3k7?Q?3K3?0??Fw??)???p???|???5)ZF? a???????????n?aF??y??f?f??kv>?????-?Y?0????{G????? 7?0????wD +?y3??5;E?umM???,C?af???#j?afi?f???n?;"???n????????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}??_rW][??e4?f?Y?y??~?Y???f4????Ha6o???f??h???I?2?e3????wD ??,?p? 3??{G?0?7?m_??Q?^???h?2?f?n?;??f?f?a?????#R????????(Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p????F?^???h?2?f?n?;??f?f?a?????#R?????????h???I?2?e3????wD ??,?p? 3??{G?0?7?m_???z][??e4?f?Y?y??~?Y???f4????Ha6o???f???U???h?2?f?n?;??f?f?a?????#R??????Y'w??5)ZF? a???????????n?aF??y??f?f??k???]umM???,C?af???#j?afi?f???n?;"???n????????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}??G?z][??e4?f?Y?y??~?Y???f4????Ha6o???f??h???I?2?e3????wD ??,?p? 3??{G?0?7?m_??Q?^???h?2?f?n?;??f?f?a?????#R????????(Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p?????h???I?2?e3????wD ??,?p? 3??{G?0?7?m_??7????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}?n??????-?Y?0????{G????? 7?0????wD +?y3??5;D?umM???,C?af???#j?afi?f???n?;"???n???????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}?N?z][??e4?f?Y?y??~?Y???f4????Ha6o???f??h???I?2?e3????wD ??,?p? 3??{G?0?7?m_??A?^???h?2?f?n?;??f?f?a?????#R???????? Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p???t???5)ZF? a???????????n?aF??y??f?f??kv:?????-?Y?0????{G????? 7?0????wD +?y3??5;D?umM???,C?af???#j?afi?f???n?;"???n???????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}?N?z][??e4?f?Y?y??~?Y???f4????Ha6o???f??h???I?2?e3????wD ??,?p? 3??{G?0?7?m_??A?^???h?2?f?n?;??f?f?a?????#R???????? Z?kkR??f? 3k7?Q?3K3?0??Fw??)???p???t???5)ZF? a???????????n?aF??y??f?f??kv:?????-?Y?0????{G????? 
7?0????wD +?y3??5;D?umM???,C?af???#j?afi?f???n?;"???n???????&E?h?!?0?v??5?0?4? 3?ht7???l? ?}?N?z][??e4?f?Y?y??~?Y???f4????H??>????`vYw?hI4,?,0[?*F#e{O??!?e??????`vn???f_m ??4??Y?0???-zG????? 7?0????wd +?s3??5??hk?6?i?*?f?n?;??f?f?a????#S????v???E[??)M?hV!?0?v???5?0?4? 3?hw?????? ?{??.??MiZE? +a???[???????n?aF??E??f?f??k?u???mJ?*?U3????wd ??]?^z?|u????wo/e4?f?Y?y??~?Y???f4????Ha6o???f?>]?^z?|u????wo/e4?f?Y?y??~?Y???f4????Ha6o???f?>]?^z?|u????wo/e4?f?Y?y??~?Y???f4????Ha6o???f?>]?^z?|u????wo/e4?f?Y?y??~?Y???f4????Ha6o???f?>]?^z?|u????wo/e4?f?Y?y??~?Y???f4????Ha6o???f?>]?^z?|u????wo/e4?f?Y?y??~?Y???f???_~?L?????\??)???p????k??^?????2?e3????wD ??,?p? 3??{G?0?7?m_?/?E?z??7?J?h?!?0?v??5?0?4? 3?ht7???l? ?}?????????+-?Y?0????{G????? 7?0?????????E??)???p???W??cmM???,C?af???#j?afi?f??????}?Y??????????#R??????Y??????-?Y?0????{G????? 7???=????,??OOy;z??{G?0?7?m_?^E;??5)ZF? a???????????n?E7{)?[?????)oG??y??f?f??k??h+??KM???,C?af???#j?afi?f??^???V?k??x????n?;"???n???*????R??e4?f?Y?y??~?Y???Yt?????????/??v?????Ha6o???f????m???h?2?f?n?;??f?f?a??y?????Oy;z??{G?0?7?m_?^E[??^jR??f? 3k7?Q?3K3?0?n??<l???????????#R?g??Q???( x_??O ?7L?@.t?L?H8??????????E +?\uT? ??x??????v?Y?2????"i?,B4???z???f?ft??w??3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3??????>?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4???????3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3??????>?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????#?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f???3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? 
=??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!???? =???G3K3?????-?L?]????7???????5?YF???W$?"?E?f4?vC??5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f?????sx?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hF3k7?^??,??F3?f?\3?wm{VhG???sx?f?ft;??fi??^?4?h!??o????~???)_g?n??y?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hv???/2?????|?Y????5?????n4?nv?5?|??g?vD? =??h?oF?s?j???m?I??f???f?????K???uf??????G3K3?????-?L?]????7???????5?YF???W$?"?E?f????&3?o??ZY????5?????n4?nv?5??w?g?vD? =??h?oF?s?j???m?I??f???f??L??|?V?n??y?~4?4?????r?4???Y?Q{C??)?????\??e?y[{E?(?Y?hv???o2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??_2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??_2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??!3?o??ZY????5?????n4?nv?5??w?g?vD? =??h?oF?s?j???m?I??f???f?2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??!3?o??ZY????5?????n4?nv?5??w?g?vD? =??h?oF?s?j???m?I??f???f?2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??!3?o??ZY????5?????n4?nv?5??w?g?vD? =??h?oF?s?j???m?I??f???f?2??6????z^??,??F3?f?\3?g{VhG???sx?f?ft;??fi??^?4?h!??o??!3?o??ZY????5?????n4?nv?5??w?g?vD? =??h?oF?s?j???_??v3I?k? ?_??Qb0??9s?)utA;pb????6T??????? 
????}N??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??]?h??yNVO7???F???4????m?h?o??[hG?????????m\??H??n???2?e?f??}~?????d?tC?/j??YO3??,??&???v>??vD? }?H?????5?????vD? }?H?????5?????vD? }?H?????5?????vD? }?H?????5?????vD? }?H?????5??????|'c?e??wiy?????????X?~??D???s?z???5?????h?l?E?|;??B;???>G???^? ?@??,?U???Fl?H?/? 
????u????l`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? 
n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0???=???f;??3?f7]#???}f??Zs?~??`?n?s?z&N???J?G3??|?i$??5?v?i??j???N3?????M?H??|????????a)?????\??????????C0;??EI?f ??n????`?? n0?nv?5??;?g???57?wX +f?fp;??g?$o?_??y4???7k?F??Y?l????????4?????t?$????i;j?M???????5??8???W(q?<???Z???n?0????Q?.??N?%???ET)??-??!?A?@b?H??????)?=?$???g5??L? n0?nv?o?yw?4?v????:,?u3??kV"?8??V???h?!??o??4????1?t?????3I3?????E???Y?L?Qkn?????????Y???<[[?B????`???s??so??L?M{~V??$??3?f?F?wgM3mG??i??R0[7???f%R??lme +=?f???f?I#??y3I7??Y ~0?4????]?q??5??????K?l? n???H=????)???{B??jf?k#??l?i?*?Uf?7;?Y???I?n????`?i7?e7??v??S???v??[?wd +f????{B??jf?k#??l?i?*?Uf?7;?Y???I?n????`?i7?e7??v??S???v??[?wd +f????4+?2j??> m?t?lf?6?9B????y?E?_???f?ft?Y?f???<;?;#?1j?????hv?????g?>a??lf?} iV?e?2?]/???????,C?f? G?y??5O???k5?????n4k?????g?}gD;F?[?w????y?????'??????O?!?J??ZF??E4??????e??l?9?????=~?F??y???f???{;?????h??q??????y37??????????I4?Yi?Q?hv???6[?Z6? y?m?!??????4+?2j????fKW?f?!o?m?#?<[???[????h?iF7??nf??????3???-?;Z?f??a??lf?} iV?e?2?]/???????,C?f? G?y??5O???k5?????n4k?????g?}gD;F?[?w????y?????'??????O?!?J??ZF??E4??????e??l?9?????=~?F??y???f???{;?????h??q??????y37??????????I4?Yi?Q?hv???6[?Z6? y?m?!??????4+?2j????fKW?f?!o?m?#?<[???[????h?iF7??nf??????3???-?;Z?f??a??lf?} iVz8?3?]/???????,C?f? G?y??5O???k5??????n???>;?a??lf?} }??2j????fKW?f?!o?m?#?<[???[????h?ivK???DC??_????,[f????g???5n???R4;o??f?3r???O6??>??>?g??f??hh???e3????6?r??}??-z?Z?~4?4????G????/O??h?-3{oG?3?????1??????'?{?E??-??N4?9???}??w??? 
m??Y-?Y|_'9B????k????}??s?)??????E??5??v?\3???d????zF?v????[???h?s???????}+????Z?????Nr??g??????1??????SB??w??????kd????f?????{????'???????;???????E??-??V4???g??d?}??9?>??E?#c?????=??>;?^????????s??3??7????*?????s }??!??????lf????d?KJ??ZF?/ODC?-]-?Y????A???l?k?n???j?????-??? }v>?????af???yf???Q??-E??fnv=#? ??d3+??h?s|?Q?h?pGA?-]-?Y????A???l?k?n???j?????-??? }v>?????af???yf???Q??-E??fnv=#? ??d3+??~? +n?@??O???4>?"?:??[??o?n??>a ??????WKx?8??Z???-Eg????}~??9S??????g?t???]?Q)????Mo??Y???????`8?{;????#?cV???1???3?????s"?5g?s?:?7[:??|GA;???Qs???1???????-?~?R??????~?:;=????p?vd??F????e?c$??sg o?=#?D??j??jt?o?t????v??q??L??c? d??? ?[v?F???3??7???ftvz&+:S??,???>???l?Yu???H??????{F??8??5??:??l?(:?????F??lg?l?>???????J?+gLgoz??????3Y?? gqoG?Y?gd{??[?=F??=w???3rN?????q??A??fKG???(hg?7j?`;;f?@??>?0?e?oT?_9c:{???X???????`8?{;????#?cV???1???3?????s"?5g?s?:?7[:??|GA;???Qs???1???????-?~?R?????????:???LVt??Y???}?????}??(g??1?????q??9k??u?9???Qt?; +??????3????.?}?O7Lo????W?????????N?dEgj0????g????1?n???r???[|??9??????Z???-Eg?????N?Ps???1???????-?~?R???????0?e?oT?_9c:{???:;=????p?vd??F????e?c$??sg o?=#?D??j??jt?o?t????v?;UC??lg?l?>???????J?+gLgoz?|Cg?g??35???????????U??{?D9{???-?g????G?Y?\????????3?Q??|?j?9S??????g?t???]?Q)????Mo?o???LVt??Y???}?????}??(g??1?????q??9k??u?9???Qt?; +???T 5g +???] ?l?n????7*???1????? ???????`8?{;????#?cV???1???3?????s"?5g?s?:?7[:??|GA;?????L??c? d??? ?[v?F???3??7?y????3Y?? gqoG?Y?gd{??[?=F??=w???3rN?????q??A??fKG???(hg?S5??)?vv?v???}?az??????r?t??7?7tvz&+:S??,???>???l?Yu???H??????{F??8??5??:??l?(:???w???3????.?}?O7Lo????W?????????N?dEgj0????g????1?n???r???[|??9??????Z???-Eg?????N?Ps???1???????-?~?R???????0?e?oT?_9c:{???:;=????p?vd??F????e?c$??sg o?=#?D??j??jt?o?t????v?;UC??lg?l?>???????J?+gLgoz?|Cg?g??35???????????U??{?D9{???-?g????G?Y?\????????3?Q??|?j?9S??????g?t???]?Q)????Mo?o???LVt??Y???}?????}??(g??1?????q??9k??u?9???Qt?; +???T 5g +???] ?l?n????7*???1????? ???????`8?{;????#?cV???1???3?????s"?5g?s?:?7[:??|GA;?????L??c? d??? ?[v?F???3??7?y????3Y?? 
gqoG?Y?gd{??[?=F??=w???3rN?????q??A??fKG???(hg?S5??)?vv?v???}?az??????r?t??7?7tvz&+:S??,???>???l?Yu???H??????{F??8??5??:??l?(:???w???3????.?}?O7Lo????W?????????N?dEgj0????g????1?n???r???[|??9??????Z???-Eg?????N?Ps???1???????-?~?R???????0?e?oT?_9c:{???:;=????p?vd??F????e?c$??sg o?=#?D??j??jt?o?t????v?;UC??lg?l?>???????J?+gLgoz?|Cg?g??35???????????U??{?D9{???-?g????G?Y?\????????3?Q??|?j?9S??????g?t???]?Q)????Mo?o???LVt??Y???}?????}??(g??1?????q??9k??u?9???Qt?; +???T 5g +???] ?l?n????7*???1????? ???????`8?{;??????;?i?:? #?e]d?-?F??=w???3rN?????q??A???9Eg?????N?Ps???1???????-?~?R???????0?e?oT?_9c:{???:;=????p?vd???Ww????d??V???g$??sg o?=!?i??j??jt?o?t????v?;UC??lg?l?>???????J?+gLgoz?|Cg?g??35????????????{'???????#Q??;cx?? ?N??Qs?8W??s|?????w?3??j?`;;f?@??>?0?e?oT?_9c:{???:;=????p?vd???Ww?i?;Y??U????r???[|O?w??????Z???-Eg?????N?Ps???1???????-?~?R??????8:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd????(?3t???Y??Lg????;?????????r??L?-?'?9??????????????: +?:U??????(???????b??? +?Y~???S??????YOo?7vv"???,?*Lg?~??????????w5??wS????????[>O?s??7gr????????3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:?Y~T????7g??3??????0j~????;3?Y?3??|???4??n??;????Gg?(lg?T??3????T?{vnJo?Y?^*Lg????N=wf??vg=?!????Lvt????0???n?rog:??o?a??^?M=wf??vg +o?<1?i???? v??????Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? +s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? 
+s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? +s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? +s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? +s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ???Nd??3t??? ?/ps?{;?Y??x?????n??3???;Sx???yN????, at n?s9?? ?? ?*ps????}? +s???A?M1??K??,?w??????r????7?;;???????3t???Y??Lg????;?????????r??L?-?'?9????????q???3t?3t????jg?Q*?=;??7??k/????Q{??;3?Y?????o??D&;:C? +????7g??3??????0j~????;3?Y?3??|???4??n??;??s???Q????7g???G?0????????T???sG??z??,g??zzC?????? ]*`:C? ?????t??3????????z??,g????yb??|??9 ??G?9:CGa;C? +??9?vv????s{PzS???Ra:???w??3???;?? ????e? ?P?`\?????5(???RB??V?y-??D&;:C? 
+????7g??3??w??5?????????v??L?-?'?9????????q???3t?3t????jgO- ?>???7???, ????Y?N=we??~g#?!????Lvt?.0???n?rog:????k?5?S?]??????[>O?s??7gr????9Gg?(lg?T??3???Z?}vJo?Y?YLg?????z??lg??FzC?????? ]*`:C? ?????t???0k??????2?Y?3??|???4??n??;??s???Q????7g??=?4???:>?????4???wg?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?<1?i???? v?#???????Sn?P;{ji0??u|(?)f?gi0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g?]??????z??lg????yb??|??9 ??G?9:CGa;C? +??9?v???`????PzS????`:??????sWf;?w6??????dGg?R??_??,?v???????Y?G?7???????)????K??,w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????Y??`?8r??: +?:U???????s?]????b?}??Y???}???2?Y?????o??D&;:C? +????7g??3??w??5?????????v??L?-?'?9????????q???3t?3t????jgO- ?>???7???, ????Y?N=we??~g#?!????Lvt?.0???n?rog:????k?5?S?]??????[>O?s??7gr????9Gg?(lg?T??3???Z?}vJo?Y?YLg?????z??lg??FzC?????? ]*`:C? ?????t???0k??????2?Y?3??|???4??n??;??s???Q????7g??=?4???:>?????4???wg?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?<1?i???? v?#???????Sn?P;{ji0??u|(?)f?gi0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g?]??????z??lg????yb??|??9 ??G?9:CGa;C? +??9?v???`????PzS????`:??????sWf;?w6??????dGg?R??_??,?v???????Y?G?7???????)????K??,w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????Y??`?8r??: +?:U???????s?]????b?}??Y???}???2?Y?????o??D&;:C? +????7g??3??w??5?????????v??L?-?'?9????????q???3t?3t????jgO- ?>???7???, ????Y?N=we??~g#?!????Lvt?.0???n?rog:????k?5?S?]??????[>O?s??7gr????9Gg?(lg?T??3???Z?}vJo?Y?YLg?????z??lg??FzC?????? ]*`:C? ?????t???0k??????2?Y?3??|???4??n??;??s???Q????7g??=?4???:>?????4???wg?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?<1?i???? v?#???????Sn?P;{ji0??u|(?)f?gi0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g?]??????z??lg????yb??|??9 ??G?9:CGa;C? +??9?v???`????PzS????`:??????sWf;?w6??????dGg?R??_??,?v???????Y?G?7???????)????K??,w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????Y??`?8r??: +?:U???????s?]????b?}??Y???}???2?Y?????o??D&;:C? 
+????7g??3??w??5?????????v??L?-?'?9????????q???3t?3t????jgO- ?>???7???, ????Y?N=we??~g#?!????Lvt?.0???n?rog:????k?5?S?]??????[>O?s??7gr????9Gg?(lg?T??3???Z?}vJo?Y?YLg?????z??lg??FzC?????? ]*`:C? ?????t???0k??????2?Y?3??|???4??n??;??s???Q????7g??=?4???:>?????4???wg?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?<1?i???? v?#???????Sn?P;{ji0??u|(?)f?gi0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g?]??????z??lg????yb??|??9 ??G?9:CGa;C? +??9?v???`????PzS????`:??????sWf;?w6??????dGg?R??_??,?v???????Y?G?7???????)????K??,w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????Y??`?8r??: +?:U???????s?]????b?}??Y???}???2?Y?????o??D&;:C? +????7g??3??w??5?????????v??L?-?'?9????????q???3t?3t????jgO- ?>???7???, ????Y?N=we??~g#?!????Lvt?.0???n?rog:????k?5?S?]??????[>O?s??7gr????9Gg?(lg?T??3???Z?}vJo?Y?YLg?????z??lg??FzC?????? ]*`:C? ?????t???0k??????2?Y?3??|???4??n??;??s???Q????7g??=?4???e??uF?? l$ +?(m(??????????1????DGyoq???=??T????????q?????=(?)f]{?0?????w??3???;?? ???Nd??3t??? ?/ps?{;?Y?g?]??????z??,g????yb????qs 7?9??st???v?N?9s@??>J?????A?M1??K??,?w??S??????YOo?7vv"???KLg?~????????=??F?????sgf9kw??????????????q???3t?3t????jg?Q*?}vnJo?Y?^*Lg?????z??,g??zzC?????? ]*`:C? ?????t??o?0j~????;3?Y?3??|???4???? v?#???????Sn?P;??Ra??s{PzS???Ra:?????sgf9kw???????dGg?R??_??,?v??|?x??Q?{}7???Y???)????;??7??k/????Q?N=wf??vg=?!????Lvt?.0???n?rog:????k5??wS????????[>O?s???n??;??s???Q????7g???G?0???=(?)f]{?0?????w??3???;?? ???Nd??3t??? ?/ps?{;?Y?g?]??????z??,g????yb????qs 7?9??st???v?N?9s@??>J?????A?M1??K??,?w??S??????YOo?7vv"???KLg?~????????=??F?????sgf9kw??????????????q???3t?3t????jg?Q*?}vnJo?Y?^*Lg?????z??,g??zzC?????? ]*`:C? ?????t??o?0j~????;3?Y?3??|???4???? v?#???????Sn?P;??Ra??s{PzS???Ra:?????sgf9kw???????dGg?R??_??,?v??|?x??Q?{}7???Y???)????;??7??k/????Q?N=wf??vg=?!????Lvt?.0???n?rog:????k5??wS????????[>O?s???n??;??s???Q????7g???G?0???=(?)f]{?0?????w??3???;?? ???Nd??3t??? 
?/ps?{;?Y?g??????u???!{\,???<1?)?~0?{?????9?????????}_>`;??F??bJ?????A?M1??K??,?w??S??????YOo?7vv"???KLg?~????????=??F?????sgf9kw??????????????q???3t?3t????jg?Q*?}vnJo?Y?^*Lg?????z??,g??zzC?????? ]*`:C? ?????t??o?0j~????;3?Y?3??|???4???? v?#???????Sn?P;??Ra??s{PzS???Ra:?????sgf9kw???????dGg?R??_??,?v??|?x??Q?{}7???Y???)????;??7??k/????Q?N=wf??vg=?!????Lvt?.0???n?rog:????k5??wS????????[>O?s???n??;??s???Q????7g???G?0???=(?)f]{?0?????w??3???;?? ???Nd??3t??? ?/ps?{;?Y?g?]??????z??,g????yb????qs 7?9??st???v?N?9s@??>J?????A?M1??K??,?w??S??????YOo?7vv"???KLg?~????????=??F?????sgf9kw??????????????q???3t?3t????jg?Q*?}vnJo?Y?^*Lg?????z??,g??zzC?????? ]*`:C? ?????t??o?0j~????;3?Y?3??|???4???? v?#???????Sn?P;??Ra??s{PzS???Ra:?????sgf9kw???????dGg?R??_??,?v??|?x??Q?{}7???Y???)???????7???,????Y?N=we??~g#?!????Lvt?.0???n?rog:?????5??????v??L?-?'?9????????q???3t?3t????jg?U*?}v?Jo?Y?Y*Lg?????z??lg??FzC?????? ]*`:C? ?????t????0k??uS?]??????[>O?s??7gr????9Gg?(lg?T??3????T???>~?????T???wg?;??????????|cg'2???T?t???9????,?3Z?a??Q????2?Y?3??|???4??n??;??s???Q????7g???W?0??}?(?)f=g?0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g???????M=we??~g +o?<1?i???? v?#???????Sn?P;{?Ra????QzS?z?Ra:??????sWf;?w6??????dGg?R??_??,?v????h??Y?G??z??lg????yb??|??9 ??G?9:CGa;C? +??9?v?^???g??????????t??;k?????v??l?7?;;???????3t???Y??Lg???????Z7???????)???????7???,????Y?N=we??~g#?!????Lvt?.0???n?rog:?????5??????v??L?-?'?9????????q???3t?3t????jg?U*?}v?Jo?Y?Y*Lg?????z??lg??FzC?????? ]*`:C? ?????t????0k??uS?]??????[>O?s??7gr????9Gg?(lg?T??3????T???>~?????T???wg?;??????????|cg'2???T?t???9????,?3Z?a??Q????2?Y?3??|???4??n??;??s???Q????7g???W?0??}?(?)f=g?0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g???????M=we??~g +o?<1?i???? v?#???????Sn?P;{?Ra????QzS?z?Ra:??????sWf;?w6??????dGg?R??_??,?v????h??Y?G??z??lg????yb??|??9 ??G?9:CGa;C? 
+??9?v?^???g??????????t??;k?????v??l?7?;;???????3t???Y??Lg???????Z7???????)???????7???,????Y?N=we??~g#?!????Lvt?.0???n?rog:?????5??????v??L?-?'?9????????q???3t?3t????jg?U*?}v?Jo?Y?Y*Lg?????z??lg??FzC?????? ]*`:C? ?????t????0k??uS?]??????[>O?s??7gr????9Gg?(lg?T??3????T???>~?????T???wg?;??????????|cg'2???T?t???9????,?3Z?a??Q????2?Y?3??|???4??n??;??s???Q????7g???W?0??}?(?)f=g?0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g???????M=we??~g +o?<1?i???? v?#???????Sn?P;{?Ra????QzS?z?Ra:??????sWf;?w6??????dGg?R??_??,?v????h??Y?G??z??lg????yb??|??9 ??G?9:CGa;C? +??9?v?^???g??????????t??;k?????v??l?7?;;???????3t???Y??Lg???????Z7???????)???????7???,????Y?N=we??~g#?!????Lvt?.0???n?rog:?????5??????v??L?-?'?9????????q???3t?3t????jg?U*?}v?Jo?Y?Y*Lg?????z??lg??FzC?????? ]*`:C? ?????t????0k??uS?]??????[>O?s??7gr????9Gg?(lg?T??3????T???>~?????T???wg?;??????????|cg'2???T?t???9????,?3Z?a??Q????2?Y?3??|???4??n??;??s???Q????7g???W?0??}?(?)f=g?0?????w??+???;? ???Nd??3t??? ?/ps?{;?Y~g???????M=we??~g +o?<1?i???? v?#???????Sn?P;{?Ra????QzS?z?Ra:??????sWf;?w6??????dGg?R??_??,?v????h??Y?G??z??lg????yb??|??9 ??G?9:CGa;C? +??9?v?^???g??????????t??;k?????v??l?7?;;???????3t???Y??Lg???????Z7???????)???????7???,????Y?N=we??~g#?!????Lvt?.0???n?rog:?????5??????v??L?-?'?9????????q??????2??b O????4????&?]????,?@l?Q????7g??=?4???:^?????4???????z??lg??FzC?????? ]*`:C? ?????t??_?0k??????2?Y?3??|???4???? v?#???????Sn?P;{ji0??u?(?)f?gi0??ug?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?>1?i???9 ??G?9:CGa;C? +??9?v???`???xQzS????`:???:w??+???;? ???Nd??3t??? ?/ps?{;?Y~g|???????z??lg????}b????qs 7?9??st???v?N?9s@?????K??,?;??????v??l?7?;;???????3t???Y??Lg???u????n??+???;Sx???yO????Y??`?8r??: +?:U?????????]????b?}??Y^w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????????q???3t?3t????jgO- ?9???7???, ?????s???2?Y?????o??D&;:C? 
+????7g??3??w??=?????????v??L?-?'?=??7gr????9Gg?(lg?T??3???Z?sv/Jo?Y?YLgy?Y?N=we??~g#?!????Lvt?.0???n?rog:????{?5?wS?]??????[?O?{???n??;??s???Q????7g??=?4???:^?????4???????z??lg??FzC?????? ]*`:C? ?????t??_?0k??????2?Y?3??|???4???? v?#???????Sn?P;{ji0??u?(?)f?gi0??ug?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?>1?i???9 ??G?9:CGa;C? +??9?v???`???xQzS????`:???:w??+???;? ???Nd??3t??? ?/ps?{;?Y~g|???????z??lg????}b????qs 7?9??st???v?N?9s@?????K??,?;??????v??l?7?;;???????3t???Y??Lg???u????n??+???;Sx???yO????Y??`?8r??: +?:U?????????]????b?}??Y^w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????????q???3t?3t????jgO- ?9???7???, ?????s???2?Y?????o??D&;:C? +????7g??3??w??=?????????v??L?-?'?=??7gr????9Gg?(lg?T??3???Z?sv/Jo?Y?YLgy?Y?N=we??~g#?!????Lvt?.0???n?rog:????{?5?wS?]??????[?O?{???n??;??s???Q????7g??=?4???:^?????4???????z??lg??FzC?????? ]*`:C? ?????t??_?0k??????2?Y?3??|???4???? v?#???????Sn?P;{ji0??u?(?)f?gi0??ug?;??????????|cg'2???T?t???9????,?3??a??Q?M=we??~g +o?>1?i???9 ??G?9:CGa;C? +??9?v???`???xQzS????`:???:w??+???;? ???Nd??3t??? ?/ps?{;?Y~g|???????z??lg????}b????qs 7?9??st???v?N?9s@?????K??,?;??????v??l?7?;;???????3t???Y??Lg???u????n??+???;Sx???yO????Y??`?8r??: +?:U?????????]????b?}??Y^w??S?]?????Ho?7vv"???KLg?~????????;??f????sWf;?w??????????????q???3t?3t????jgO- ?9???7???, ?????s???2?Y?????o??D&;:C? +????7g??3??w??=?????????v??L?-?'?=??7gr????9Gg?(lg?T??3???Z?sv/Jo?Y?YLgy?Y?N=we??~g#?!????Lvt?.0???n?rog:???{? 0?ZU +endstream endobj 29 0 obj <>stream +H????{TU?????A???AD?*???B@ ?&?1P?? ??4??????{????K?]%?N?0w2?????????W???Z?????????n???????[??????;s???#G?5]WTy? 'N 2??j???=;?? c??G???????n????S????????3gU??>U?U???????????j??uTTTTT???.U??????:]u?+? +??????????*c?w??????????????R?m******************???n?%????%??m??jMk???+W?lii?y?r5}b???w??E???\???w???? /?????o?E???i?:??L????????{?n??V]??\q?p?LvV?????? 
pU???G?*U?x}P?=z??e ??D??{???????{ +?9U??T???g?????~?????????m??????]]?< ???z??+n?????KT?g?N?c?\Gi????s????l?uuuT??Jz??1c????\sPOT?.?]4yn???s?G???+???e??G?*U>stream +H????kT??q???l2???1 $????/?b@/? ??? ?O???cQADJl?P??;*???Z???\?{??{%?,??}?Y??W^'????ff???q????????4?:??? +?~???n???|n???????51?2?E?????f?{?????]?~????WU?ko?}6??U?#G?Xb???????;g????ib?9c???r??????;Y???f?? ???>k???X]?-h41?|1???|j??=?X???t)^B????Yo? +???????Zb555?X???od04d????AKK?????wEy???? ?\n?'??????2@?29d??eC??S?=+2@???x i? ?A?w)B?2@???Y??2??.?KH;d? ??K2???A?? ?w???{V8d? ??K?? ???R? <1d? <5???!d?=]???v??"d???????xc??C?!m ?2??2?!d???? ??R???C????!O  O???p?xO??%?2@???xb?xj?g?C??{?/!??2(?.E??C??S?=+2@???x i? ?A?w)B?2@???Y??2??.?KH;d? ??KQ??`??u?F???Ss4d??!?`1d ??????W??\ww?&?2;d??9?B?2??|? +? ???t????"? ??K2? (b? X H? tCa? ?5?S???????? (b? X H76e???f?5??}? +? ?hk????"? |5???!d?=??qd??!?4?R? 4GC?2C?!???????????? Bhk????"? |5???!d?=??qd??!?4?R? 4GC?2C?!d?\? ??D??3??D?2??|? +? ???t????w?RvfTxo??#?\.?\? ????h?@C?b?@:d? ? ?? ?? ?GC?2C?!d?\? ????h?@C?b?@:d? 1d???? ????#E ?y?"d?92P??A?2? ??d`? ?GC?2C?!??? Jj?@[s>? 1d???? ????#E ?y?"d?92P??A?2? ??d`? ?GC?2C?!d?\? ????h?@C?b?@:d? ? ??2? (b? X H? ?Ar20C??!E ?!??2H.@f?@~4d??!?`1d 2@??? ??? 1d,? ?C? ??!????"? ????t?$ 3d ?2P??A?2? ??d`? ?GC?2C?!d?\? ????h?@C?b?@:d? ? ??2? (b? X H? ?Ar20C??!E ?!??2H.@f?@~4d??!?`1d 2@???,STVVZb??/ dP?2X?b?&? |1d 2@??? ??? 1d,? ?C? ??!????"? ????t?$ ?LePUUe?!d? ???t?$? ??? 1d,? ?C? ??!????"? ????t?$ ?LePc??@??!??2H.@f?@~4d??!?`1d 2@??? ??? 1d,V?2hhhX:r????2???A?!??2P??2@??0d`? ?GC?2C?@C?!3d ?2P??A?X??????????$ 3d ?2P??A?2@?2( ?!????"? ???2??Aa???te???}b??~???(bE?+?5???!?le ???2hkks? d????f/???!????"? ???2??Aa?? ??? 1d,? ??8? +Cf?+????%???@?@+i444?c?^?~??!d ??? ?% 3d ?2P???*? 1d ?!????Y?????e? ??2hll???D????-U3r?X???Uw{???*? ?C?????JCi??xb?xj???2@?`t? ? ?C??C?@C?!3d ?2P??D??l????/I,?2?~]C?!d`? ? ?C??C?@C?!3d ?2P???<!d? ?A2d08d?2@??g?????2p 3d08d?2@?2@?2( ?!????"? ?? d? ?!??!??2p?>? ??Q??Z---????!E ?y?"d??!d???G? ??22p w  q ? 
????h?@?????V ? +?/??????2p 3d08d?2@?2@?2(l?d????m???k?????????hm???;v??s?2(C??TWWkb??de?????e?????!y ? ????h?@C?X? ?2@?`???{??c??c???Q??_m??????n??????7?lWW??_?>}???cMMM??!dF[??bd?l?2M,?w??f??ep??*f???}?41d ??????w{.???????FCC??!E ?c2@? ??C?!d??!d ?!??FE??????{?.??????g~??y?f??U??j~??}{W2W2?WHeP?????`????2??? 2?q??&????3??]?_?e-?-??v??x?7@*??)??+++51d??]???'? ???f d? ??GC?2P??2@?@?<2???]?v%?l?????'?????? ??1?uuu??f+?????R???O2??w?'? ?_??/???2????????gY+w\?vMs???YU?.???M ?ccN?\NCi??xb?xj???2@????:u???a???????????5k?|?????Ed????M?X????x?bKl??I???? -?)S?hb??????O?????7??5k?&???_f?={???s?M?u?L[l?\Ul???X????M?:?[?`?&6y?d??????&N??]l?? ??Z?7??n??fo???_|??Yo??i??ibc?n????gZ??????~?W?n?V??^?p?*f???,Y?????E?% 3d 2P ?f?!????? ?C?`???b?@>d? ????i????~??????D?c??? c?1?a??1?c6*2?????W?z?*???x??E???~???????????6n????#????o=z??w????/^?t???z???8???}?k??[?|???????v???+W? {??Z???%???}?J??Lj%?Y+?n???????g#?^??>? ?"?R??g?+?;????%??E?2(?2?K???hc????????@ZC%+??{??;w?\??????7o???>????'???????m ?????-?@@?? ??@?1??1?hJ???B?(( 1DBE.AJ0?1@?K)?@@?????q??6?s:??v????L???w??^?Q???333U??,//??}????tb??????@??5?bC??*L*&[O?s-?bf?N?b?????? +c??ZZ??~?X?1:A?H?H?_?z?%??k?N\:?jt?3?3????p8??~?_??t.--i_????g???}?G?????~Juuu???Y??5?bS???L*&[O?s-?bf?N?b????L?k?O\:A?H?H?&?l=?T?>b??k?N\:?jt??3?i???X?W?y?^??I*r???5S'.??^5:S????????/+A|?x?b?????/?\ ????S?X???l=??Z8wY??@P,?+??I/[/?y?X??????@???)?? ~?.(?i????????? w??????????crr2?!bn? 9??????8???f?EE???p?A???[SS? ~ %%%33??*++ + +$?///K35qqqCCC???Cg at L??!? Ag@??@g`:C?r??loo??^?7?q???????)??##??????===Ru???? ???WV6666;;+.6<,V??"????y*LA?????x?J__?hh??>q????(???U?7o?`??n?:s?R?ZY??Ahj?????E!?7?B??????q???#/_?4????????? ?'???~????SRV8==m?>??PD.?x??).??C?????????_L?X?V? ?H3?[??????jA?>??E-?8????@C?S{CCH-?>?{? +/????J?? ??*)?.???hP???L???V?U?R???=?=?cQ??=>?M??(??xL`?m{%??[a0 ?^? +?N?????'??`?????i?(8K\NNNZ?"?7hA??? +q??Fw?(???~???|??!!!bj????????????? ?Bg? ::???3? ????3?3?::??Bg at g?:?_ ????r?????[????&??!*????}??$r??Q;?"I ?~x ???????????? 
?q?????N???,??????8????\?Jq??????<*]+(..??S????ALg\??WT8Z>N???E??????)?N???t`L L? E~???? ???s?n? JYY??Q?(?9?q?':;????????????,? ;???XTT?3.?'O??????l6??`??X????????????????w?????z ?t1r????? ???`A?X??_?~?[ %?E?????2????=::?G%??'????}????[???\;???rqwh???7o?d????R???n"q?j ???a???S??8?L//??~????+????:??-VW????4??&???<88HS644?????????"??4ni?^???????`}?B??}????R+??d??????g???h4?%?J???4??Q??????????????y?[VFKA7L???n?L?c????C ?~??1S???????;C,F0iQ>J,??0??[???mm????N!?t?I?R?;?w??H????1<`o??\^?f?_?!?????b9?s-:????##?T? ?Y?]?S?????n at -???^?Y???sn??f???]?t???e?>?F?Bn"?[??1UI????????/????=4v??L?&??Q?*g??[?3???g??gp ?30? 3?30?? ? ?38?g?.3? lg? ? ???????R?(>? ??a??_?~?6???U?O???J???jq??w?VWW??e??? ???s?Y??}}?100 at FL`? u?,??M^?X??VUU??????xN?$??r?0????????|??w?? ????:????Q???wY??f???p?R????e????????`?K?????1|,??z(????"d{w?jj?i???X*?r?C\? ?3p +? ?3?Og`?A:g`?????f? 9G?|>?????It???s?drr2''G???9f-%??b?????D>?|?????f? +?BM;JA|?2??Io??d??6 ?WN?> ???p?1?$???8??=M??~{??/^?]h?R>d?F#?????Z]?kg~Z$??G?d??????7o? 4???I?(?ttth????vh???????????????R1!????s?o?'?uy-?z??????*#?T?w8???????q??a/?? +?~?T??"34??uAL$?R?Y??O?x?>?gPP?'???'?+W? ?????9?)????`)??8#???<7e???P????l={j?p???>?`?`???????yK??????AD????2BH??U=???D?????&3^????????[?^?p?d#??C???_?x?????%?C>?????e??????l?a???:????-?m2?l?????q?5?:'?????` ??5}>(?????????7l??????>?N??6?e???/?P9???T??$57??B?p?J?hf???O?1?16?o??>???(??o???????`0????D?A;;??0????Y????K +?`?P?? +????q?J?w=?????7??&t??:x|????1B????7?#??r"qj????_?N??)?A????H^?0Ir????A????T=yBa?????oE??D??TX?;?g?,3????av?`z????~?eWW?R?w6??s??]????l?A??&? 7wC??~???db|???%?V?-???vw91???D^^kk+??????? |? ?=*|?QQA ?m?%sY??}???T?k???9>????,r.??,B?????XT?????J?b?E/9g? +??IuPn ???X&Z? ?(?` +??FX]??>??/ ?oY?M????? ??#?}?d?????D>????kN?8? ??8'0??8??0??8?}0???? 2?8? ?q?? ?3H??3?P??'EF??Ek?-:?3@??ZXX?lum/.????]r&.@????N?????? +??p???? ?~u5 +?vut0??5?????i?????.e?}?z?###KKK>m???fJfeeE)M?????c?????????: ??9??A?r?x?F????#Y{???M?? ????_Su??g????~\\\t?????P@?3?"????(??6r455?~??(o?^???Rb??f??]c[? 
+A`????sZ?????y????j}kk?ahR?? ?!mf?s??M?,--??b??x +v&7???i???N?FC!?'?D?z??2??mV?U?aI\?>P. j[lX?G|e%N???? ????n???`??_?Z?8?????i?8?[??E??MRSrNlv?h?&?B?;??DE?f???2?Ln$??:?`??~:???????Q????s???t??E??Gv??D?q??k??????t?0??????????????D???5q???y????????????O???hCw0???;::??KKK?9???4V??H$?u"2 ?)??? ??4???TJ?&fgYc??UUqqJ?0A???rX]???%???i????i?????r?????????#??>?????g????76?&I??? ??V??8?:??o???????A_?????K??_CC???p?#+t???????)?$?N?R--???E,??.iA?h3?2M?U??????VXe?VXe?V|[??d[)??I9???/?`??7?UVXe?V??*? R?*????Y??/?????{????????????!?b????? ?c?`N\Y11Bo??u??? ?Ty9?RP? +?>y?1?A?<}??@8XZ"R???????uu:???^??????{G????????{????f??:q?Dq1?????Vl???:9d????????'???? ??`Mc??sII?p$??Y?HC|??`??WYIG?????`ff??L[[[SS?"I?$'???`a?blpp??7s??W???????a?ckkkQt???gn?k???U??5?????7A`?K?M?Nrf6????!|?O???%??????Iu?H?F-"%J?,C?????EF??h?4] ?B?B-R1????>}????V????9O????s???-??A???????c@?????o???'O??????_?)?Y???p~&???K?.?U??33P???62??!73d`??? 2?? 2H0C?? 2H0C? t???!3d?? [????;U;3R???U????#Y&M?~P?{??m?,?s??;r???7bii????"????????K>?M5?c|?????q??X?!??!73d`??? 2p5C? \????7C? 3d`?`C3d`?`3d?_"?? ?ldd??????|i9#S?:MD'wtl????????vww7?" ?aP?t8? +?^T+?!???%?#?q1???????X,F/??k?X???q?%h??imml???_Z??????:?D???M? ?????? ??]:????@Va?65']?/_?????Z???1???G32????/#??%,? ???p?2???|??>??x0>h????-[?Gj?i'H?????7w6?????}??;?_??/i3/;A!????48???2?N9?qa5/q???BKL???????t3??3[t(eM0?????C?:??WW=:??-//,,$??Y&- at WI/?9???????F3???0m?s????z65?b?????????Y?H!v????[?? +Q???????X)?LNN??P?????]?v?*d?"?g????j?? ??!C?f?????f???2?? ?x3d?m3d`?`c3d`??kf??????P????y??????]?Begg??u??????U?? Md????s???????:?d??"4a#?>?!???????!???????????4*?CQ^?~?fP$??fj??? ?;w.!???rk||<'??????>N?f????N9??????????G????Y??a8Q??Q??dRRt???Y?C ?h??T?F?w? ????????9Ba?f??)essB??ahhbH1o????,??H0x?$???$??$H2?-I? ??$???d`]? ?E? $??$I?H?AL?$dp??Ev??ph?3????)??PE????????$?>E8[???$gW?)Cv??G?KE??%''C?p??G??[??^ +X?h???????????Hv??`???0eD $'?Y?#F?#T??????????(???????,????HQ233-?KY??z?kVl?yc???P???bU?9q??????????? ???Fzvcc#???WV?????vgee?\.?pdP?? ?B$? 
+??#?O<????.??FD?????p????W?R?GL?m??+?T~>?????4?>T ??X.???????sF}}????O|??Bb?,??????????J?#:99ifn??q???9??xT?{u??????H +?s?6?????C;??R?,_??? ?d?SC\????gQ???kk)???[3??X6|??H?Na??^v:??N|,??9m+R^Q(?????l?@@?I????? EN5Yy??q????N????`E???????????vp????qX?p?6???Hl?????X{v0%??3???wQ?H?K??WWW?>}?%XY?oQ;fq? w??m???/?y?? ?B>?@??>2?SjkUq:$?FGU?Q[ ?/????4????)??????[??"???JW?\?,??/_?\RRbd??e?jllL?N???VVV???|??O?n?_?nj7?G??eI2?#I2?dU? $D?$?$???$??$H2?*I? ?H???;2???~??'?i??????O>z?? +??T[[K^??z??f??TmWWi???DF+????? ?`%##???????A??E????333???????^??J??K?a8???p??????? +c + + {zz? ?K)Jkk??????$?Y????!;;?ADc%?`U-(( ?bG?N3A a?4????,Rt?[w#??f?#?0 l??q#z??aMM???S????1?a``???C???Z???E???g???`?~?=,???? ?)M36?$?H)??????v?ict^?]???vi??H;KQ??**jE??59????,af??????????I?* +;?j????^UU?z????se0???\??gG?L??q????\.??#7D}????? ? +??hi??pX47/ v?????`*.??d`]? ?K? J??I2?dC? ??p???b?95?%?????????z*?jnn.??#C$eC!?P."??? Q[?????????c???#z?*!E9|?D$?????????$??d??dK? $?$I? LK?A|J?A,I2?d`X? $????'$$??>]YYI?,.z<vO?5?n? ??????dE??i(???zJ??%???????'NkM??(????f;.???t?? Eiii??????AlNUU????=J???{??o??????????z????M??????????????????f??W e&???????d5b?0 C%?qD??k?H!je???"yM?t??[????}????sF?{??u?? ??r??Y??>??al?bq8???J?X,??9p??8????[TTT?H(??r????! EK?6????V???D~~????&?';????????Z5^ +????C ?? S??Bfl?????k7??????????????47G?`!????nR????/@u?????4?o&? *0?????N?j7? ????x??s?I?E???????[???? ????B??U_fZU??b?|mZ???9pf8QQQ???7=I20(I?X? ?$IF$??I20M? ~L? $?[? $?$?C$??L?#? +2a???8???a}d?v????A?????S? _n????.*?????l?7<?^/A??/?Z??x?Xyx-??????}? .?????F&bMM ; ???????A++??]???f??!??????R??8??}??7|>A]]]0???(???B1?z{{?y?????4#/!m,??^?t?Y???LLL$%???IKKc?Z[?*?J&v;?j?X???#?*%%epp????D???????tvv??cc1???9?rxX?? ! ke?!t??]?Y???????y??EZ??????p???O?T?t????Ql?8w65????^???eee?:???4???????;)y????Kyo????Q?&''??????w?s?????7?????~????N?3?q??2+???9 ????JJJz?://??`?o?????5-9????xT????q???l?l???@dd?B_j[, Fqyy????????M ????a??*?????b???!!?%?5>>?P#????cm? + *???$??d?GI??"I?J?A?d`?$??$)I?$+IAH???? 23CCCY?g??????>??/_?? 
??????????(,,??677?IN*????;?9:iWkk?^??o?[???5????????? QQQO?>w10?/??p|?/G???47?????KKK ????????!>4???lX????y_?????V????????r,? ?n7????!????"n?d,??t?@??7o????s????jg??????x##?? +\'''??????@???fXh <:?j?Zl???8F???$??PI2??$?@????d ?@?$?'IR'/I? ?$I?$??T?|O?<1???RSS???r?????????????Bu,,,????1Z[[??\?R""px???\OIaU??99uuuFB3???=?(????$>??fq????/_?????eeeqqq??%\#Nd$?????bRRRkk+O +?A????????:??z??????W7n??Y??1kK???JJ???? ??d#?55/^:????E????x?O???ji!??s?3!!???QQ?P???>2? m?????????a???8q???,BB?????Z_~??????333? A?oF??t?An??????????1 ??G?b'?^f??z$????.????$I?$?@??>I20U? ?~@? $?$I?$????????Xl????n??8????CB?????Q?Hs4???????+?er???[? ?sn????????????[[D?????p$UXSk??I??;88?;\AAO +?d????/..rya?/??(??X???+9?n?S??{|???????k,*?`)XQ?={?;??=,,,,*L????c???????3|'&??,X?????????d???]???B???N?Z??????=N?D??g?2??~?B.? |M???wCB??3????????Q?7????????ir??~4??????s?b;SRR??????T__OR?8????lv???;?O?AI2??%?@??1I2?d?[? ?k??TK??????????????? +??????KLL???????}??????]??>?z{??\?u??0??? 6ud??w??mllLKK;??D???A0?g ???&YW????q?+++???j?/????+)?/^??bpQ????O???1 ?? #???`??~?78??%?(???W?\?u????^???r3????????a?????{~~?????\b????&7?Vnn.?wS??f?u4?????!3?? ? .??x??H2?8?????2???????U?T??????zP??+?Y??????????@`nn.V???????i4b???7 kk? n?C?3?uPn +??????6???j??}??? ?2q????Up???T*C???????~??????`??8??????R?uB>????$''s????q(###???]UUu???~?f?og?2???x?X?? W .BP??nah?Xd2?0<< AIL?2Bl?????q????^G???\8?/???3?y??g??????????H?W&SZZ????z:? ???dd?Af *ddR?? ? 3 3?2Q!3 3????MM??????v?\}???????f??v???0y?"?t~??q4"u??=????U?|||???????bgg? l?6 at f @f???.? ? 3 3?N? ? ?2????dd? ???J?_??x<dYbbbh +?,%E?cN\???!?R?q? +04]??????{6datt?????_>8p8?7?Ze? ?????Z??????????v o?????^>j?I?lll,???? W&Srr?^?7?L?????????kkk?B????p]?o???????????R?~?? `OoP??,ddq?????\ ??!3????8??????{???????????c1???(V?ux?B?6?P>???P?.??????????*?? _?~?,?l???,>???wvvT*?X$????A??y???M????|???????>}????????0??????????]?B???H???? 
??'fP???~???E????????0x?C?P???(?&?idd%??Q????\???QIa???rv???(g??h?x?6??]\?{cccJ +????!Z?L????????NOO/????ikk?????u:????2???????????+??h???G$????!c??E?n??????}????g??,N??b63??? +3gC?6?????A?6 ???&%% ?????ONN???? +?W\\??%s????u??Cf at H22??@f?/??d?!3 3? d??A\Af at H????j?^?????h?????????DLE??V??P????By???e??P(????9?A?4?fC ???l?!???u:????!47????h?Z65;;???,z$z=???:???R?D???????v?c??????r?????8??;:::<<,?`??T??R??????806^F@??e?"??%???B@??e?"??%?????2?v????%7???`=? LLL??{{{]]]?OX?D"! _A?-?Lz?^???????????????????S??r??? %?WWW-k~~^???Y&???wzz??dGGGF?????I|?,o??????rZ??rY??????? ,??5Q5?A?Q????fggg>??,?F,--?????q[[?????&?^GF????y|3M???gD???dpK8??)?J?M;>>?H? l>stream +H???3h??7E*?4(?8J?Q4M????#??q?D"M??QB????-????c????}?W??3???h??????[?w?6???w?????]????F??}o?????hw??Q?zZ?x????????? +^o?? ?^?Vj|??[???t?|s???????mw???E? ????wA?\ ???????}?6W(<]??<]???2??$/O?>^??y?4???=?2*???+]H?73EP??s&??N$ ??1?7?H?????N?w??O?L??vn??n?FOn??ws$z?i??tZ??2?p ?6????d?qK???%|=>stream +H??[L?q?/ AP@?/)?AE?.%??7BeZbj????? ?L#?u???if?V?\???Co??Co????=? >?????????{??`????M??? ,:zT?C??s]???k?n?U???Sv??#???a?? ???&tv? +':???;?[? <?-?9#??G?????p??=P??^[??k??n?T???B?????2????= h???M??s???????+*@M? ????rP???r?i9m?e#???Pj?????"C??u8??EaF?u???????<}%?4)?INB?{_?a??4h????????.>:?B???j?b??B?*??H?!Cb?????eP?????A) Ed? +?b?"!??`H>??$A???|??? ?a?,????????jp?l?dv?R???I"??xs?? ???????M@? $?I??D[??s??$jB??@N?xGbI Q?h?$?I?$ +A???P??J?DB?DDB??? " ? Cx$?p ???1?O??h?? I?z +endstream endobj 11 0 obj [/Indexed/DeviceCMYK 99 61 0 R] endobj 61 0 obj <>stream +H? ??6P??Gdd??H??22???!#??????_????~???U?|?????????>xo?;o???+/???3O=??#K=??}?,????n?w? ?????+.???Y?w?Yg?v?I3?M?t????w??r?c??g?=v?e???f?-Fm??F?Xo??f?%? +endstream endobj 17 0 obj <> endobj 22 0 obj <> endobj 23 0 obj <> endobj 65 0 obj <> endobj 66 0 obj <>stream +H??WiTU???????xL?{/????2(? ?Q?8$Bx?(??[?w7??????V????|k??;???????`???=?w-?[????7?]yz???X?? 
????/o}???'J??m??????#?u???????????r?????????j???7?u4???'??Z?`?????<-?6 r??@?<?????????q???b?X+???I??u?.??|\?T|????0e?2\????$???a??YlA???l[????]??[??jO???8?c??_?/???????????]?????????vQk?[??;o$?]??^??>|???O??o2?~??+??CB?f0l??" ??@$?@ ?`D!C?X?a(?! HD?c?1)H???HG2??1?Fr1???c&?Q?!?????BL?4L'W????"?Ul?2?O???? ?8H?}HV???8J~??2??j|?8?r? ?Q?JT? +5x ???:??s?@#?l??(???? +??????S????eF????q?UL7y:??D?X+?qA\???iZ#\?5f*-rhfHq?HyQ?`?K???A?[?0ar?"\]??$?? `t1????2??cY?? ?HM?N????DtrG?? 9?m$?kNyqKZ^a?9? ?????q?`sh?9$U??w?????Tic' +Yr??????(""?"-J?}\,F???%?W?Y?WDwDwx?EE???3f??{h|\?????b64!~??????KL??]i? ??/WM?I??$?\){???M?????Vw?????$??????'??;??H?;????k?r,]??[?n?????0,?[9Ov~)??_????v?nX???????? ?;??(? R*?Qj?D9r!?????????}?"?D?*e?????B?F?.DED!D?6_?W???T?a?5???m??'H?N?b^?*?????([?e???<[??4[????O[?D?C?N?K?w?? ?}?%N0?????????Bi ???M?S?J6X????pg?&Q??V?c?n?O?C???V?w??`??Axs/%???????????*??????#?4)m?4m??????Z???????3??`N?z?U0?*????}Z^=??q?????g.(?3?v?*]Q;8+q?d????MD?AQ?2G3?L?s??B?0?@=F%t>i??Y????1??????f???p? `????s}#?D?$?H???N{~??C??-o????h? [?W'???t??7=??KP?wA???@m}D?J??"??m?k??Y+??>_?6?2?ve }O6????t???`????????H`??)Q?!?????????#?;!R?/?}=?a-??\?~i????E???I???????????U???oo????e?3?????????g??S7?????3????Y?? ?wg???o???e$?}???z?S7R?????Xk???VX=?? +mbZ?z???QXu'?%P?d5?E???A?QC?`?j?UK=??9???)G??-?=????aO=??o????Gj ?_L???L?;?J?W?^?Q;?j?z?? +c??#?8g?p????iU=b??]B???z?????fG???Q????5?W??;????%s`??Q?PQV???k???????=?????sC* R??2KR?> endobj 67 0 obj <>stream +H?|? \????Z?????$?J????QiG?T?>*:4??f??N??d?h?p?????DN?{v??????Z??&L?u????E??H?A???U?@?U??@@??R?!???AP#t??7???:M?&^2???V?[?4ijk????C???Z?qtrn??*k?n??????W{?:t????????~????G?? ????????????>???/???`?W???f???}?????  #BF???=&|?????Q?'?L????_?+???hD???E]BQ?I?IEj??F?? k??&??l????=5'?@-?%v?N??ZSr$'r???B?$S"iH????????I!?#?$/jO h!???h1%?J????Rh9?R???? +ZI??2)??i5=b??6??g ?b????T??e????md??f??mc??^v?a'?v???ev??b?Y??????#???zJ?????^?K??w?~????H&?!3YD?@F?vH??q????G???@???{?????????????Q??@??9!?yF??LD?????????????.???{?>?|??? ??Ry;*\??iz3?'C???:K??J?????O??? 
S?? z????)O?^y????z?<xN????[??)?WuPU'????k?3P?@W??@l???w???@j?????yX}??W?? gp +d?? ?M]??????:?t2~?t[??(?n?L? +P]T?z?3?E???Z?\2Y?~;? ?-?3?_Y?o??????t|?F;?r???a?? ?GP??P?/?? ????m? ?(????Eb?5?*?`ZBk??"??????u??u??"1~??TS???A?T??? ???????1,?e?O??c?F@? +?P +?r?g?E?? }?????,a?1V??;-???y?yP ?y3b?9??ZA+??r?????/??vQG?V??~?AC???i???h ??E}???*!C ?1%P??e*??zX?i$?@G?^???#y?i?i}c???J?W?j?????w??T??!#???]8?\???t_&Z????1?ah??S??R|*?Ug?V;?N?o);?IY???\?rC7??7???????x??V? u~????"GSP??j?D???~'%??????.+?#v?9???k{???iUSSj_;~!9t??):?RE???b\M?U *?}?r??EJ?[? )*?8??G]F-,??????????????V>?S???pdt6P?J??i?A??Q???~w1B??Y&???2?????2??jw?]??????SR?*??<??eyM??j??;*`?/]?Q??m????????` ;???? ??&}}M?z??cK??u:????u;?????;?]' i2? +?m+r??v??]x?]h?qw????/??Qb?H?`??b???X???e??2N??se2?=G&z?????D??????5???0??;K??Q??3?D?Q??5gvk??????=??c 42G??g?????ia:?O?L??????l?mgmUN?????? ?tA ????'?J?h)??a????hTE?Tv?K]?s??X?pC??&?????E?0%?`?????????`x?X??????lE??F_??2K?Q? ?C?JE?X?Q1??-C?5?6A-^?DW?Hge-?R??F2?h??G??fs??????S???p????J= ???j?>i??????.?kac?u???;ao\??:#?@?>????M??ND?z???~]?d|% ?[?z?Fm?O??}t?3V4?;?;???w0zvpz?P?? +?*????@9???K_??r?UtV???8??/?C??G4??1C????????:?|??) ?Z?? +?K-??.?|??V?LP?? ?WC??S???(?a???@?A??LH????=?????-?k@????:u?????????G?6??[?????E?f.M??4n???+???}?N?Z/9??f%???:?V.??Z??(???b`4??H?????G?Q%??1Cl??v?Yk??z?X????C?K?Q?=*Vk???5?f???S?????0?z?Jc??DbcK3?J???J??Oh?:k ?s??xF?? ?m?60????6?W??vo at 5|"Z??X??vZ0jy????m??k???[ -?I???{?Xe??Q ??n??vI SDbM1????HY????????vF???~??x ?.???p?lPW?% +??#?e?x???????g?y??/.????s???e??=U?????{?????#l??h2? ? J ??9_R???a?PNjr?"? ????\g?j?]????R +?d????c???????m??{^???n%?'?????? ????.?R0_?Xnh??,?_= ??7?z+ju???.?????????u M-h?ys????A{?*?U???)i??#-?Q/?q?5|I??`?6[_x????sV.\? D?$???p4@?/??0????D??N?$??6???SF2????\?:?^??Y??k???? +ytuV?|?K?????bJ?K7?????????????????{?ZS???e?K??????? h?2?q?;((??^?~?|-P???i??,5'G.L+???Y??1c}N?zyc?\??*G^%???J@???? +O?#{*???qy(?????W.?WH??????Y? 
+?0???}y~?????j*?2??6??????????v????~??e????+?\.se?C????W_?????l8Y??????zj8|T?%???j??[Ek^YPl??O]{???]??w??n??x?F???8:?????{????E?4?[???F +ziff??+,??.??4?????K:???@?? E?+?!??v??*???EjC?gP??<)ot??~;?2????mI????U`}??~??A??}cp?^???|/?6R?1?Q?aB??y??>?? +???????B???;?>H:l?>??0? ?3d1??S??i?%??q?C?A?&??'??F??1??:?????f`???p? +?[?? ?t?+ 2X??D?j0'??&0?f??cp??x['??c^l?'?y????????5?5hx??q?3?y?h?i??@?@I?9????\?g]??{????KE?-*pQ%?F?????s??n??s r????(????????A?1?ku??~@^??E???y~??s?u?Xj?}u??8 ??B?'???A?/?/I??X????EN??)?!??$D_#??#?G??@0g?Q??\?{P?#? +?07?? ???j ?=?? ?=?E????.??$F]?Os??K?o??:%??1??9N??'??c???4E?X?o??d*7q6b,????e?pn1?R2????9??.???}?&0????????????????pF? ?:?s??J? ????"??,??????%?-|?????L???#?q|F?9?w_??b9??)????u$??j?(Go%??1%?????????^??)o;c?3??}?/\??s?{???:?8?x???R????{T+?'????????=?.?r????.??+?Wy?u???D_W_?/???????(aW[???D? t?q???]9&???GtP] ?5????????g???? ??z ?+W_?W??y?8??m^{??w??? ?N?? bG?W?<+??m?pf8>}-???un??'#???"?"G??? ???;?? ?????????!?Ow?@?Y"???6w@?]??_?????6:??8???8? ???oh-???):??tD?'!x?(S??A|??=?O???????????? b?????JICI???2X+??*U??Ai?\?? G?????N?!???c???????v??5m Da?PW?*?J?u_*Z???^????|????D??s?=????????.`??@Q?ZgKb???]p +?v????CF?????4?Abl???????????}?'?e?b|??d??q?y??,??,??]??m??k{o8?F????K#??i??@???!:?~?????????F??_P?Z???z?y?I?0????38??V???H?????d????+?=?>????>IA5NA??????O'???y??????C?|?????I?J??1>N ???v?XMD????y??{?"^????X??4b?Z|?9E?h'}?L>??0*-??bO????F???G???&???? v? )w?Y4]mQ?@ P?? Y?86???o???}?L??{g$?_?;f]&b???g?????i??i????L/?l???LU?r??????? +?Bm???????]????~?F??#?\('?R?ao?3??Yt?????x????|?1?(X?VP/?+U?=?>rr{????U??>?yn?m*????n3? ?m?0??????d?????ce=??q{?OU???????i????&????=?K?m?i?@? +0?#???v?Fx-?j??DA??>"??'E[?^??????mI=???]??~*?????8/>??g??{?-???Y>????xWz???????w??_?8?? +a?'!?{?Fr????y??"??F?u???c8??B?D??%??.?5@?6?: ? u??Vu-7???aOK???s??Q????'?v????w= 6B`?u??!?E?w??%?IE????|?8???Y?????w? ??L?D\se\?=?q??2 +?"?{4>o ?(???lK]Dn?^ ?6y???? 8s_U??.B?= wY?y??gk?3??????7zG???:??! ??? ??cp? 
?.?Qr??7}??rGg???j???I??4;?e???F???#??-?F???????G}????g????d?w7 +???j$?????1d????????JS?4S??(?92?3??FB??Z)???K???$?K?eq?@?K?}?u???????????mW?K?G ??????V??2??????c->"????????7$?>?q???\?n?"???[.???w^T|??v?vP?/l??F???????????????????????????Q?l??/??"d#FN???=???MTH???I?]??B??aX +??Z?? ???3t???m?? ?N????T?|?';c? +??????P%C?T?? +???m??:?QW??VP????R???O ?^?gl1?K?)????o? ?CCI?+?M???[????F?Q?%?>?({2=?X???J*74]???" Y?6??!&[???f??? ?+???F??????|*/?!?mI?0=??x?????O?.????? +2????[????????_??????|5??@x???`??/ O?O??????r???iq#F???_/o??#?7??"?-??h?t><>??????n??`????{?H?+???|>??i???r?????:?Jx)U?Im??*.x??????i???#?v\????f |<|%5?e?l????Aq????Y????sf??s???8?_("s???n?D?B}??L3C???`??????R?p  ???}1u??k??Eb ??|???ar:|n? +9??v?????d??????????x?`?8?G?iz??R?????}?Or?1??]?{????>?w????B?t?'?z?|Y=?x\?A??I9?_p??A5???????q=H??6?????=?-;j??bV,????5?f?L????????? e????????z?*?o??>/??Q??V at Y?C?????&[??X???u?:^??xi????O?O?? U??>? ? ? ?DAAA?(R? +?????"?| +D?@?@?@????????????#?a??A?D? l6?? ?&""""FD D D D????&? ?$?a?0A?D? L&~~~~"? ? ? ?D???T Mt at t@t at t???????????+??v? m m mB?@?@?@???????{?^???6???iv???;?????@?U B?@( +?"B?P 5555"j j j jD?h?V M|?M??/????b?????'??l??S? ?????f7????+,H????L??+?C?1???ABw? ?!dP??wh_,???!c??kl?? ?c?!????w???????x????t???}N???"(TK?y???}v??b???|????a?o???a?<"?????,.0q??????????tg???|E??v?f?? ???? (?AQ??$??????!??4?:???????????f??yB'?=pC1X? -?????????B?5??+w???????kr??=W???uC?`???_er????G??z????~??o#??+g`7??a$??;??n??????< ?v??c!}???Eiz????&&?t?;n???_?'???????????Y-??= ???CpR????C??J?W?n????X????????h??????-J??fK??_??2&????$??W?E???vW^??z?,?qS?0?Oq?+??Z4?????-C????????7??+?????? [-???????6?3??e#e?5???cc????????c?????y|??|????a???^?6?G????.Q?d??{?????"???Y???^3???->p??Hq5?e?K)?j$?2?/?x$???e?????(????c?????uv?Q???9]???g??%???%??`&?%?k?;[???\???b:?????|a,? a?3u?')?k?? G??6? ? ?)??u\c????I1c{?g?b?c??? EL??(?L?? $%B??B8??h ?)?:?z\?_3??jEw??/X??????.?J`9?Vi?1?{H???? 
?l7s?i???iw????????9'?_?n=???;d?sz0G?J$?N????????:W?NP??z????????]?zS?.LL?Sc??y????N??N|?a?;??c???tW4M?.?????h?u?????????u??<H???e:(?.d???;??*??z?5+????Y??4~?w???#Y???|?Q?TJ??DJ?eUx)?^???R?y??:h??????#???:#?/?t????=??.C??5C?O?r??????| ?w?XqO??W?Js???/???????T?{bj?q??????QT?V5Z???Z???u??FY??R?t???#???@???bcZ:???&)qMW"?|??i??w?????[?Ro? _>? ???w;#???C??B???G???3 +endstream endobj 62 0 obj [68 0 R] endobj 63 0 obj <>stream +H?\?Mj?0 ??>??3???)t e?B??i??Jjhl?8????g?B6?$y?.] ?;%?c?1DO?????S?????+7U7?,$?????8&?? ?????????G!??#?8????A?k??8c,???8???_?? +v?????n?a\??d??l?P???@??eF?????0?oK???[)???Xi?ueo?} +/ ??n%?t?"5?(D?-? L?O? +0#jr? +endstream endobj 68 0 obj <> endobj 69 0 obj <> endobj 70 0 obj <> endobj 71 0 obj <>stream +H?j0?? +endstream endobj 72 0 obj <>stream +H?|QAkA?1???QXP??!??m????R(b?R%1V???;I??0;rR<R*?????'?j4??????A??ftf??s??}??}??F?Cc|??bu?|????]????-Ro7]f?)yZ???yy??i??UR???h9#??9????q?????=?2?Y(????8???d?#?y?*mi?z?Cinnf??l??'a?X,&8 I T;!'?!,??(s9??a????"FB?????????A?Z????8s=????@??????h/X |??\'Cp??]h?e???|N??;-W?#?w?F?|(???J[v{C?c?5N S??????%?????????n)P?aH?3[?I,???????H???x#J????ZJ?(9{??????J??jek?>stream +H?|?wT??????!@?*!?B ?&??{?U?? ????"?4?PTlHU? +? +J ??;????s?;o?????????9; ?%i?u? V '?k?8{x"i??????+"onn P???/???????^??????E?????Eq?Kb??\)?TJf?c  +S|?ICa??X*?????c*]g??La9????????c??"???????????? ?x#????x?"R??? )?U?)?`U? N??j????{4Y ??r(? {<??m?k????:?4?P,???lR??\???3?G?o1 2V0E3CQ?-?A?*?x?5?N?>???Y???2????????????x?E@yy$8???? 5??#?$?$_K5J?????dm1?r"X(v?? ?O??B?b?R?r??????????&HsEk?B??n???[?W at L??77?5r267?5U5??KZY?Y!?a6P[&;z{:?????????????????????????/???Wo??i? ?`L?R?V?a?u?)02?LZTQtyLu???'?= ???g????R?Re?4?-2?2??2?/?T?6?????/??@W?{} _l{9?$?\\z???|???????l7Q?J?*?????[??Uu???g??2??/??????????????????B??*?1????HyAf???Be?Q"V?rb,b???w% ?H?6284;zC?=???Ul?I???????"?????r?J???xu_ +M--4???L8?^???}??Ml??6 ?7J6?6 1?3?4w?p?????6?!??4?UTU???5]?]????? OB?3J?OIN%??K????D?f?`?s(n?????????;c??2?=??'.j?<0?ujezo?z?e???????7??.?H?9K7?.?Z??? +_?\??p? 
???.?????7??~?p??????*9???K??D"-%?U?! J?T?}L????R? +????? ???????9Nsj X?\???? ??Y?H%AV???N?+??"??1f?E?^?j?T)Oi?? ? +='?????????)??* + ??JM?e*)?Ajv?x Mn-?5?4aP??N?n?^.1Q?????????`?l*k&b?o?a ???Y???n?m??;?;R??T?-??}???????8%?+?'??@ ????}????Kr?N????]C?C ???J?D???(D4?6?:r?:J?@Ri????LD?p?t.??z1????/???j?w????????????????? ?"?7?N?e?|????????????8?U?r??????????1?T?w??????? ?4?\????????1?^????????S????????U??????>?|????????????Z????????`???K???B???J???h????????A???U???k??????o?z??????~????~?w??~?[\?*~??0i????@Q8?U?Y7 ????8?q??? ?????????????l???6? ???`? ?????k?%??????9h1? +??P%?|?~6???Z ~???? ?????????????????P????~??Z??g{???=O??9??6?????~k? ???z?????????????j?d????~2?6??f?????OL? ??6?^??"~ ???l??|??`??|??G??|z???|?}`?||?e6?3}BL0?.}?0???~ ???~???i??? ?o?C?%???!?'???6{??{??d+?5??K???p1D??????????F?????_???l?O???3????????2qP????[:???BDt????+?????:?7?????P?e???l?:?w???*?o?,?|p'????Zo????D"???z,??????+?? ?x???????????????[??o!??lY???(C????,p?f?-s?k???`?????H??R?t?b?9?????pn????X??n??Cy?[??,???????????????_??????????????m?-??XK???OC ???",????,????????n?4???????????????B??l????7W?????B??P??,??'?1??????p?N?;?}?o???????d???sk?????W?\?lB ??;? ?X?D?6Q)?B%??5???-?'G?9+Sn???????w?????}`?`??AH????l:?t??<=Jm???|~??)?e?C???u+?????]1??H/??????e??????w?{??6?~k?&??Q?!]??"? +#M?k???0??Q???Y???c??\Ls?Wq,v?Q?k???6W???p?kB\???K?????*?Kpg????C??Bgq????8?D??ez?Xr???B????d??z?}q???r?#s?w?O?i???{?Q?z??>??'?V??^?m=?TP?????????4??` ????x??anA?|?:??_8 +&?g??/?j?A?9?? ?p?M??<}L??? ???T?_h??ZT?? U?I?p?4B?[?7lfg3oy????B?8??eQ??L?c??h>Q?NY ??????d?A???X????T?G>dT??1??w?1X??????????J(N?v?M?F!????? t?'0wq ?>Y-1R?e2zc?8?Q5nU???'???HhI?w????J???I/??H\??e e?(???| Q?_cZK?jN??gz7`^??%b????????6{U7?l?>????_??m?.??W?????M?P??,?~??(_?3p{??? hB??J???"n??$?w?????????l?BG??5Q?g?>Rb???8??P??S?C??? ??5N"2q?? ??????EPM??:??X|???p???Y9??j/5 ??FCK$???5M???? 6??????????W|?????t*??)??J???6? 
+Cf?U?????Y???X????I?????kEa???O?-???YR??z?b????(R?{to?U>W2?-??k??|{ ?V??V????VP?86??8?HHsH???T??I??H??????po?W??1???Z?n9#????7??????w??h?iyA??5???Vog??W??I?^?xj??4?!???kB???c?9^m&YzrXS??i? ????N?U?I?F?g???=??!??>?q?l??M?G??6_???)S???A???J??`?\????*a????H?s?rK]??r??q6h????M5*???.A^WzJ?B?K}d?)&??%c>?M?????????-/?W?n??$??C??R????JY_?M(C??????B-??'???6]]??~???x????!??k;???^U?2??yu?????????Y9?GY?;?m}jFntOiij:??jdf +???????K???@M?3T?+R??)?????j??k???????U?g; ?????;??`?? ?&???;?+5???s?Dt?0F?S?-?g{tS?l?[M.??z?~?l???-??xi5=b?Gg+???Kb???oi?2??XI%?1??E??}K )??n?(????? ??.?????'If]u~D??F??????i?`K[)L=???????Mi?????? C??|?oX?g?????RTo? ??A?XO??????@=??Y??B????????/:?[?B???-?P???/????????+???? ?(? ??????????????-KU????Hd??d{q x|t???fp?)???M?????_?????'e?A?\?????Z???1??? )??? +S????B????????{r?2r? "??5??\ ???N? L?d?t??5?7?4?M? +??}B +?z9???wB??????zi?gx??f???t??'K(??<??R???$W?Qw?D"6??C??6?O??????u??Y???BmO?j??f_)??J??? P????L???%k?}???+??0?%?????LrH???W??b?K??L???6?]O]????F/nc??????6??L????b!? +??t? ??B??`?&???=8?q8??W??h,w C??????C &F=???*wP???? 4?-Y? +GcE???????P????M???EJDO??? +??e?????? ?CMn )R%)?3?q???eg??ti?_$C|????A????|??$?Z???j x??????????????Q?d?????M?????'"K[.????f? ,?????????/??k??`8??s?Zx??GBaw?6?jN?(e?6??t=???U?????yk?????????Y*???lKd#g??MB9??7?Y<; ??:a8r?????2?l9[?3m9??R?9 ???Z?Ebp?j~^???"u??%Sa7S??w????{R/??g#?#o?J??a5??p"+SI??_???T@??? ?aFY? ?K???wb?(??X?s???D=/??6a???.t??Z?????]nN!F???qQzTlKlE?^? ???;?*?Dn?9?G?k~z~%?_1(?C??x??RF?UQ????b?h1?,\+?????)?????^?{?4?Ox +u?8?:?XT?A??gU%:W???Y?? \~?J2?. I??z??P??kQ??z^?0??[??o;??F?p?T?=#h?Z??|?&*k;nU-O4????\?dh??7??s???1???6????U?f?Dp?????????%?Kv??Q[r??*?? +P?1? ???????<%?2?????$?A??YR E??K??/?? ??C5??(O??E6?[?~???&Y??D?d???A?E?7??#?9???M?*TVU??$?(*??DvGIp M,???????14 M +?Dd8 at kE?>Tp??*?|????T? +4d?@??????????$$a$H?? +?????? ??pG???????9?&??2??!%c??4?????3??????( ?$?F?eH2">?r???????u4?B???????x??1???v????U(b?K????x??b? L?r? ?=????Z????6????f?????y|\??n./E ?a??c? ???????u????X2???C x??:??i??W??? 
??6?\?G???[?;"(I???Irqm5??p?m]?~?? ?=?qu???Pl???#??\++??J?A?"k8d0???GX ?? ??-?>z?????? ???SS? |N???%,?g?????T??? ???A-??D??"$????????2G? ?a30UE?g??W?' {uz?p???g????????&E?8??????AJ?4+???VRo???e??-??i???:?amFF???25??eI 8??&*? ???u/??7=?????Ov?U??????E^Pp?%A?$???+i??????Ov{1W???FF???r??P'LW?i????- U??X?c?LZ?{p?oWf?????O?Z +7??-4?????????U?h???)u=h??J9?yi?)~=????K??????`I????l?g??%???[????5/?!?$&1?*?}?????????|/?N??m??V?=_??}?#Q????KC??n??5J?(?,&????x?5??g?p??{????"mu??? +_n???&Q??;??C????95A?m?&??????& ?]?!{??F??m6?E??_6?^?Qb????Cp?1??5+????'?U???? ?K y????{8??l????'^?????QA?/?YCw????5Q?A??'??????i? ?????p:??v?c? v?V&?{w&I?w?;??x?-O??y?]??z? ??!|????jp\?F~]c0?q}?V9??}?I6?\}?;??F}?-???~0:??~??-?Y???jpn????c?????VC?C??IP???8< ????.7???}??k?S=????????Fpn???2c:???VG???Ih?s??<7????.??L?? ?????3?k?????p]???fc(???V8????Ii?Y??}^Gz~?}y-o?}O Z?k|l?d{J????{??=??{????|P?t?|??3^&}%?eF?}???-?~ ??,~????z???Bz{????z??:?{P?sT{??7]#|@?kFW|???-?} ???}a????yH?y?1y?????z?t??z??Dr{ ?F\2{??{E?{???-?|:??|6?,??x?? +?Vy#?O?y????z??p?z~?w[`z???Eb{\??-?{???{>?.?xW????x?????y'?#??y???o?z??Z?z???Ez???-?{???zw????x???sx{?~??x?????yL?oy???Y?z7??D?z???-?z????y????w????ZxU?g??x??L??y??nVy??Ysy???D?zF?d-?zP??'yd??M??p??x?q????Qr?????tms?*umW???v?@???w?'??'x???*y????{E??_{G????{l???E{?l??| V???|h at f?O|?'???|? +??|????u?+???????l?T??Rj?????U??W?!@'???(k?&?? z?U??Y?U??????????g??}???i????iT??n?????,?i(???? ????T?Z?????#????.????|f?M??hx???T?????H?c?8(??????d??????u???}???*?_{????gp?p??So??m>???(?L??~?????????6?h?a???5y??b?Rf?? ??_S?*? (*?L?,*?"K6????,+??????^?{y???e/?!??"jq?;??_EO=q'?U??Z?????J7?} ?T=????5????>a?N???*ym?????SA???????n?c7?!3%;8?]?|W????9H?????B???YI???{???6~?~Gc??????@S??????Ym?,??n[?| _?'?????-I???B +y??RvPmq??]??O???X?bN?D??|*???h???P??????? ??}?H????o?_*?D?4??Bf?.D@?7#????0D?r^?D??`??~??8y?4/?wBX???#ga?d???????k?????Wm????{?S??qT????S?#shl???u?|??p?@$o?????9????????? +6vI>????K]k?H?H}???L??m?m????.bv???????fp?v\? Z???X???????)&7????G?aL6????u?7?Pz????oZMaC?'K??}1?_?'?,?u?8?LCT! 
v??K]??QZ?? J???c? ??? ????f?????fpT"I=?k?y?N? ??QSi +??k???9??l?}??b?5W?Q?.s&6v5D???)????x????7] ??)[???]????(?`C??>???1???Hja?*???d?_?>??K???K?#??DTd???? ??#???Y?^??\??????g`d??IkA?M?z??%~?+??????=????? +?? ?3??????sqsU;d3]??'?s? `,>F!?????x????e??g"I?r?8?d???;???-?=}? ??KoH?v?/?N??y?????O?????w?>???C?F???lW???U2?????]Q?*Z6p?,\p??Y?E??92? ??_???W?j?TC?"??K w???A???iJ%?????j/ 5???d?P???C +????d???7&??7?[P?G?hQN?b??h??x?tK{???Q??????t??D-?4???P ??q?V|??k?^?E??FX???z)??fH??G?Ujp[Y?X?j?f??????????x/?6^??L?G???Z0?X???6????A??R%????c\???pe3??:???H?T???????[B?B7?]?c?w?y~r??88J ??8?XSx?l??P??i???????? ???R mR)z?ITE?(C??k'?c????0B???A??\2EU??4????? +?HZ??Oq0?C8.uP???)??X$??f???KF????}h4???JH?h.;?!3L?~??i?"??%JM?.Mq G???n??????.? +4{?*T6=??????/????1NF?Zh??????nQ?{"d????g??Hu??#+?Y?|X;p`e?=?cb??/ j?????????U?U?w????`M??6R??2???l????60??j)????!??{?!?P??G??e??P?Y???>M?????B:??L64?V?A)????-J?1????!?H*?q? +e????qY?????M??i?wB5?w?y6=????)??-??M???Q?]?????U?*q[l???r?nD?us?o???u&q?y?vqsEa?w?t?Ix?u?-?zvx +?|%t??n?xt?pWx???q?ye??sQy?w?t?z~`?v4z?Hmw?{A-?x?{ ?zlyL?@l??Y?fnq????p????q??BvBs\??_jt??2G?vT??.!wd??x?}S??k??3??l??8?`n??Y??p??t?r5??^Ss??pG=uS??. at ve??xw????/j ? ??k????Zm????o???shqL?F]Sr???F?t?? .Su??<?v????iY????k*????l??d?qn??pr'p???\srO??F4s???.[t??q?u??!?)h?????j??V?lh??? n:??qp ??[?q??1E?sS?Y.bti?{?u???hi????j5?3??l???m??[p*o??SZ?q`?mEar??e.]t?,?t????Rh3?/?_i??v???w?wJ?x?w??*y?x?m?zSySX1{ y?A?{?z?(?|?z? +i~?z??uN??ev2?t??w???Ux??l{x??GWy??Az??(?{l~? "|?}???s??B?(t?????u????v???k!w??7V"x???@?y???)zK?+ ?{????Ir??{?s????t??~u???i?v??*U at w???@x???)-y]?P?zf??Hr ???As???nt'?|?u3??h?v=?2T~w;????x??)Hx??\?yt?,??q|? ??r?? +? s??/{~t???g?u??1S?v????[w???)`x?5?x????*q?z?br-?T??s8?Wz?tC??guN?'S%vL???w?`)gw??kLx?%??p???Kq?????r???y?s???fXu?R?u???>?v??)Ow2???w??*???j????lB?s??ny???o?e???q?P???sY;?t?"??u?R? wh?C?u4???u??[?v?w??w ?????p???n?(2?5?B??!?????Gc???w?f~??g????Q?D??J??Z?I??n_?]z?|???????,F}???????????z?=?5X ??~%???Af???5!!bZZ?d??V?Q"?Y?H?>~*?'2Z(C??N?[??>4 ~w???? ?{?s?d? 
?Y?!?G??R?y??????/B?5????f,D>?e?=???????G??3?J/??D????_?z{6RO?T??$U?? 3?H?+???y),]?????V????E????W?????????G??????}8?v?M??u{???pw?G???L???.?&???u???i+?zy:?CO???=j?` F?i%~ ??q"???????`?{?n??#'5?3d???E????? ?Y?/?????!i??????:s ??%?J|{???w??????{????`j?P?tB[???T??x#??g?9`?!??Od??t?VN??[W???4?'a?????$Ti??22*?CD? ?Q??CC??-????:?;\K$W?|???5??z?????? ??l???.??? r?XG| ????O`.c?]'?????-???T?*?M??????R???5???f????3%????}??_@??n??T#9h7N?`? +??&k?gY?~e?{n??z?7?6?U\?$^??H?q??T7[??5Y%???????e/?iGR?????P??E???r?????'??:&??????a?'??m-A??Bt?????}??_p???????(????e??UL?9",?z??????-?#38??F????|?#??m??)?K>:?"Im???N~??' +???zL????-?T,??S?K ?d?O~K?????k?RLQEO?oonT_???(?C0???=?'????l?%a??jb???w>????{?%?j?????*v7(Ca?Jy??G????\7z?6?????9?E?J????l?????????X?UJ?I??)z+/?3S??_a??iq?'??2??O9?SM??????=???+>G{*N?>-??8g????dIN??v3???@?g??(?=???????s?U?5?Q? +;M???3e7?Qz\???*c??-??`???g??`A?`???a???'??????Mx#????b?????rYjU??^??Z?b???-??T?l?@??6??T_K9pr{Amh????????oK7i[????'h?z;i??o??o?~i(???????????????>??2??0x?w??!{??m????6?U?t?????g"?o??Y??3?ALG??>???0_?F?7!????^t????????s???f[????Y?e??K??=??>??3?I0i?>??"?m?,???A????sK?A?4e????4X????\K]?}??>?l?L0k?f??"k?i??????!/????r????e7?p?CX???K???3=???? 04???"b???d?????????Tr'???d???6W?????J??}??=??Y??0?G??"C?%????????S??k2?ig^z??j?Q???llE??n47??p)???q????s? +x??v!?u|jw?0q~]???rFQP??s3D???tL7??$u?)???v?.?Nw? ?? +y????}ii???yq]:?My?P??!y?DV?,zD7o?oz?)??{_??"{? x??}??~?i?@?:\?????P[???WD +??? 7H???)??o??C? B?b?????h}?+??\????O?????C?????75????* ??3t???? ????)??M??g??Q?m[?????Oz????C_???(7????*/???`??Q??? ?s???gn????[-?>?.O$????C???j6????T*,??Z:?2????????Fg???Z?????N??J?vB??&?z6??)??*?J???R?N?????g?G??f????#Z??'??N?????B????26?????*T???????????????+]???i at RN?5j?G?l2;??5m?/???o?"???q???CsUk??w!??}%]M?~p?R?*q?F??r?;??s?/???u"???v]x?\w????zm?}?] ??x|Q??ex?F??2y;??Gy?/???z9#X?qz?5??{? {?S}:&??~?\??:?Q???wF???<;???/???#???,???B +N??? +?\?????Qm????Ft?\?;p?U?a0????#???Ah???? ?=???????\M????Q:???NFP?@??;V?3?q0 ?\?;$B???"??R?? 
????#u???U\???Q???fF3?X?7;G?C?=0?d?q$^????W??; "?h?p??:??[??o?EP????=F???a;??~??0 ???a$Z???????! ?c?v^?m?O[?????P??i??F??C;9???0???$s?+?U??c?? ????9???????lSf??m?ho??o?j???qYmp?so3ZKt?q%B?v*r?(?wCs??z?r???iWq??kIr??`m=t ?qo*uLoqvgYr?waB7t?x$(?u?x> +x?w ??f?}??~h?}??k*}w?zmK}?m?oZ}?W?qP}?A?s}n)+t:|? ?wb{ ??d??%?g5????i{????k???lm???V?p +??A$q???)[s?; ^v$~???cz????e??X??h8?j???j?l???Vo??@?p???)r'???u????bp?2??d????/gB?a}?i??1i?k??U2n1?@;p%?)?qg???t>???a????%d?'??f???|;h??bh?kG?ATpm??-??o?? )?p????s???3a?`??c????pe???{-h_??g?j??TS?m?7?~o??)?pX?v7s????`????c,?$?ee??MzLg???f?j^?YS7l??#?n???)?o??O?r?????tEe3?BuHg??|vPi?{?w]l,gxmndRy?ps;?z?rA#/{?sB?~?t?(qmpd??r?q??.s?r?y?u>te?vuSQw?vz;?x?wr#?y?w??|?x??o {??Fp?{??q?{?w?sk{?d4t?|=Pv,|t;wm|?$x^|5 n{D{??m7??? n????$p]??v.q???b?su?Ot??b:?v0??$Gw?? +?y?~???k??~?m??n?To&?zt?p???a?r\??NGs??K:Iu+??$}v?? ?x??F??j??m?+l??O??n8?OsK ??_S`Q??H!?)'??2dFQ at f"{o????yy#{??,?????!m?\??hODZ???Z???????W|??-? ????"?????????? K^????U?^=?;????y??.????'???O6???,,????[ +&?BC??a` E?9Q??8???????z^ +???????????)???3??????D??r???{\V8?l?????{8??i`?_???q?8???????iWy???B???oD?m???t??M??J?q?]=N?Fb????!?E??U?%?C?????R? &?Nc????>x?Dtl????+?<~?? 9 :?w?6???1p?8;?<%? {?X?g?U???rc?K? 8dM??)??%r? ^?? +hrJ?6i?U??@??????i???#?~??F?92??Kz??Cl???pW?????????? ? %-,G?????h??{+??????&??ARa??K????: X??f?m? ??M_np?E?c??dL[?0??K"b?~-?o:???Q????[/?o%.?~?\?p?16??7??????l?O?h"??7???'??????_?? i>??x-/KR???{*k?0??~?ix??i?K??be?_?W???# ??\I???1 at 6\??? ??N? ?N97( ?"?jT?????6?`g?+???Ke?0???(?}??fmQ????TD)f??e???5?????} ??j?~\?!u??KH?f?????2S??T?q%?:*???AF??7Ix?G????.p-????S??-_???9?5C?? ???????H????2?G5?.E??q& ?????C???\???V|?vP?#p??/V????QXh??_?C???:?g3?BK?\?L(?h'?Q????A?:j??-}1/???%L3t????B??#p???'??E?'?e????Q? ?tG? B????????L?*>???G?]Oh???3%T|EC?fJ A?? +$??z??@]U?????"F???`?{[??? ?u?``^?S? ??r???:/???Lm? 7%H#??4????5?I????*?v???G???+??5?@????L?PC???d?%?l??h????z ??=????????,uR?T?I?? +???:?`??T?OI???D~?^fx????LBa?y??^'?U????_????^R??{9I?3?F2??>a'?? ??? 
???ty???D?8u?&}B?[X?7I?i[nq???????k????? Q?9??fV??$?=?&?Ee;Eo? ?H??xF\whr8???=?????|@??q????w??`?/7?b7~k%[?v +?????*?)???????=,?c?????(?(c Vf?;??R???j%?cdd??[??w?\????{v????x_ ?W?{b?in ???ck^?~??u_???%???=????UgJ7?????]?g-??r?2???^?cI@? +??~d:?j A?? ??k!M:?????w?3?Q?{??@ok-???u???[|#N??]08?+?fuv?,?H????4^x?rJ??,q?%2?x?????Tl?4??o7(?????/????I3R? T??GT?U)>t?@?]??Le???4??]?a?????^?lK5?>?T?9kFM?!????????.?GG? +\????,?vS?!A??0?q'/V??U?? ?? ???U>?? ?l0?????"??????l? ??? s~????e??R?NX????K??&?">S???v0??#??"t???????ly???oC??cb ??e?T??Xg?G???i?9???l7+R?nt??Ppq +???r???z?m??Kl`???mNS??;n?F???p at 9g??q?+'??so +??t? ???v?K?{?l??etg_???uS?du?F5?v?8???wj*??xRO??y i?sz}k?!}uk???|?^??S|?RA??|?E???|?8??_|?*??f}???}G ?G}?`?p~?j?????]????Q|???)E?K??89??*??????+?N ??w??-??i?????]*?#?!P?????Dt?H?g7???B*????3???5N???????i????\??>?SPO???D?d?7???C*???????????????_????h]?.?e[????@O???EC?????7w?`?*??#???????/???j??+?g?????[{?/? Op???LC{?.??7?????*???Mm?4??G? ??????a??cV?xeJW? g4>\??i{1???k?$K??n ???o?v??s???{?a ??k7UP??l?I???n=???o?1???qJ$_?Lr?F??tTv??wf??|?`3??s(T??Ts?I??t?=s??u?1P??v?$n?5w???Bx? P?Nz????}?_j??z?S???{H??S{1=?{o1?{?$}?f|54?%|? + ?}?h??~?^?????SJ?h??H???t??0????$?????????D ]??????xT??Nb?JR??d????Kf?5C?2i)???k^a??m???o???Xt??'|?T4??j\I??\k?????mN5??n?)??#p???rZ???s???Mx;??}oS???q?I??Ur??]??s?4???t?)???u???v?v??x???{2?b~S[?2y8I.??y???*y?4???zQ)??"z?K??{^??{??t}???~?R????ZH??!? +>????4??x?* ???????????'???P???R????SH????_>??n??4??2??)??2? ??n?????????k??d??RW???HW???x>t?g??4g?#??)???m??C?U????B 1????d?i?/R????H$??R>P???/4Y?E?=)??6?{??Y??????> ??????????Q??3??G??|??>4??? 4N????)??|?5 ???????? ??????? ??g_??i"b??bk1e?|gmEhYg?oRkR?qGmw<so?#bs?p??y%p???c?k9?Ae?m??hIn?zSj?p?fGl?r6Q?o#s?;?q t?#?r u]wvu??`zv???cwi??e?xxjhax?d?j?y]P?mDy?;{oUz>$,pgy? 4vy3?^?%?C`?????c??:v?f???cei4?qO?k?? ;m?y$qo~x +?t?|???\:?e?_.????b?Jud???b.g??xN?jw?:?l???$?m??? ?s???bZ????]?????`??Rs?c???af??}M?it?":;k???$?m? ?r??M??Y????g\????i_??OrZb???_?e??lM3h??9?k ??$?lm?% ?r8?M??X?????\? 
?0_$?@qVb0??_*e,??L?h??9?js?~%k???Rq?????Xs?!??[???,^??#p~a??o^?d???Lg???90i??4$?kt?t?qB???n?^??:p2a???q?d?q?s g?^?t ??_L???.e#*??o?????D)?]:T????i?yo?5o??{s5?L?t???"r~?????r-r?+??V9?????????????^??"???????k????G??w??C`??????????8?4?$??\?5???^?? +e?@6u?Y??9r+?&x?? $9wF?]7 ?E? +???+E1???U?R??9V?u?'?1??Bf????sN`??UyR?BTE?X0?W?????Oe??WT?X?{r??Z%???Eg?og9?????????Y.??wh??? *|????]4 \????U:??I??y?q???J????u????d?+"??H)???*????E_grLqZ????d???%?O8????VG???o?0q?????4^??????6 KH89+?:?r????p{?]????\??????????hl??d ?'???\s?????fu??ce?F?H>?????9????MH????J??7?B????d??lv?TGHU??0?V???,F?Z????????7G?????X +5Do??)???XH2?w?2?? |kx???W[?/?~??f??????@????F?*-?r?7U?c=FS?Z0%??\?y??? ?:?????At?n*??3cIXa? @??tr????5?T?li???9T+@??A?sK?D6???????kc?AW|BN????8????DC????Z?????6?: ????1(????&*i?CA?-?C??e?2?\];??/?_e?J???"?+??>?J??m?????M?G?Y???NT{0????>l??B}?;??M?51Qu??Ks? ?2~?2????V???N+T?{?T? 1???????L%? ?G?? u]?'B?R?^z%n?D???}Y?A???a??+????.??I??L????X?$?UU?&$?f??XR?"? ???????`??S[??d?Psh?'??!;2 ?? ???x???????)???'?E8??D????? s????9??;V&b?Y+1?{4[?O?????:?y5??0n?u???????sU???eb??G? +? ???Jz????a????j???zjr???y]????bQl??mE?\??8|???+g?????Q?6??????qi?????]???P??S?D?????8???8+?8???i?? ???a?5h??S??\l????PE???jD1???+7??B?.*????G??? ~??9?~{??e??z]yYq?t_?M>??b?@???eV3??[h%??pj????l????p???z%dN??e?X.??g?L*??i????Vk?3K??m?%???o?0??qo???t|?{ac?TnZW +?o?K/??p??6?-q?2???sZ%???t?u?(u? K?IxN?`|pa??av?U???v?JK??w?>??Tx2M??x?%]??yu???z + ??*{????}?`???~}U?~>Iy?b~=???~1??^~%B?"~??!~% +??e~???g5_????[T"???dH??#??=T????1???0%Q????I???" ??? G?E?B^??t?.Sz???xH9? ???? ???????[?^/????R????GG??:??|:@T??|P5??,||*???|?.?|???i}2??~Y???'TU????J???????K?d5v????*?????W?{?J8????k???iW???S?????I~????f?3?H5'???7*v?o?FI?H?l??A? ??g??????S;????I??????K?4????L*]?}??-?S?D??"?x M?x?f4???gR????H??N??>????k4???*{????p?v?T??A?] p???i?`??L??\B???^?9?aY/#??d#$S??f?O?"it ,??ld???r??R|eKf??c?B? e?8??`g?.??j#$7?:lO??%nP ??rp????vA??}?J???kmAs??l?8(??ng.|??o?$??q????s c??t?c?Ky_??}?J(?~r?@???s?7???t?.J??u?$??v??6w? 
??x??!{??0~?I???y?@|??z,7[?Iz?.??z?$ ??{^;?;{?\??|???Q~!??I&?Y??@????7???C-??r?$?U???????"?5???????H??1?????U??6?????-??9?#???`??.??.????????U????Hm?E? +?~?e?z6????-??9??#??????!?!Z?y??????v_???H(???G?E???6x???-r?j?(#??7?b????Y?{l~o]{?l?`t{?Z?c?|HMf?|F5i#|A?j{?Ts?{1??S??9??W??W|?[??kU^???Y?a??Gze?C4?g??b???h?? r?~???R???U??'z?Ys?Mi?]??X?`???F?c??-4Zf??j g??a +uq?????P?????Ti??yfX%?h?[??@W?_o??Fb???4e??J Qg?s ;q???O?????SY??x3W#??g?Z???V?^??1Era???3?d?? kf??? ?p??f??N??_?mR??)w6Vd?f?Z.?=V5]???D?a\?3fdL?? Vf?? Rp ????h?X???j?\C?r2i;0s?k?t?mC?|?p???d?c???f?f%xih?f?kLj?U\m~m,Co?oK/?qhq!?rtr ?zt??Gan???c?o?u?fqDe(h?r?S?j?s?BmFu?f???..h??`?j??^ ?s????V???|?Y???mr\???^(_???N?c ??>^e???-?h????j6?? ?s?????p?X}?q?[?o{s _._$t_b?NMu?e??`???t9b??ne?e_?'V?g??Hj? 8?l#??(?m??a&p???w?????_o?~sa??d?d\??V3f???Gpi?8+kE?q(?m??ZoD??}v?????^?? ra#??c?c???U?f??F?hc??7?j??t([lZ??Un????v|????xxXKs?y[?d?y?^?U?z?bE?{weS5||?h\#?}?k:? l??to?xt?b2q?u?d}b?v?f?Tw?iDD?x?k?4?y?m?#?{o??}[q-??bw+~Hq7k?o?rom]a/s?n?R?t?pXC?v.q?3?wtsh#?x?t??{u??~?y?|Fn\u?m?o?v%_?q2v?Qur?w_B?tx!3?ubx?#?v?yshx?y?}9|_zgl"l at m?~?^Do:~ePIp?~BA?r?~32?s?~.#it?~?wc}??|~`x?j`?Hj?l?]m???O:o=?A*p??02?rB?g#Zsu??Cv???{?wNh??yi?j??`[?l\?iN_n??@|o??2&q?~#UrI???t????z2?\vg???h?i???[kX??M?m???n???1?p3?s#/q\?.?t*???y??iug??g?h???Zbj???M lE?o??m??>1moz?:"?p????s???y?;u???X.h ??[RZ@??^?LB?a?=??{d?.R?$g???]j? ??m??v?s?} a?f!}[c?X?}?f6J?~"h?????w?0??DY? +????5?y?|I??aU?)??u??R?Z??????+ZJq?r????E~???????????Io1A:??N?? ?? ?|???G?m?1?K??+??????{|6???z?=?????GdW?M?4?????????1T[?????0&>B??l?.?f??? 4?????4?y?F??j?2)=???v?!?`Ql)n???????l? 8??\???td +??oB??@?1????3?H?^???&? y?M? H???? !?h?#?????I?<?J?? 
????B?f??%??5?E??^P????]????P?"?????x?????*?uj??f@???Y??L???;f??d?:??4 ~S?3????Ns????????|??"????o>???v?,z?!?[Xf????5????i?-??9V?@ZJ: }??!?"ep??3?&K??????[?_????????_S?p0\*B#"4???(?LP?p&??????xy?_^?;?!??0E"n?V????q???zL,?gQ????;I?0?^'.?}?m4:?????T??c[6zw?4wx#??????2?D??!????3?'0vt?I??^????zwA?2?%????O;?mRU-?7s?w??t???&???????V?????????M>Y???2?8?>`????SDf?????D??????%???Y?b>:C9?L???r +????k???R?.)?-????b)x?eL????v??; ?Gp??r%2??????fs??z?????$??4{?O8????X A7???0N???????%?1???@0iS+???? ? ?{??~???aaI?!%X?ckl?a]?E?}??K?J?,???t??????&?VK???Lt??UC"=?)wQ?WpB???k? ?~??Q??(?F??u@???e?????H?`?}?:?~??TT?jqOg??~l.?&?c +q??p` 2?Lt??2^??I??W7??E+??u"?m5U??L!???=q??????a??4? ????)?????1?.20t??CS?? L??Z?|???????????>0?%,?n??????????4Z??L?f??3? ?j?x?N?^!-????c?Hb||>?????????h?{C?*~ +LR7H1????$?.?O)?.???r?gl????O?=Bs???v X?:?B??a?M9?MZpD? X?^fR??+`?zq:B@???-???? ???Ij\m;???\)8?!?w?z?hM?Z??_&??S???C????L?L7$?c?4????3???c+Q@???q?0?? ??7???7]N 7v???i ???o???S?>?K?@JRo?x7??k?F?x????.?????:=?i |M?h????{4/?????ne]??Q??a%x?*???E???]??|?+n?????????8?????)c5'????? _?L?6????b?"????W?{?5:?5?k?k???q?y??????-kVi??V???7e?9??? +4 ?Q????`?!k}:? `?T??E?????C????J">9?????????0f???^tH??k}?ef?????{c?_?1??v?a??l???)/9Ev??-F?0?C?????yz??E~?3?|?????Ut?K?YB?r????s????R??#?Rv??5??a/F?????et?????In??O)????&??? ??&A???????C??????3?t?????Q???*YU{???????d?# ????^???g1U?????l??k??p?9$?d??WY???T??????R?S????V????HH??-??!?CQ?Zz??{??gh?d???1SXh?????[*y??7????e?v-? +? ??]?8????i? ? ???5n??]R??? ??18? ?k???h?$peK~|??y?? ??i????+???XP?*??j???(???? ???8??y_lJh?5?G? B?N?mC??2???zJ??????"?V??F?? ?Z?M?5?i?????U??@/`i{&?r?z>[\?C????????? :??AwsD_A?{??#????t????#??Q?jW??U#??&?uJo????_SL +????"v??A???}%Gm?D???iG2k??o}?Y??O>????"Kx?????5????_?(?;?jU??+?Ke1q?W<-?0/vQ????u\K????o????? ?I???????????/??x? ?K??n8#??Q???Z+?(VSmN?s?a?,????????o???? +Oo?????6??xY???!?m?????-?k?Y???8??||???U???>U?!n?g5??8U??Ye46)??{VL?QD????E%?\????O?y?z]x?>?????????1?Kmi???~??~?????"A?">??6?ai????:?B??[*???????e?^? 
?rM1w??1?C?????-??bd?7?J=??s??F?%|??]?AA???j???c ~At?R?O?/???N????J hZV/R?T+??1???t,???NV??????00 at Qa2y?? N[Nb???????Jg$.z7????)0$???b?,?f?ROP?;??~??*?GEKJAHT?S?D;?7T????????-H;^???$)?Q?W%??1??@?Q?Nq??*??? W?z?^<??2|ax?6??t?5??R??/?0???? E?s?'z]??b?[??????????~Jq????*????^?????T;kS?????@0??????.?Gxd_m??nOj??????????;/????:71??o???"|??%?Y??;?m?@?g?G?k?v????z?v???\??I???TR?????B},??N????y?|??R???q?r+?^Zy?_5K???Wv??Z^??D??]W??& ?^@? +???2? ?o????D?D??Y??7_??w?h??@??t???b?? ??Q?Q?? 5??d?/????5\?u????*4??-?V???/?^$?Z\t-?2v????????X?Uo?V0??e]L????|?0?F????|??? ?i???`U ???n????T??|??s?M??S?&????????m?????:?]98?:???%UOC+???? s?#?Q?B?#:??????]??5???e?5?1-ZJ?$a\;?2Ni?m?M???G???M?h?p?|? +mb?) Kx +?1?Ic?@????????9?p??? n?? ??Y?(?>???=?9R+????By??+#??????(?C??X\?L>G(?3-??r?n??JmP?6M?e?jj???N?o9?V5ao^?zW??aE6???f$??h???y??eh-g?>~F???>e? ??1??{?NJR???D?n???@???J??5?.??6??8?z?Y?g;?C1?P?");C??s?qt1??N?O +???V????k?e???+ ?s??39?*?????????$}x????UG????RS?8?fn?*\???;??5}+?0????)?;]d?A?;??f?c?IF?a?~?*?4?;????????w?r?? +d????I ?t&??x?t??g????i?4????u???//?`?uXX1??5a?r??*?&??-;?D4a??}??Y????[??we6?VxJ?7?TL??l???? ??r???i?a???1?Q??>???6Y?????i??q? 3Y'?e??Ds{p??x^?G?Z????Z??9O????8?BKx???/?&1D???p??O????-??q< ?@??uo3ME?##?M??fx???m^??K_?^M'?~)T2o?_d b%:?}?u?(?DM???? +Vh?P???@?j7SVhW?????u?i???v???s???????u??Q??2#????)X]?^?gl.?KI2??Y?@???3twCq|??a^???!? N/??|?u[yp???AT?????T?}?u{Y?v?????8?????$?ka?????q2????P<4?r??O6???WNtp??U?jr????? ???ZO???????0?????h???Z3?e?g????I??????y-???N`?H???T????i0Lf??7???=c??g~???N???M??R?RJ?Zq???????1???W0?}?????5 ???u(??5?????*Q1?s??????=rMT?5??? +?}??? ????J*?y6????????e7?'??S?? B?R at Z/&????@?Y?Pv?m?V9)h?? +?J?s?X,????E? +R?O???i?????"I?r? ????08b?P?!?M??Da???.k/???q???9s???? " ??????W?j???????=??d2????qv\?????????0)??f`O???;?ZT?&s???*???????I????i)JoI? `? &?[uL?4???%????#;?~???W8}i +w????????vK?1S???????J9; *?1! 
x?????D?/iA??K?uZ??r:5V?.?$?o???)??-???J?PCC\?C?g?Z????F???o???>???????l(5$Ct?2:t3??A???*]sF???$???????XE??!???$?E?X:??'???=?K?7.?2`@???e?U???0??????Y?v?]y(???b?k??.^ ??: ???IK????m?O4?cb??ZZ?JE????]?F??7? ]? |?????[a????$Q?g?UI???)?F???e?va?\??c?* R??A;??@?C???_A?`?'??-????b +?????l?U?o? {?4?vi^???B(??G*?k?????????B? ?k/fu?U??{??a?L?F?6?:?????"7'HXN_?{m???Ol??m??m$J????R?q?mh?4&?? ??[?c#?????:S9?????B?}??r????HW???,???('?k??t? ?iE?Q(? p?C????w?51?=????,?9A??q???r?????d???F?w??(???lWwL?/?l +?@?P?,{?"S??{CR????%S@????D??(E6(*([ ?????L^^?.w??%????F???>?A?t??hy?L?3?M?\???ai?H? ???N ????b?FI????x%J?$x"E???y0???H(?F????^&j??/# ?NX?tM??k(y??E? +oc??K????R?,Cl&??I ?G???h? ?????(5?J????E???'?[???V??4f?H??)~X??#1??$?? b??D.??X?D(?P??DVv???D?4??O??J????rH?????????? +?%w???Q$????c_0??6ERMGfN?O???KMeB?G?O3J[N??7???F+?p`[???y?????f?.?A?????U???q?????;?-??T?  ????BEO????H4IK%????2??Br ?}?E?!??J??y*??!j??????Z?????:????^??K?K ? ?????\v1?4?7W???????????@?t??TN?N???.?{n;?;;?{^??>?~??4L?,Al??!???H?0?p??H?+?Q???????|t)?*??7??_H? ?&?%s?H?j]?J?I??H?,???n???3????w?*??&?@???VH?XX????t?????e?{L???* *?B????T????<\??L?kTl2jv}???J|?????????/e: ???Q=???}????^? 2??R6q K/?????z?vm??=???F3^?q?n~~478?e???,?"??j????{????w???_??M6??2?kw?w?????G??????????t ?5I?/?7?F`?>%?,U55?AKC;IWB??1 0?3y?????q?{,???PF?"[{*?#?$ ?4w=O +?+?"????K?D0V?FXV?AdCtT???u ??????+?^????d?@ E%%^e*?m?Y???8?-m?.7?q??yiE??A?a?Q?1????$S?Y?y???????????????=?A?Q?I?Y?E?U?M?????????;?'?7?/??> 507?8?:?%??6l5?0?? +_?J#?,?=6??!?=???? x??????bj???????L?,?l?k?99???F????T(p,D??)?!?/???f??|?? u/?~S?d?V?Z??} ??X???C??ch?J?c????Y?l?Iw??3??I?/?_~????1? ?#??_ ??2x6?1????O?j?L9?????"?@iUyX?Y?D-Y?????%???C????????T?_b?m?7?0?1q?lj?0S5?????b??P?^?2vG????;N;?i^l?}?C?/?7??/???7@$P6H=?0?:???Y|?&?5=?{?? a?pf?????tBibS????????iB?Z??YY????s>?n^?????~?? ?0?VEQ'q?x?|~??n at Y??{?_??V?W1W?>0? ????z?[??????Q???9?I~Kc????v?g"?/ux????????^?9???K ??x????v?wxvdg?n\hBk?a*?m???????l?g?>I|?????]@/f-?]n^y?????7??????????????????????dwfp xhtt?~Ry??w??I2I-? d??D? 
7?e?'5?z?????N??}-? ??????# 4? f"?7?8?6??-?????? +?|??????+????C?(dP?{?m?I????;??????iZ?E5?U??(k45?R??S.??;M#a?D??t|????;?q??pl???5[?,??BYbF)?>???????#?F?_????b???Z?????O???1l?1D QCl1nD,%?Q+?5????o-p????%??a?h!?*R??[?n?D??? ??q?+)8LQ???????2M??????-?=7?#?Y????oDi?kJ&?V?t*?? +s??k?kn?? ?+?o?7?4S-??O??;|;?w? ???{?{????-???N_{?v}>??{?????CO???>?????????????????ax>|???????????????sj??J????8s?H???vx??a??6o?up?G8Rph??????? ??,,?M?7? +?X ?!????e?I????'d?? +???L??? ??b?} ??]f u +????< `????o?i2?????X?G?C@?$??k G??u?V??^???Z??AQ?????`??'?B????r????`9?p}K?x???Lw&??g@:?$Z ?*?Nk?@I? z?gF?P?r?.?S?????????s?dM?_?2?m??WbA????8/??2???]l??}a?oYu??V????@)?? +?y?B?????%i??`???>9IN?????;?u1?{?=c>??????>????}1??3? ?*M?? ?}???%e9zAW????g?c?wc?{g?;c>??NE?????zc?r(?g??S???n???'?sTBG???vr??m?y?44R?IM@%?5?t??C??F#l?3?/?Anr\[??#8&S??u???E???/{MfR???????!?F?q.<9?????J? t??,?????"??7IT???6B/&?l??B+???/?-k??B{`???K?vQ?O?h?[??J?D??7c1????V?pH)|?$???`9?e??L???YT/K???Z???)?}g7??#-G?Dm?As1??????e????|???V??h?E]v?R-?vkE<r{??@???Hp?????rN?VW? [?m?7?l???d6Ay" WZ??p?DJ?????p?i?8???s30ME?f9:\?M?????["5)[N!?=ny?|???'g3-"?_???????t???????D0?"??s*D?n?c?L;?????Rx?Y?`???P????s?E.?J?"??BCy??I?:?????????'?????_?*c?????????????r ??X?,????a??@,???z??tE?t????C {S???Gk?.???fB_y/????Y???Gi??q??2%_? ?~?R?z?EXbc??4^ R????R???-Sl?????*?~?5H?????oA??o?????Z?(?????~?[Pj?1???c)CP@???@?x+/y/?e??HX? C$?P2>???8??l????S?%?6?O?????I??AN ???:? ??\??? G6#?{?t?7????Uz???????.W* 9????|?0?O5\??46^???g5??Cl????z?~???C?yM!?]?@?GQ???? ???B???,?}?b?O?|?a???o.9??g?????c??E?M1???9?H?&}Hd&{vNa?K?g?T|?? ( ???0?????g1\]?9[?0?5???e????????X? ?}?}??Y??f#?9??:?T?\?l7??{??M?9s ???q?,T??!?vP&n??P?4'd?j?jy?????U?d?a???????@???o@;+Vy(?UI1???!??????#W??m-???,+??|?`x??Q?\S?u?>RL#??????]p????U?k???SqQ??e@?Td?)aV]3? +'`?VJU*B?t?Q ?~?-> ?#t?????0C?lgK?F?Z?*?V}???j,$?IV???? ???????1|?z?E[;???{? +G?e?e???x?R????W?rQj???$????G1W?=?a?>?rYn?et? 
??????is??S9?3???/W}"u)??2e4Z??>???7?f~?Z(m?>_???+w at s???L????????Un??P?????"?;6????7???`?k???%9????M:?#7u!??y??c???t? ? ???T?X???p0,T??s?b>.?i??????-9?dw[?8??v???[?U??????]??`[s?:????O????????????qv?y= b?Z?w?Y????l(!^? D??}??4??d1??????Y2mCK???p??& ???:!??? /?P$|pCu??"g???fBW[???K?? ??z?{0s??????X?nz? ?,8?????f????????????????W???d?a?H??L ??????'??? @;???3??)Ox?A [:????6???/??\@ +?v?????4k??u?i??Y???im?????#??9h,?#w/? ?w?'?~?w?}u?t???9w?Uf?g??????e?u???Ro??zw?K?????????)?7?X?4?f?Ts????2?`??8?!??+???K[???????d??x~{>?????/?5?-?i?? ???T?^_?b???R?J6} ??r?]????[???ZW ??X???????w?????@e<???QvbV????@??Xs ?*Lu?9?m~???S 8?}(; q?)*????ly=[?w\z?u?3L?????D??E???????????rF?? ???_? +????zIU?'~/?s?/?? ??+?= ???????{l?????/I?h??|??th?d?|V????C {?|M ??L?@???0??p?F??)T??? g?*F???%??X?9O9"??a???_j\??G??:??H???*9???b61?w?l4? p????????????;M;?pu?X?I?????dt?????R??J?,??"? l?n??G??UY?????r5??\?Dq??u?r??9?$!IM?? ????H*?8F?l?????Y??Q???????k?$?8???NY??????F?? ?X?q? x/~?M????x#???Y%?fy? (~_;?w???a????r?OU>T?V?????|R????M?V?\??P???=?e?? ???????>??ul?|0r???|??6?'???H?hDW?2????v{???i??] #? ????>????I?????a?E???v}Cr??ig??u??"??q)c?v:?"A?tG?)7xdm?'?w ?]|?0???o$.??k??n?2?p????K?????iw?E? V??)_?/@??i???#>a?^ K???????m???"?q???y????G?????|4?????6?????Z??v? (??? ]?.?H?????u???"?_?2??X+???!???d??????????^??3mOs???k????? ?S*\W?U?Q,????G6N??Y?try03?y????!7m??????rM????=^W???'Y?A ????]?*#???&+;???y??????:???????I?H?5???V??5??@??#? +C????9???6`n??&R0??m?Ru ?sWOT?>????o?#??????t??h?!ws??f?iT??????d?w Q???^3j8~8??????W+)?.?2:?C????Q:?G??$UH??????h????????%??~&++?d?Np= ???#R??r?.??K6?N?F??#???m???u?????o*)????dN?X????M3'Q?B??:+??!?9??????%?????5?-~% ??6m??n??M>??????Vi?eQvY"5?I???'??P:???m?????????+T??8???b"??AUR?? +??;?/[?^?O?^S$?F???U&9+E??UB???{US??????? ?g?????Q?>?U?-p?j#s??F*??,????Eb??L??].ws#??r5?2?A?p???y??????i?=E?te?}A7M?pA?"??l?"z??[?=??|???uF?A?q?1?7??S???.[N???E~4u?S~o?i?5%w??.Uv????K?n?2???aN???f?x??>0?TL??m?J?i?z?v}???(??]+?_?u_?Mss?? QG????????an+?S??`??&??l?-???|?*??w%? 
?LSs????zt?????????? ??k8b??n7?????Q??"`U?,?6??f???1?"2{o'c\?r$??";N???(?]??C)???Rab:?{,?D???????P3^?JG??sS?Y +M???W??4???T6???# +~??#s?w?&{"???XC? -??|?Z??NdfGe??oY?????{?? +.?J4??c???$?r?QI??9?.(cnOag???o?}?????????9???I?T?eY,z?????q>???OyX?q? ?Fj??@C???=??wd}???.?q?)^??l?^D??k?S?stE?H!?^??z?Q?_A a??? r???f???u???]?T=`k???Y?????r??VFn??h6Q/"???~?[?????? uoS???Y?M?T?????(??*o Ii??UzG?q[ ?????^5?w???bt*?]???L??(?+P5?_???>?0?Tr_?sg?_?????S[]?~???|q@???!I?I?}D?8?? r?- m??????G???GB???????]?????[c?:Q?? ????Z?1?Vv)4 +?P?`#?? ?QdvQ-?d??]e?*?Y0cD????N0?q;2#??m?? /;}[H??D?`??z?-(??R?:]*(?J?K????"8???????hC"|?4??S"????D ?5????Y ??9KR?_?u?^?I?E?&,?????|?v??U:??{??{??~JN?ZbP?+??/s??????3,p????????r???t?q???????z~'???5??????????W???96??>| +?!?ba???8??P?b????w??Tu??3gT???%???X_??),9???n?/????xvc???????oU??#???+5?z?\?????y??}?>? ???J?v?J6?j????*??m????G??~?o??f???y?????#X???n???QM????F9^j^ S?' "@[CU? ???7?.?&\wXJ?xX%???????sa/?UW?L?;d?Tpq?_?:??H??O?4???u?$????I?{*???T??????&t??i^???? ?M?w_?1?G?HFT??:$ ??(??B???Pqb???q ?????Db?|??itx??]??????iB.??w?sD6<,?f?x??Qu?9?K?Ngg1#??????cn$5O???M??????g???@?U???Du?????d:?n?3]?????#?(#q~?$??J?f??0??)<L?a????????H???*OJrJ?6A? c??T?. ???????`G?_?L???BoS7frBq???h?-?????0~ +?????#????5?=??J???c?}uC? ?r;=?$??j??xxKR???>tg??? ???)?T??h??>@?RM????E??[Q??????????{??_*??m??U%?G????(wQ?? ?*?j?A?DxP??i6?_?6 g????US???QP?6M4???j?}??Q?<????a??l?mS??#??t???? ??i?b?0????? ?5?}?o??wfN3?tq?T$=[\??&??g??????#X?c?U???|$?w{?????` +??Q?Ve?ry %????????#???c??Hyh??G?`_ 3|g?*????Zb?2?J???H$?n/???)?S'?8?&f?9??_&?????????????????t??j???(;X7???s??????T3???A??????#?q|???"? /U???)?l?>??Z??;?? +R????&&7+????C.???e??? ???n?|???? 7J'????X?en +Bo5*???)?????V????'g???#???t?]?F???P?^q????=??~??=?ACW E???U??7????? ?[??e!yD??jg#}?-2b? ??{k????:?z???????p?;??????fpM&?$u?%?$o?s?? >U7?.?1???A?$?5??j)0?Qvx????k?)>??u{|???O????u????}?4?Z?????_???yF? ? ~?0?eg?}??1???j?*?@?)?U??n??2??-?????4jG$&u$vO?g?$?b"Iw?G? 
????w??????+??mQ}?????[j?????*n/????E???????@d???M?e?d???4\,?^7(2?*?F??+?Q??????>d%ULu????b?n??p3?d +9?- 4?4o-?M[^?I???+?D?]???6?%[%w ??>e???cz|???#???4??q???^T? l?????a??xF?".???{?3?? |?*???~##~?n'?IJ44???+>?*?$=??w?????kg???_?????T???H???? >7?%?n4s????+???'$?????w=????j??*??_-???S??'?iH?????>E?+?F4?????, ??\%a??'v????j??n??^????S??_??H????->]?V??4?????,a?A?d%??Z???5?? ,????>&"????v????i??G?B^\????S@?*??H?????>K?(??5A???D,????&e?%?4u????&i?????^"?A?xS+????H??3??>o???m5E? ?@,????t&????klw??q#a??rV??ksWL???t?B?? v?8??Nx?0??z?()??|?"??~?lj?xPau?dx?V???y'L??y?B??cz?98??{?0???}$(??C~N"??PZlW??7af??~?V??1~?L???~?B???~?9z?30??ui)g???#????l??n??w??p?????r??$z?tK??e?v?QRx?r=;z?*?|(~???k?????m?????op????qX??y\sB?Hd?u3??P?w5??=QyQ??+q{??k?@j??,?]l?????n~???`pp??xrr??c?t{??Pav???=^x???,"{ ??? i??{?jk????m?????o??bv?q??8c1s??O?v??=dxR?,?z??E?Til????kU????mF?F??oG??u?qX?Vb}sy??O?u??=iw??A-$zT?W??i???aj????Sl?????n???up??a?s'?BOJu]?9=fw??-~z?3??h????;j??#?Nl?????n???tmp??caZr??7N?u&??=Pw???-?y?????y?k??/zSm???z?o???{?qht{|CsJ`S}u(L?}?w 9~?x?'????vK?|??wh??pyx???]?y??*KOz???9?|2?)?}??T??s-???5tH???uf???v??Go^w???\?x??)J?zJ?h,?WR0?#}??u>?l?4X?6??vk????#??????#?k??????%??|nB?g[?w??\? ?Z?d??????;??#' ??jm?74C??v???oe???v8? ?$?&??~???%p1?? ???b$ ??z?!x????E0???9"??Y????y???z????l{a? 2????8t0? e???}?o????P&???Qn???4]??1F????5?0`zCK?+?U}9n9p????w??B??`?*??r!5*UWs??'??y??^1^??d? C?5??????GT*??r%?W?????]d*0????&?6?d>???q?S2???N?:?E(???Xwr\& ???u???s!y?n?v.q?s????: }/?f@?ij2z??Ht?+??p??W8?q??w??:?`??(???E?u?`'????s?? ????]x? _?M?m? ?7????????O??!??`??e' +j?Y??{lq@?0? {?? 8X*?? :J?p??????(???^?%??x(?lt?????V?l\3-&?V?LC????P??=??+?$*?????k??h???;??.O)??J??Aoj?7O?U?~W???>????????^?t?P???U??????)s1+?Ot*)???r?}?7??b{,?oi??]????????I??\??KM?C?V?3??z?n?!??~????1???Q??p4U ?G?6??t? +??i???n>+-L??U????J{??UQ "?q????!????E??n?H+-?????}?qI??Yj???3?T?????[Wu?\???4????:?!????W$?????u8??^yD)??z~m??}?)kX?? H??0???F???y?U?7?Q?n?U1#?????d|???&? ?I!g? 
?\?:?????f???l?C???%?=J?D)???@?Q???%????g8??>?F?)?bN?n??????V?? 3?'5? ??7;'|'??,?!?!? \Kzs7a???,???tL?????[?n??1S?m?I???u)a???=?2?U.????Q:n%???i?aS??/?;G????Ug?I??e??o?[?[?????Z>L8?|?1 ??}?r?I?Nv? !??4oN(g???f???x????&$?K??w?#l???X???1B??????-?y;?_?c |?l?5????W?x?c?@t???J?y??_?v???*??2???(J?kY??-o??*????Hv??? ?]?U??e+x???n?? +-??r?^??|,?n?N??n??0 +?7????`p????Nd??????????L,s?????????$????Fl?yhp??)u?2??Q3???1l1???1??=DG?In?gt??w?[????J?????x?ib?W?= V?t?????URn???_?^=??-N???????-Tg???Ma?De%g??v???T_O?]l(?D?TO7A? +?????????W???~?B?w?<w?>??p?????}????m??*l?????; +?,b?5A?{?s?v????????[??pTS??_???PY?yF?B????!???R%I?v??D???"0?????_?t????#?W?;E???Nv??G?;????L?(?2R?E?^|+???[?(??q?P? u???Pl?aj??v??"?5}?????@?l??T ?? B???/???1|??;OK?????? 3??W?'?{F!?M???M?Pt(C????b???g?????jQ?c? +s?B"?zK)?L??H} _ce?????bRt????F?kA9?n?}????'????m????Nz:?K2w9? ?D?jNzr??}?????ARj,#??r??f?,?????S\W? ?%eF4?x??3,?????M?Ze?AJ? +DEO??QAj/?(????L??????Wm??@8?(??$/h&?cq$B? ?I??????,?dEp[????{N?Kpl?H?=V?j1?n?&?mI?l???????8???&?@???h?N???(r???\t?]??F?? ?l??? +???!?&S?W??S???????[??P??%i;Lby-A&"?0?|?????eH??????????Y?A??:?/*?3?*E47T??l?DYB"[??d?nT???rI?Tq????? ?}b.??ikH@????nG?????0??(K4k???b?\q,?#?dV??4gt???:???????;Q!n????tU?k7?P?$??u???Z???????] ??? ?lu??jJi?P???L|?????|??bP? ????i??L?L????g/|R?@?|BXL?8??V??? +?1E?7?k????Av??A?8??f?l?6D???xy?`5V??_7%?6?'D +L?K???(W???h)?{{?~?l?v?`,?/D@>?RuZq???6R?i????%Rb??????$9X?55Z??s?>!?J{?2 +?b?pz4??????{???h?????Do??j?QH??????B?o#???aC|#?? +?3{^?????Y?????? ????????3?nBA?% +&???*?4_?hU?$?f?K?????U?:?????&9?2~?f='?&8d??@n-5 ??]?7?N ?YA?j??|s???Z?KZ:5??????g|?_?????DL +{??@?|N ,???[???>?Av??[??y?????????????8?s???r?g??)| +?e?n?NA??x[V?V?5?| ?????~Kk??????L????.6 ???\??0??/r???I??T?P]??iZ9? ?TC5?g??bN???no?,??jIz????L??T2Q??jU?cjj?????????L?HEy?-Bl?X??r?=?n!?^?-+Y?????Mn0?????L?Q????u??I????^?=??????w???l??J?z?j???????;?Cs? ?*gN??:???1C???A???uf5e????j?U?j?+_Uq??b)V???.??<^???J?e/???{[-?6?x]?????N-{?-E?^?iR???[t??T?a)??o +>@??q????? ?H? +t??Y?-Gn?+???? 
?H?_?`?/0C???~G? ??i|,?</ )AB??^nh>?????~6??yK?+??.Z???7?1?Z?W???\???(? ??a2??9?vu?????u??)?????UP?c^????>WWko??????^????x?;?e ?_??????I?nr?x?????E?:S?]+? ?b?@???aH??yw?=?????R?f? {?? ??g??+? ??7?W%?????Za???<_??????P^?????1?6???=??|?^?X???V?9_?vGI?>`Y]Y??Zi?B???? >?`g?XK*=T8??I??uP?Wz??<5$???#e?????????{???@#???&???s;?N?U?j"????{??,??$ ?4I?ec?????H ???BE!??3I??Fh?? ?*;?i???n? /???fg??y??+?6???|?-?KFb???K??b?W?3?4???????)-?'2?N??#$?#I[rv at 6?G?x?>?????c?7??a&???!??:??????p?L?~??????x?????D??'??7??GP??5?E???f?}?Y91??0]F??K??$???X???[ #?O???K?}?Ou@??,!???????????[?s?+5?????,??/?k%k???ox??V??lP????`X???T??A?zI???? ?????5z?F?s,????l%??R??x0???Ik??C?k_?????T????AI??.??>?????5^????,??\?6%???>tF?Je h5?Lgg\??Mi?Q0?Ll?F ?Iou;??Er?1S?Ju?(K?jx?!(??|$s?%mIg-?;n?[??UpvP??mrXE???te;"??v?1???x?(???{,"?V}\r?Qu9f@?wu?Z???v?O???w?EL??y;?zf1??D{?)G?}#"?? ~hp???|?e^?|?Z9?6|?Oz?o}&D???}?:???}?1??~b)??\~?#`??Mp???Id????sY????N??R?D?????:????(1????)??j?^#??? o.???hc?????X??(?'N??j??Dq???_:????2#?;??*N????$K?Y??n????CcK???X??X?AN4????DD????:??:??2e????*????$????-m??M??b????"X!????M????D(?V??:?????2b??*?????$??O??mp???fbU??W??L??M?????C????C:??:??2R???R*???K%????h??e#]??wgqSq?di?I??Sl????Ao?6??-r?.?'v&z?Iyl ???|~gM?dl?\??]nnR??^p6I0?br5???ftb6??fv?.b?ry '??{m!u?W}?f???tC\B??u2Rd??v7H???wm????x?6???z<.???{?'??<}3"*?~?e??{e[?? {?Q??{?H??=|N?d?d|?6???}y.???~'?? ~?"???Me2???<[????Q}???6H@? +???R?6??6??g?e/???8(S? ?#>???d?????Z????hQ???(G?? ??(?@??6??v?/J???(??:?7#??8??d????Z9????P????G???????6???+/??u??).? ??$4?$?Nch????Y?????P^???YG????? +?E??6?????/????)A????$f????\T?e/Sl??g?J??{j#Bz?Bm:9? p2K??sP+6??v?$???y? )??|?[???loS??n*J??_pBW?9r0:=?t|2~??v?+???yP%??;{? ??4}?[???seR???t?J_??u?B7?pw:B?dx?2??Zz,+??o{?& ??}K!???~?[-?JzRv?ztJ&??z?B'??{?:H??|T2???},B?}?&??~?"9??MZ????NR1???I????B???:^???2????,????&???"????Z????@Q????=I??}?PA????}:S????3,??? ,?? +?g'5????#???`ZK????Q????I????YA?????:K???_3;???-?G??'{????#c?Y??Z? ??Q????JI?????A?????:J? +??36?F??-????'??g?J#????Y????CQs?[? 
Iq?>?A??K?1:O?z??3E???-?/??'?????#??{?O?:g?`Z?Cj ck?blDf}?~n?i?m?p?lyZ?sbo]Hu?r=5?x?u%P{?w??5dwk???f?m??i?o?~?l0qgl n?sEY?q?uG?t?v?6?^X???c???*M??^???C???????S?j \? s?%???^?UyzC??n>pBR??r?z?>????{??PY$??c??s?8|h? '??w?.{?? ?X????h? ??????f?u?^"#are?-BL*7Rr?6>Qf?R???i???G?????t???i??&?#???;r??ws@??:?:!??2?+B?:??????????j?^{?????L]q???Q??Y?q? ?K??6QZ????&_??? ???F?j????W?>g?/0???K?????E?F??a?k?I:??g??????% +sC?(J????M?8k??~?;??7?>??Bb?????V?|#??A?i2???5?3Z??z??-_??????O??V?~st??F???4s??as??jP? + +?*?8U??????I?1??E??ZZD?('?/?5l????e?J??s<>?b&? +????)??|Z?W +V????U)??v?3~+???4??Og|1W?+??q?!h????m+j????Y'? ??)??ZN???m??????~/?O$32c?f?yd??i?# ??y|P??!tC[???\x?>???????????7???c?@ ??{????*X??F??#???N! +w?c?LWU/C~?????&qN???J?"s at A(4???0Me?b?*1?!2zK/ ? ]?d?.C???????? \t=???!??5???T? ?]???,}?bdE??_?+f?j????????? K?t?? mg?????4m??N???)? ?M??h???=???y?-dP??.?^W?7??qkM>`??%B?+??yo/q?0??q???Ed+?A=0???c'??????K??? ?C???.?? ??:?k"2MO??? ??,?g"??>???.l?????H???? ?Xd??????$???-E?T8N???;@??S?>?X?????|:?`d?K2\1?p????/????HQ?u +e??$*????{??G4?7L6?)???A???????!???/@m?Z??M?>)9???w?l Q=vC???i]p????j???Kt??[J=qH??????4g?8?kC?v?(?#?, at A? `?Vh`^?=?t~??????6?B{b??)?? ?>??`-??n????u1X &?L`t????1m? &s?2??????????T?s??+?4? r`.5???,Ce????H???????H???M|?H?`?'H?`????.!I??? ????g?7 ???l.???? +uX???????,?3?????}(G?R?????J?iGa?N???.M?c{?H??n?}3ZhYY??n?y???$>?k'?e?Q?3?"?a??F?n;Z?(??u{-???!???gr?????$?A??3????????V?n+w?kjN?YKD?~??=??h?6 +???&]?????b?#?'~:?K??k??f????W??/?t?f3JM??? ? C +YG??R?&?? ?z??v?=F??? +d?v?t?n??+o?9\[?4?h B??????d????c?s ?F????h? ???X????I?;z???? 9V??r?p????c?P?C??@??u?1???U?t~???? 7??????#???????B?????Ll??GT?S?_???q?Npk?^?039??????6#??N?L?^c??e?"??s>?t? +?8?U3?/h?j?ff:??????ce?q?????ls??7f?A???C??+}?????^w?9?p?@?'uo????KI~?;??1??M<{?i??3O?]?EX? ?C?;????,$y_9. ?2????#??????6|??rcJtl?b?? ???F??H?I8\??n?? ? 
????eu?pA ?4????NIx?Q?5W{J?o&????i??E?Hu?1??G?b ?1eh7?;?n?N??:????#?SF|?R?'?{??Z7???????4v?W???.??1nT>?????i>.?As????????1?N????6r]?&??E?4????j?\?o??&??A??????T?Z?#?1d??????#??Z?`g??k???~!? +??|???X??[?q`?gkf)Ld????_?&k ???'?q???,???} 0???3????}???5`Q?x[?aS??y?N?A??????b?????E???D9??2?B? C??S????,??){??8~bXq?j'??bM????4=[M???A??\?%????????v??6G???'??eSE?,??(?D?c?H?f???hN??Z???k??f?B???????t??Lesxu?? +Um~??j?????,#C?,???? )??????>?c +??\gaS???2?k4???8Q|?/? ;???z?4U?2]?^?u???R?#e???rUy?6??????????K [U?c?>?:??:q???A???PP@??U ! $d??????{????((9D(???PkO(x?RG???Y???Pk?E???#pe9N?? +e?1?_R??%>C?M?????Qi?P U???`??m?????????q????A????Y???I????w?????|???d?q?;|??K??aql????&MQ??? +9????dk???7(_l?{ +?????"?V?'"????6???9??z+?i?|?????O?>?9Yc??d\ `???n6?0v??#>?????????~Wc?#????P\??9?4?\??????^??+????W?a??4??0'????o:p??,z??#?]Z?{?}?|z??-o?? ??E?k]??S?U??????!??Y??????(????g?y?????D?-??m???vS???s?=?R???(???d????$O2$?Nfx2v?3??????qH?;W??b???????????????H5?l)???}X?"]^Vt??\w????B p??!???4??"??6?7??f$&A?bq ?A??`?.?_??-?h??????? +l???)?g?sw?L +?6??6???9??P?To ?CM?~???U??gUZ???v????????\ArE??h?M?"\????4??}??F?z?T]?"??kU??%?jG?@~?$?N?#*^???2w?H??']????J??K?$Y??^yU????U/}uJ??????F??DS????? ?S)????? ?L??O?$E?,?I??b????R<=? ?!?#G5??????y??g???X^ ?? D?h??$????n????}?tb.$??0?#O?wq?N0?r?k?????q+??h??#3?S+AN?l??'7P???,Q??h????.zSP??K?"6k??'??3?[A??RPnK)?*]Q?g?I???'q?js%i?PSb?r?0?;??[`?x???"?|jg??&?6?bwI??Y9S?mq? ?i?H?D=Y?4??-?4??~2J?/;^j????#??+????^&??c}?N???? ?2??'r???eO?sd????=x?^`??O?????????????@"???k?????G&?H??J???Zg??[????? ??5??S???!z?????I)??;|?<+?????Dx?/ ????qO?????q`?l:?????JhR?_l???C?Mt??u?;OF?&Ht? U?gz?0^?H?Q4h??N?`???%?/???J??? ??1A\??????D??S2??U???eT??? v?M???Jzo???M??o?xP?5W?????3K??+FL ??????a????j|?X? ????^???e????8+???N?(;W?mY??????;?{???d?Ke?7,2= ??W??0=???,???vq_?c?B?H?a?G?E}MA????3?(???t?z>?l6'?? ??T??????????4??B^ ???u?????y?A?`[??6??zGx?f?jV?7l???E??O???????? l???9?;k'??????N??????????w?};??????? 
?P(}L??p.RD????L??????F?b?Ue??6 j???sA>?t??(?><]6_R??zK??Ea??B???d5???MP??+?rz]?W?\??T?9?k? r??E?a??1q?8.D??k=?O??P5UI??????n?g?E??Z_?? wi.??%~??????P?IJ?O +?v"B????$LG?YL????5??m6-??|???3X?1? ??K?ED\B5?????5????????g=n%M?LS3?}?|?????P??-V????l???Y?.?gS9j9+??]?x??nP?1?K?????????`?@??w?)?=??z???T?K??y???~??q?u??#u???I?5s?w?M?Z?v?&I7>??A?H?8P?N??????*?5&g???dJ???m??S ]???6N??+???G?Y??7????EE?h????N????'V??q???^?m?U?@??&??K???a??'?r??+d>~\???rJ???s????X?R??-_7???rEik? ??^?P???~S?z??n??w???%?-??f????U?\IyV ???????5d??-????????????h?f??:?(???q??N???Ob?Y??XU???#???!??j????t????q U??q??>-c?T?????7??,E???x?????;? ?Z|?TiE????vh%?>?BP???h?{?Qt??z??c:???C??? ?[QM?8???t?'???&Z?,0??????}? (0W=?L?q??4q-???!??5~_? +|?W?;P???n??k???2?6??Nm?? k???v?C?????H49?7??JH???&0S?Z?s'?g? ???Q:?e?? +???h ?h?.^ s?????-:????\??a(?_?????#k?-??z?r?X?a?9?<9?????u????=W?????sf?A??f?????.A?]?p?hS?iq?")e?T??0?Qy???EqM?D??}?t?f?SF??7E???????????2??/WH??u_F?bX???:mQ? +???5q>? ?=??H)???H???i????*~?,?_?H$??L?j??U?K??J??{?C??Or???&?K??Lg?Q????^??_?Bu??qR?C?^?K? +?]?iT?]?o?l +?i8dH(+v9???? ???4?#0??K?4?L?!???)???????????F?|?I???&??)S????@? ?E\??&"A ?????? ??y?d?y0??@?7d??????}???X?_? |????{b?W????l?fp9??8????(~???U?v??qFPbd?Oa??q???qby??O???^??|y?v?17m?m?`?H?X??????[M?T?c?g?? ??????????????????]?o-??,Y??l?W6???ZZ?8:????M?????4?+m???/?[?}?7???f8>??z? +???Z}~Ce?m???;???*??8?????R??{?n?B?n??ybT8???????L?7*?K?e?S???3????"??'(?????7??uc?P? g!?A#24?7f$_?l?xH#L??wQ?T?g??l??y?V????(??'??@??c6??#????7??d???????L??p???M?5A??"????>?x(??c??e?4b?%?n??%??4???b"????"??H?s?? /??w9??s??l?1?l??M\?N????F??54?_?7??? ?C???6?!???\?G???????A?\Qg?` + ??hS?u ?*??? +?? ????%?????~mLf9??-_ !c?? ??G!???^???~d??y?W?B?p??:?e??v?TO??*a?0'???X?'_??{????F????B??????S??h+????c??%????p????x???????aGA?}?????????k'x??f9?i??A??????$??P3????I6??E?+q ?????K-?z? JY?~???A#C?k? ????G2??#??x?&}????o??>D??[ -?D^??m?R?X%N???"??e??n??.| M?m?!0q?eC?7??C?X?_?h?? ? +?>m%-_???(=/???Z?WMC?4?o???`d ??E6X4S?-?&??W?]5?"??4X6?$XKP?3=N+?b??eU?"??(*?o?E?Tg???|? 
??a?OT??})!JF0C?C?g???M?????d???b?????M?? ?{~????;???,i?b2?w???hJT????|???+???b?cwL6???????????E?qr???T?B?&?f,??N???????[z I8h;??0?S?e?=?n?? + ?uP??r!EwFV?u U?I?e?jZ? ??0?BdE??H?4DT'???K?G?m?WD??%??0???8j??B??%???!4r??:? ????8 ??%????T?+?h???p??wZ/??????0?cTG?@D=?mX???+??GkF???j??O?i)?I??????i???Mm?z??d/????xlV?_%?jd}?3M?_ 5??????????[?c?)wf??\??$P}I??}T{?i??i ?z??R +w?^?$??$rU27???? l,0D???)"?^E-??#??a;Of3Q?G????????k9??4X}?????R?y[?r???? F??????\,?*a????m??????r$?(??4???????5????5?>?1?44fn?s????8??}Z?Z????6?]?g?? ??????????R?0??@??6??Ra[??y?J??c?????Y????f??=+???????&?47??ia???????????5gxl?a?y?g??????8?*????)7??K??$??vU??q?l? ?P??j^?4Gxq?{?0?N???|Sh+????&3??`??@??-??+?????!?J?I?&??~B-B????txw~8vC??q%?F'H??E???a? ? +?#>?? [?>????s? !?? +F?J?? A?#?3? 4:???????8t=v?-??;p?w?($x~W??6?????0? v??$?!?????;dI??$?^?s?^?W??t?!p0?t??U?A???tW????p^b?+????Bf?!#CH?w? ?4??r?????If"q??????6??Y!{???2C????/?F?\CV??h???????5w?????#r0?????:????`g??????? ??????r??S?g???Z7f?u??7?2K???8R???e?g???u????????????<3?F??'??e???7o5????????O?0?%y???\?e?? ?b????7_W?|?????7?B??T?=??3f??;??G-????L????y???????????J?v??]K???XG????RZ??fx????I??LM_?y?V??????2?T?WP?>???FG?.T^?{?B?/???Y??D'??%?b??I??A???? ? ?????~??A#?3B?>??VOuM? ??=?&)!???C?c???1???? ??^W??????4?7??F????`?Xe9?E?n??Zw?xC??+?'??X? ?m ')F?8J&??@??????uWT>QjI}I0???l? ??OO???h?????w?Z?s???H?u????~??#???&? +Ih?(?(?:????????N???????b[???????J:?6Sx?0??h?]?{k~? +{?,? Dq?W??t???Q-O5????Ow=3??n?%}lP?w ??N[l???(:?!4????r?Z?6@???"?@?^S? $j???Hu[lX????{??? ????^??B?}#F???v??i?i?Rj??>2?n?}?$ G;????]x?d`??7????.??k?9?x|y?1??]/????????b?8)!8?1d????????Nb6!T????????'???7?_2??pp;)*p v*???"?md.?G!I?j?i???*?06???LH???7d???_>'#??z?0BA??Q?n?????? ? ??Ic?????????%??83??[??l?%?[??f???y?X?e??Z2? ??<[???8?6?8?????Svi?yQ?DP?D???C?K?7Ulxz??Q?k??k??-?;?A???F???6T?(|Qq?NT!?L?^????&r??3??m?Bv?9?*???q??ZQ6t?{?P?????%?2?Dg9Q?? :k??S?er???l?P?]?b??????>??bv?;???k??Ac??{T ?]?d"W(???'$???? -c?Y?UB-??????N??C?Pb??*5????? (??'????I? 
0?Z???N?6??sk?G??????c???1_S??6??~"????+?}%??*_2v??w>*Lh(2?,]T7,????cg? !]?)?D?*?]?+ at I?hi~8=v??9??Nf*??Ui9??q?????s?I??|B????N?Q????VmYI????8??? ???i?H??:?^?p????:?3h?q6????Ya|]???8x??H?`~???5?f?`?= cks??O?o??>?L+?z?:`????!?vt?&?^GZ?S???H??V??z?};?=?L???x%P?????-??????{?P????????^%o yw?~??????v??ee????(d??????q\?"?B?I????h??Lb???? X?v,?Q?P????r,C????Y?A?2?H???A?????1?W?????????e???a_4????:&+.Go???^5???| ?m$??/?j???Un?????!??????a???M/?[?[?t=T??5?9w;?+??? +?X?5?R??S?????s=????????&?K?]*??:=??@??? ?2u???? K??-%?? ?;]?Zi1??a?V1]???e +??o?{?O??i?k??; ??9d?i3X???9,???q???P???+Sw@????=??z;Ef4??M?8 !??2+?????N??N?/I(t?:????>???i?]?? ??_??q??;?8ZH !?If?K??? ?@Bv?$O??$a?Zg??Zp!??? ??O??YW=[?d?T?"??~??/~E????I?}?e??J?mCW}? =?O??f????j0??v" ???????G???$?_???|?_??JD?Z??y??7???,?~%Y'e?q???T?K?/e???b?p?Xy:????eE?6??6?1?Q7?<N??hAM?Bq.???????JC??O?c>?["?v?,?????? ?r?Kx&?\z?|??/YX1???jR???&a?e??(?Xiv?MX~jYq`?I??f?(? ?A^?9?nS~^?b??-??2?r'????5,???l???f?????-???B?6?/? [Qr?xr????Pi?%??0?^ k"????x????}???=???>?Ss???{?i??'&e ??? +??'?V6??@?Mb)Oi???Y0???O??????????`?.Tt?x??I:?lEu{?? +??>?A?????4??,??I???3?b?8??y1????A?????0!?/F,?????e?|N??Z9??!??,??2??W????????9e?"???>?Q?7q?? ?+a#?5?gi??3?b???_Qd-????P}A???A?9xB?V?????K????"3Fi??????M?????Al??p??{?^?n???R?N????U??1?$?tC????? +M????9??H?"?U??K?D. ??l??*h???"?=?LR??????? +?L??p?F` g????PZ ??P:1 +?P,S?b????????)???????Sp?X?=fOY&??????l?w?2?M?????? ?i?k?b 6? D???Y???!?x(=?????nmm??l?d?$??j?V2??{?_2c??g?g??E +??-r?\"c0?f?0?1?a??wi??T/?????:?????????~??X?^ ?g??28???3?.|????D ??( ? Qw%???????`?r ????AS7 ?;E??C'Fp?P???j??????+ ?,U??r???t????>}??|n?r????"''/????~?u?7l??y??m +?w|?s????????}??W????c???qB q????i?3zg????``x????et???_~56????u??H K+k??v????]\?n?q??????????Ba?C?8<@!")4?I???{?A\???G?????????????^?L?x??????????????w?%?K?e???Jz??????cs????F~Ss???????K?-???I???C?#?c??S?3?s? ?K?+V????'9y?u????7??y??-[? +????S????[???=p????"??&p? +????\?K??m??K8??5??7?-??m?n9:??v??? @?? ??pJt???G??)O???H??????mA!,???o?_?H~??5aOX6?UaW? 
l;<??????SS???????t{h????r|)a9 +?.U?jo? +????El:??I cP?g???? ?e?b????Rj?\?ZzO2??Q???!????9??????????K??lY?F7???S? +?VG???_W&?g??]?u?4??"?@g?j????????rmb???~????lY?4???1?Z+?1?E,?~0??7?EO?I?L??u ???2??-vyI?? H??~?????t????f?????????N;????Nw?nc???5? ???WP?#**??(Q?EE?C??"r *?"HT???u7?>??????????p???? ??Br?D?????Kw?1??O???g?[)D???H???#Hp{fw?f??M????wn??s??%?? ????????~{?G??)q?c)?kt?4???Q +D??%2?R-#???m?&Is?y?@??k?m?z??2\? W?+???? ???#VX?'??%48??$+J]f"8zzZ=P4?@.W???z)?ay??????l??g??Ml???G??F/ ??~????Cvb?]?3?@??L??N??p???1?@C/?[??(e?J???vy??eM?D??????V?s???p>?? +???*r +?????7?I?????D??"'kF5V(W0p??? +?tw?u???lgm?;X?=????4?|????????m~fk:??.?k???,??Q?L2? NhEY?Z-??)?????-? ?N???i????S?????P?SYhGW?$?m????? +lPi?r%???5????*0?>)V?1[?J?lT;?e?Q??]??>?? ?^@??8$?* A????jL?b=?U??\?G??UyR??P?--??JJ?k???rQ?F?t???N???O/ t_??W????D????Br??s? ??5? ??:?XD Uhi??@?1_?k????U?e???q???N. ,?s?????{!???8??>De??????9?OX??,#?3????QeK?????????D_9?????_)?}f??????1???;N?=?Y?Y ???"s????I;?4f????4?KBHT?$O???zE?/?j*?+tXi?3?d???1??Gl???????????w????????)W|={3?{??J{i????S-?L1A?3?W??*?8E??H^?)???v8???!??k???U??-?9???V??????G6??{-`*q9??????3?U?Y?????u??H??(?ey?j????z}|0?????? ????? ?+?=???^????k?? ?Y??=1"8?K??bC??c}>?_?????lm????^y??5??g???^???M??F???qX?+?@t?t4??z????2K?0v5}?? +?Q????]F??? +???E?Lc ??v??_????????b????G???X??# ? ??q'??????'???_?o@???pJ?9?Z?D???K?0???y?C??Gz?_?I?3???? ?C??????P\?A???P???~???r?v ?`D????@v??Y$?? ??{?i?o2???G????g~????{? 0??yp~?q6?0?? +C?aG??k_3??m#?e>????1tb????YUc?l??]%??_???2p+??F?S?????{??;??????{;???Fa\?8?6?fK?????+C?a?N???0??t????n?}?v??l???Nf'??v??sw5??x??(?^ +(j????? +HDTT?F|#??6?a?Uu8uEY?0??ms??5???aG???P?m????qj???vG S??e??u??e???? +?n??}?7'Qk?P??>??? /?Z`? ?H?????MbFE????A???[`?wWZ?ym???Y?;+??e??d?]?p??usW??T?ti?S3?YVK??@??p?j.?[?E??*?'J2???y???2Kq?R???y?S?L???v?? o?n?.???n???_??M??d??|b ?`????Xc??&?x?t??BO2??ei?Y??=v?Tf'?@??????|?????????e?o??v;???h?c)????utk?ct7??1?Xy?)MU:C?S ?Y??GD-?L?J,?r at p?s??6?^????????-??????,yr Q ?F????<'R???jp*?????d ?H5???g? 
t^?7??=nC?????K_o)?]^6^?4-?y?W?"?[0?tQ,??o?Z???9c{??J NJ[?+?&?2g.O?9S?#k ??6c???w???'?8??|?cU}?????????fB????+?~???Vc8?N?j 5Pl??P-i?\s??Q?q?P?f*W%?5j???C?Hx?????;? _9f??7???w???nj? :?? /go??Mkpv? +?W????.ar???6?(??%?[?e)f??P5?]??????_??NJA??w???G?,?????C$%??{??G??R?_'??S ?? EQ???????H???_??????l??? +p???@?x???????A?\,?'?D +???@|Z$@?#???0?? +R?Pm'>?s-<????B'(p"?X?? t?fc?????vWy~??V??(??S????Yb+?'?f??-??q?Y+?z?Z   ???? ?nC????n p? ?h?.3?j???z=?????r??????E???Q[Sj!???? +3qlb???T?u????:6????????>A?+ t??y??????-&??~????'u-W?%????{ ?u????X ???SSZ?F%???F)5??GC?y??v ?n????74?MO?e7?l?????e??x?r]??^Q^??J?QN?V*??|????????;???7=???.?6;??-????QC?xgwk}9?#?z??????{?vD?g???&K?'b??t??>Z??? T:s|w8??????????G]?M>t?/???/c??W??}4?m??]?E ?c???!R???LV& ?(r?)*??8?????ki???L???????????F????????9??%???:???Eu??3? ???s~??ilS????????Q??S??a??3??>|N??0\??? ???u[?ce???????j3?????F??? ? l??5?????7`?_? ??????>?T???g ??C???????s?:?Ax??;???[?? ??t ?,F???md?>?????R??dVl 9+??L#V???3G??fc??_??gr??????)?*`?[i ?? ]=w$??????/?g?}1??r???Q?>?z???"?pw|??6|9?UTu??.??@_ K(?QR?\?,?????YfW?lE7????@|?????????????0a|???M?e??????????? +w???????u7vW?LP???0]L\?m ^Hi??g???Y???l?9@???'-W>_???n????6?$?f%=?z +?{??Gd??gj??wN?6???.?????? EF?O?cq???o????$??H??#r +????s??? 6??????o}?????/???%.?aQn?????48"???p??, D?"?L?#@ ?(??aT???=?n???g???#??S?r?X\??,?i???@??3 ?hE ?7~p??????`????Xw?{ ?n??7(>???7???ePhw?9??????JQ?c>i?7???Ngm&'?_?3*??X?+4?`?R#\ +????C???@n?'??u?i?8y??C?p???w(????T??A9,??Q? ???L? k3%?t??\?.0?u??j?1\?`:4???E??d.D'??MIF)?_?R???t(??*?@%*=??r??$?3vr????????????g?Q?B#???@5/???,?*[?j?>L/???J???C?R???t(?q?mn?JGP???XP'??P?e??]Vh?f6????\???-X?-????a?|x{???,|??-J`5??T?{?% +(R??E???(?????vq??@??]?m<??C??5?st????lH?wV??? "l?L>k?K*/?,}?K?????t'?h?%?Z5?"e;?a??????????S i}??|???C??Pcc?????????Zj?%?$<?eLT?q?y}??Pv????hV%?(Zi????&uc?NS???Y???Y???1???x(1*,??Z??2??M.??-?????&t?3.?????/ ?A,?P?L??ee??? ?d)?I?NQ??????-O?????t? 
+???Peq?Y???h??N??XV????!H???????aq ??*???D 4i#C.??W?2xjAz]WE?J?K???????t??z`??g?????7???(?=U??Y?'aR??7?D??? m ??,?6??I?ir!????akxY5Z?}??(??/????:S}:p??????Z[??o?-????5n??:O?S??4?F&t??????p?|r??G? +?????*N>KS?h?r??? F?Ao?3??????9??????|Eq?vA??2??!??x`????]^ Z.?M?i?V?*???K?QS?b?)??T?????????v?????/-??|cD???~???0???k?#,??O??xgG/a???E??????L? x??o1????,$??^????L.b??g*P???e?7;???L???mk????S`k???L`?K?????A%?N m`????k??????Ur??JT6z%&??m"?Lg??r?L?)?r?O=?13???6??????? +??i6?f?7\??-*8?Gb?%???_|??7???????????! ?d??Fd?}L??{Z??zj??)???v?)??.'w ???J??S????Tp?!X?G?[???#7< ?|??X?? A`??? Hd8???#`[?H???(S?;?0S??0???;???p?@8?c +8{? +.[??????K pp?.??D/?@c<??p?? A~???? ???9?D?J???L??;3>????s8?M2???(???Pp??[?K+ ???_v?????????????????:???n]?????**??"?\r?B W?? ?$?"?r??$??????[@Qn??.v?O?c;?????????_????????z???? ???y{??>?A?7??u1~?? $X ??@8?%??=?W????z??`@ ?????E?????????o????dZ?&???'d?G+?k???UDg?j?`?r?w1??????????7?o?_???P?pPo:?mw@? ??hP?H}_?B?`2????W??,'o?T?69??x{-F?YEBu+?.?J??U?$o9????%X ?"l?? $??????tsL?@@?(wS>?c??m?????K)??$?z2???j?]?Up?? ??{??K???9??`,nV0??L??<+?&@? ??_l???LG;?quo?@|??'???9???E??Y??<,u-????F??J?qQ??qz?BL?p>zD47-J????H?S????? ???f?#?q? ?.????????r?&4q???I????e????bns!?Z;????$?S Z??S2on4%N7>J????M? ??{(????O????@??h?????/?-7?????J?-xIH-?CS??0?rIu?Y?????9E?8??P?$????yR??>??2'??Jk???y??]???^??|C_ ?b?x?/?H?|UR? !?<^?eZ +??at??qW??!???A???>UG?? +?W?????l??@? d???d??]?[??Ko{lK}?7?K???=?J(yJ?O2?1???QE????????,??%?? ?????4?c??`?g? ???t??R?k???????J?????YQd?87???H' ?????b~{1A?Z?????*-Z?Q????9&]#zT'???1???? +6P???]<DW?}?;9?K??????]????7?N)??[???5)?Z6??X????%??*????I?/?I0-???'???}]???S8?lM????h??w?(?8??Eu?n[??????Z???B??MaiC$???Hjcf? *??D(???? +q)[?P,j?a??Zl??k10?,?xDoM? 4?P}?;???'?:?~?t????q???}???o??7?? I?# +;(????j???2J?DD$(?x??S??g?4zio gn??,8??@?'?n?8?-h??????Y?h???|?U???\???x?w??!8?WYh????T??&?/?Hx? +e ???xzj???T??J??????Pa1X?'C???? ??G???OnC??????_?qs??t?? {?00?SQ?c*aS?????`$lNI??hIU??*o?Q:??d?G2????k???????????u??3+?N? ??S&g'??G??N?d?!<_????J??t???T?+?5Dm? 
3?3yh??M ???X?????O?:l???4?????;??~????g??n??t?????{pW?o??QB?PO??Ry4q?? (T??xm!??rE-????:?1?en?e?[? T?l??q????? ]>???pl~????I???IG?>?k??A?MOP???n???k????LQ? #/h?5=?T5gJ )}k??dL??[3k-kt??? ????:??k????????|oy?r?????? ???/?P?V???WE6?88C???-????4dm?????j ?:?%??D????]?'??N{v?Z??o?L?6:{s?????,x;?y?5Mz??F|?9C????HJ??XvAw2??3[?????:????ZMdg?.?{??6G?Gu????}? ???m8??e??????? =??????M?+???@???;s'?z2? '`-Hz?)?6m8?5?/FWJ#LE?Y? +Q????"????X_???_???6?O??????4o??o????5?3?U??????%?h???y`~?Bh9zA??C1?'"z.?ww??!t?% |.?-?K????????j?????????e???????????[n??[???-/??M???Oi?7?d=??R??Y??K?YO??Z{? +_-? ^!?,?r??~}D5??q??xpw|[?2?????Z?Q?K??!?  @B ?4Ho?@-B/+?"????6,O????? M??f|???{???????mK???????b=A?? ????3? 7 ???!?S8??h?H????gHo?iY?3?L-???"???2?XL?lwA>u?p8?n?@???~??7n??;~R?S?????a?`? ?S?????t< ???+E?>???ID?4/6E?"eMR??? w?$ ????5????VM????gJ??h???s???? ?U??z5?Y}y?`} u?NI???????A?3??}????????w?Q{???y??S?_? ?Q?+?H?)??w???6?9Y ?GY??W? +?? uM9???$??A???QAh(????t_??{ Ho??????|?b???7?M'????;?l?\?????????$u?bi????-???:1CY%??T??????Z??A???$c?5IY?M?@??}}6t??m?2?;??2?o=s?u????5G??.g??H???A?] ???*?{??|K3?????o?@?g?\?????.??W?6?$??W$$(C?1??4?26?????&????EE~??/?LwH?~?`??????-?J?kA????????=D1e??5{ls???Q???P????T??Z?r?g??)|?_$[E??Q???y?1)???1U??????T???????x?fa0?m~B#(??5.?E?"}O??n??N?????fk\??`Z??d????*^???W=I k?a1k?(?'??O??&??%?>qy?5?k?\??J??R????.B/??k2Y?? ??Fz:H?????}?-kf???V&%p?h8?L'4??,p'0??D7k?? Az?i!]?? ??!??O? +???I.XI[k.8????.Q??????Kb?A~J?BD?0??? +?7??4g??p???((??DqA?????!m? ?*a?F 0?S?T0??k??`g???|:??????? ^^@???P,?~.??9C?4p?Q :?8?r5??? ???4?????E`??{w??.6???Qw???i0??? +?>?n? >??st" + $?3 at 9???? ?L???\m??97M??#H?0?T,m@ +a???uy???x????????A`?6?}????????)0GO?? +??gn??'?da !?6fe???9q'4??7?n?????n?C??|Q? ?V8??d???CXF2!?,??>????~?9?|????}??????/?????g}g??ps?/T??A*?????? ??????n?????! c B, +??? ?C? ??p??G???]'???V?W?-??5?2?.o??7y?w???;.Pa? ??fo??/H*?(Y%?????l???? ?6?t?C??bboGH?w>?(?z,??-d??s?>R% K???ER#o!P????)P??L?' ??^?N???o?3?1?\???y??*?K??n??????1d:??q??r? 
<??qk<:y9!???g,Dg???E???e?YJ-&X??+?z?tPO???~?d???M??h?4+???~ +?????_???cj???B???s6?<{????e??>???????n???t???|?4?$y*?J???,|rM?&?+?5???%y8c??Sf??s? +?\?#?\??@????o??3??;??xE(5?>??"c?x??a?[?E?f? +??(?g5???I |PZ?5.-E?K??~??m????5{???lj??Su?P?!??I{1?I?|?(?c+????g?tHI??2?\&!+?y *>?K???h?yV5'oT?9??h?[???? +???[????|?~?????? 6?r?r???+???U?dS??LJ??Ny?`???V??? ?????????-???_2??c??i??????w??Y}?dc?????xx?2??!H +1?)??K???? ?VEe?hH?v-??HGd?hIt??A? ?8????y@?????????u????P??ef??????-u??u?gc? ????T?R?E??s?|)b??@ '?U*?*j?Er?)??j8_??F???4? .H5????&u??G?PdU?hFU?*????^6 ?V??V?z?d?*?'??\9?k????w?????;J??;ODI??$q[/\?]O??2H?kH????? ??Bi?Q'?`l(8WgV??S???????.Z2.s}[??e?s>l`??=??r???/??????_?{??????;"??u.?????H???v??~?L4Q?1&? ???1?K"????????k???u???.@???c?????`??f??=? k{?af???5?}????) +???1????G%?zc?=?9?fxn?E?y@??}?c?6?9?=u??^3/?g?w?s +:?>?wp<\>)9?e?????|??????!C?0l,???L - +lR(c1 ??A&? +??P?p?j?+4?\??A???M??pr??????U?J????? ?`?= \v'??C??Q?M?o$???bC??? N?/????*??F:Cq?#TD;??CZ? ??V?}? g?"%?r~l?.?-O???6??????o?h?s?? ?0??x Dy?C??^?????;?\ )? ?@??m? 5??m?{??? ?t ??????2g?0??] ????????S ???????!???D???6P???Z?x????H???0??4`&b!>%H?Q@,$??J???Sg?_???3fB?1??[???X?^?&0o??1c???)? V?? ?1hm?[.??Z?yC??N?c?@??x????H????b>????#??z?I???????>??????3?}?y??;???bu?>???E??B??k?=$X,??P?C??9???????X??D6v?-!??J???C?$?MD?1?+Y????2w4?&o$???????????F?}x???O|??5?m??\2?5?t? ??f? +???p?99?oZ????B?+??????b??KX?1??wQ?y????? ?o O?o?????;R'???????v??}i??A??PX??L????6u??\f??>=?wRF ? #|y??Id?1?G9?q'?}???^R??u?UQO?]?_???????qg?N?N?Jn??e??mH46??bP?Y?6??sh???b?'??{???B?d?????????rrX=?"^7?T?E=#zI?(ywE?/?o?Y??8+bD ????W"@?????=kf?h?}????]???]?W-?t??S[?N0~????80??|???s>??(?J?eeb???????Z,?????ry???L^B?,&"X??? ?????H/\V?y??G?I??2c?e??????L?ko??4^vf?TzzV?????F?/8?+?K?c?<^n.?)?87>?r^ ?]x??"??????3?1??8+jH?N???J????z??^W???]??s`?@I?FE???????e_u?????7??<$BP?Y??Z??K8????/???U???Z??]a8?R)????????8Y<$+HA?v??JE??????w??????????@????`c?|??aWv?????'@?[PxfU8'?"??-KH?.???*??W`'??? 
??_???????b?@??!M^ +?%?.??^?nA?o??C;p????Gv??????i?????-?.yM???+??)u?g?k???]?K?????VbYA?e?~??r ???y?|?/???Mx???? r????h?K=?F?#?????k?????:`?Y}??T?ck???r??p?Nmc??Gr?#?E4?pO^MI h?I?i(?{4\?vk??qix??V?&????X?(j???-??D#??$??????}Y?}_???Q?m5?H{???? ZQ?? ^??;????s304??TTH'??3???????Q?s??Ow???`v>?p?>???8?)$m?????%<k?D?????{R??8?N????7W<??????zvH??S?2?????????I*g?i1??/?+X????QcC?'??qKG??,k?}6? ?B;?????}?i???????E???O??fC?~?????@?,?1?Hp??:N??Cq?????P?B5?uJ +??,??UI:????1+???pX9H???????.@?c???? m??????!?la???,?? ?O;?zPE??6k????6?Z????@?'?t?$??????F<r?????Jn`??&{`?????`?m?)??k^z?????G!?@? u?G??H ????|U?{????? B?%????????h????2P???C??{b5??l,???v?X.??B???X?oA ?_2???#'|?P?}????0&???F?;?c???????!??AOS???Tp?7???Ol ;???i.`??x@??U@??p?????8M??L??_?r???8?*5???A???|?? ?????6???!C???ADw?;?`?[_??w*??^?Psn?? y?\?}>?[b??1???w? ??2N??"?}>?#8}?p??]? *??G????n????tz ]HS???i?3$ u?@?9??????9???' ??????NS?"?~#?xt??>D!???a??~|:? ./?[J??T????????]???/?????A????=My???X? n ??Nv@??b?=PZ?:?? ???OR??a??{????G?1d?^??MJy???PL|?? ??#??]???#O j)?&?@?7?h?l?Zo???L?m??vh?`???})[??{?i. +??1uW(??????s?i?"?X????K?2?E?YA?9???&????R?D5???3?IQ?.=????s??l???4?gC%???M?RGQ*b??R??[$M?? ???b?AT??6?? ?V? +??RD??d??R/??0D? Q??U/?)+?'???|(u????f??g????^q????ky\4?La +?P?#"5?^?F??$jbB???????????`?> mDV??\??????.?7?3???S g???????P?Ys?t?5?4C??????VZ????{?N????t??????R??r?d??????I??e????K?xn[vb???????????%oW?4cz?G?l??1 ?gO3???????sf????????uxTQ????????~??/w)?G?9??o??w?T + ?? +????n???z5;%????????4??r??????W?K?? +?????????c&?v^??)??? /0?YBIoRq??Eq???(]?Z?????v????? tg???????vcO2K?9?]?????j? ????????t????t?m????]???????P?Fq?ic?Ko%?*?q?ZfTk????,?6???????ur?P????,3????/!Ty ?_?Pq?0C????\f?}h??????3????V??0?D?p=#F?????????@?x\o{?I?\?0?K??H#u?r??Z6?_#??|Pzr@?? q>TH?#A?l ?????x?/?? +q-??L???????|??????????-.?????8p???????`/?>??q???? ?H? +\? ??2?}??c-????#k???:7(?6???*?G??fl? ???"?c??4???;?(p?T???Rp??`?5?mX2??yd`??? ? Aj?T?} j?ZHp??M?? w? lu\?????I=@?d#??l}?P?;??? ?8?L??s???U????? ^3??????b? ?Y?@???? ?q?? (mWC?'H????9@??%P`g +??Z??? 
+x^x???e?-D0?A?J? H=A???B`? ??P??|??R??"?H?F1?P'CP'B?i??7???.?H? ?c??:j?? @?P??K,A?5=????M?-D1?A+r??+(?=A?? ???? +?!??q????+y?z???RF??\?M?o???j???F?(?5c???>H????d#?? t ?k ka a??A?m ?A?K???#h?? ?i? ?.?(?:?;@??>F??:E50?i????1z9c?v?9J???>fQ_2??NfU????3??FF X???b????@???Zk? ???BH?9B????^S?T????? ?6?A???N/????????>?}?????t?????6?????????? ?Zd??V??5=???;6???????? I*???8???M?zM?O???~??-??Kr???????u^??-"???pJ?D?f?????|?t??:6??!?{??shK Y???????(???????lr?v+?#b/???Gn???5?7?ky??????????Ez????i?S???&????? o???^ +?: kP?? X??53 ????y???yl??l?{w???}jB??? rk\>?U?N? ????c?g??aOT??&e??Qq-\??Q< +oT'??- =?p?d????,q?M??t?3?l????K?Vu??nn?:??????.`WM?Pyy?? Fb?VO???8q?bQb?6?v?@??????P?{ ??tK89v??}s?sbn??? +,"w???_?q`???i?[?l???j?^?b??P?????>???&???2?????*c??? +??ziR?.? ?h??"=???????v?s +??%????D A??H? ??:{9?i???V0??????m???Ac?????o????_???????R?&???(Y%/6??E9?]??fu|ji?0????Xz&????b?nN???.?`?UN????U0??????u}9Xzw-??>?Y?y|??r?????GT-?~?fb???.??????????TZ?*?\S?I????U?A??6?#(??cz+?-_"]?y????J?U????@?X??`?>s??????????????????s????e?Y^??U?d?I-J*?R????X+;?2? ?Z??ON?;??- t??u??Y:?K?N????|T:E?B????g????h? ~G1????????d????@??fgx?9RJ?????N????!??gt?Df????^?fp??O??W????m?????F?cN&?=-R'?'N?????1S??????i??P???w??${?f3?2f???s??H_v?????`8W???q??<7??Tb;p?1??????f??2A???F?s?M4_????c A?2 ?D}?? ??? b&D-?C?2*hV?@???\?@???JV??_?????Z??? ??r{??u??????GhS+Bi?)a???L%?a& Z??8? 4?`?P?cK?$vA? T????? ???-???h?+?rp???.px?!???T6+!?fd?:@??"?n?~0?W?s?%p??@??)?d??A&s?? ?n???v?70K|???B~? ?????\??\???7qzR n?T?? ????p#?>?q7H??!?? 8C??b,??? +$??E?9??G?T:?8? ?k?????~s?_fC?0???????s???????? ?}?8?)a???0L?D????)#?^??K|?????c |+? ??X? (??A?1 +g?L?>o????`? o???S4??-x??'~?y?0?? ??^"?2??#???a?c???4@?H?????#??P=???:P !v?p?-A?e??@E?5??GPF;C??}6*?kZ??wRT8?/ ????p??'}??! ?+??a??a?)??ZJ????j?t?z?:??-LO???,?"?C??DyZ??o$??C +}$? Q??W??'?c??)?,????U??E???A???^???$?a????3??BX?B\C@?u??Z?????,!?G?V??\~?x>}?`wYO???}C??????Rr??5????;C???m???Jq??2????1?g????????????o??;5?a?=????$??d+? 
????\ws???b???y??R??9???[6??y?1??Mf?!-??"?0?^??I?n??@xm?_???rQM????q?????;????6? ?Pd?b:????*@nDb???f??d +??Ma????_??"?M{}QKa????(??o??o? ?PR??8?LNYLy?o?????g?M??y?=????????H?c!J- ?(?"?QE???=|?C???7?F?k?Z?/1??H,?oT:??\?i?/???:??n?n\ 7??????????????????????R?K]1??? ??5??E?[??G???????e?[Z?$ ?$?NZM?.gx??&???? 3?????BqYlF?????????\?'E4?Y???>??9?? bi??t-?/P?{?*0?????xl?????M*??w/?kq4?PxlR?3o????;|????YT?>?W?=$*K?*9zXP\?+?e??? ????v?}????G'1?A???/Y?0W???Z????D???`??1n??B???~eK??Y????)ho?,???!?],e???&f????UeGq+ )??tv???,;?(????????????~`1JS???[???????????????]C?k???z{?????????hZ?`U?? VD?f???c ?#???????4??YWI???I/3Z2?"K??"???|`FW?8eO ???T?<p? `??^8?\?????l???p^S??iR??o)????<| ????k??Mj????C??????hrScl??8=L~?*??=6?????6"????3?????b7???'???9??G????hW?u0,9??QC?E???'???>??????????v?C????c?kB??A???j?k6:?kN???)??m?\ 6??e{???|9?9??%?R?G????q?A?x????a??????`B? +7???? ????L?%M?;?N?9{OV??Z\]?????? S????|??| y)s ??x+????mX~???fT? ? ??p??j ?6K?????Y?U?Y?u??Cf?L??L??8?|??Xz???????:<"?R?F??????[?D?&:?3I ?V??K?>] ?????Ob>?F?^ +?????????X[?']?-??l'|q?????>?O???q?q?G?9??/?(????>@?????Z?fLZ?g?^??z>?2S??x?!?V??@?? ?>? ??#????L&??_=7?????+-???'??5Y???X}`??????F??I??WH??[?!f?i?9h??5j??9i??9k???O??MU??v]?})?2W?z??L?-?"?7H5???????r???j???????z??????~????????W?K?/?7m????Y?*????5???v}?f???V???Gi??gi??{x?o??p???????r????x????7?X? +??p 7?A<?}$?i???x???Hg?????R?'???~???@?es/???RJ?#??????n7?o?8w ?\?t?\v?*?av +?2/ F?.&?? ?}?8s???8?^?I+??E?}M?U{?N?|????Y????/??} ???DRO?z???\j??b?k?2?U?o.?? ???]n?B??? ng??g???? p:?o=N??x?x?????GB????= ???&???*?y??XH2?B?fr? f$?}Y@?=*????c/????zz?y??(?????????r\O???)??GUz???}(b????V?6? ?V?????w??_?????????r? T jR 0?????*6?<-?c?J??z??????]???g?mN??SO?(?????????n?l?/?????????%z/w?QAs?E???;???A;???vDM ?@??o Z?q 1????]??-F^????l??n^?????^g1w???B?Sd???b????R?X?)9?}??^-I????U???M???;?o?;'?{K\?L\7??&?????@?>??c C???CFJ??g????r#?v?5?j????????@?cC??F??e'0?e???Y???#5??)??o??"???:A}????????OK??v?V?}?5???"q +!?? ? ???.?a K-?B?)???r ??r ?]e?ry????-?%???+?I?r??yq???d????9?? 
??xuie???j??Rv?+?%['?e??d??2e??T?? +U*@2X?v^??-G,VE??T???jHO?b??????M?c????-~+???V{7??5?????X?sS9u9????^e?2?"?*?l}c?2m??$?xXq??5E?Q?? +S'e?i E???X??|?UC?`?? +?w5?Q??Ro?=[G?:V????V??????R???H???VmA +?*??[???+?P"Pfom?j?e??d???????f<@ ?&?r3 at 6??B?Q??"???n%?]??\k?C.n7?t4?i?c.j???yOe ??\Bn??X??cVmJ????2??zm??5?X"???0??S?a?;??w???J?PI ?J?,?,YC*R?Z?zZ???s:5?sN??T??????{z/?????]????????-;}??????iyA;SJCSv??''?GlK??$%??lM!??8)? ???? +?/?4x?Nzo.? (?[F???J?]???Y???Qn??T*??9????0??$_lQ?e????/k???=???w???f?%?M?U?egk??????? K?&"?0|&0Z?m????n???Vp?Z??????-?r??Z_!`V?y??\?.,????????)3/?ugN?WJ?N?mYYA[??$?9??xD???????W?d??mDL&? ????Tx?????)??]?]uT?h`??jc????'zY???????C????U???ui?q???D?????????W?2&?R?>?E?6?K?&?aP??6"j???S?a??C_ ?U?=/??ihkV????dk??????/(?w]?W???U???Q%??VD?m?+H,???P????$???@^PT?????f????X\??X\?D,.?h&>m?? p??:??%???5??y?t?jP??#Sy?D???????VV????+?? "?m?????F;?T/?9??}]?.???9?A??#!!?g???? ?????????? %9? &!???[N???? );?????R???L?h?X????t??%?d????Z/????m7}Vj|4??O4??????w???w?'?????zR??G/u?i??t?{?C??c at C =?r???J????GmCU??V-??~1O???????4W??I1?=??????1i???0:?xkt?1n?3s???c?1^??2^??????8}?=?f#s??Z??&??h?KC>??NFr(??G'Ez*?s? +:????6n??i?{^???e6}???1n^?3;?|c????????????????s?^?g?h??>d??2?p???M????&??{?t)y:4t?????4?4?G/;E?tSF?@????w??O8l??Ke????|e?????D?9?B??U-k?????????a ?.??x7X???Y?,h=?=??/?l?{?W?M?5?M??????2???5???????????J?k?1?+?E?f??pULZyQ?\???;???UoB?mO|?C7W?|$V???S??SA8??j?j%?Dv?6E????o???????_??$???M??4o??Iz?m?.x*?SR]? g-?[k ?v?^??[??A[Jj~<5??;9~kWR?cGB:??>?kk|>?9N?u VEl?????????GR? ??LU??Vr>"?K???o?]H??????u?31???cE+?P??B??N_???????G?C?:2?,?28?M"?S?0?E?*vS????????L?L??v?3 ?`??!x?xG?,?2?? F??'>??dHw?? ??4????????Vm?????]?k???????P??T?r?9<|?,??P*??????H?H??*?D|?*N????1??[!??Y?[?P?@? M?????R?{ ???j%?5???j?QY?;k\?Z+?W5???jKC6T????E?????y|Wy??3;G??)+ ??+?iY TAf'#9?lH??&b???$??H?@?& ?S??;?Bvp??K*??????5?h???!?6?&?? +u5i}U%?\Y??Xi?????S??*-zdd????yed~n5A??????????=F?are_?2H??0#?z?QdW??/?~Mn????I?KM?->8?w???????1x??Ua3?l??{?>mwNl?????]? 
??u???^ ?;1n$??&???/?k?.??????/?Av!??S7??3[_?[}"???x????)?EG?z?p8c?????y????Y4%wCh????M??`dbNaT|N????To8??7?6?o?#????t7?%???#? +?$=p?9 ?:?B??y9c??@?????2???-8=A8?(Mq@??d$%d???;N??\&;@?g?A?+??]????!??&??C?3)$??Q??5???FT??4?1?\?8??8??i??lZm?j?b???_K??y_????h~???I0? ?N??+???/??*?}?_???<?PK??V??????"?e?+%5?????O??c?;?M?]?f~]???3Yc??e{????x??s?>?s???s?i?I????Sd????n???`??E?f?$?D5?????4?!?? ??c!?????/??? ??>?i???t??4?d ????9}Zm??y???????_N1??n0C??#9L??[Jn?d??d??d?Q 0????h7w????? |?G?????@pC9?1???A!j)?C?D?-N?Y?t.??0?a?? 3?c?b?7 ?????dV}?}?S????U+?O/? ?h??? BkpF;p ??F??lP?? ?0?, ??P0??A\DXM????????O:J=?(???g:S?????d&Jg???bs?4G??????????{???s??N??N?~?3??N9}?7??d?:?=?z?[?KV?C7?S????_ ?C?~ ????.?????c??T1Pn?~qf???DY?9z-d I??>??c?mb????w????z??'??~???J??????w?k?q?[\?y-z???d????/???6??U?#A ??%??BLG5?'?????&c????? ?g?y????ek9?7r{??q?ye?z????=syo=???x?^{??uy??:=?y?J^????*m?>????H?-???VMR?&?| J?h(w??????$?uA&i6?? Jg?????+39=?+?o?????n????a?????????+E?e???????e???e???? ??y?E???W/????????G????D???Ao ?h?D?2O4y?hcH?5??40?k?X?a?.?nG?2????,???Z?m?z???~??o????nV?nTY7?.Y??J??)?[?*k?jTm6??=?j??]??+p?H?GH?q*F8P? ?b?' ???? ??????j???i???{?[b??]e????>l???????{l????? 9C9J?@v??1K???/?d?)??>?C???q7?3??????????7???k???c????,a?? +nK?`?b,?Z-?IfEG?M ???YN#??.?En??1????%???S?a?u???? ?3????"Z?o??????"4??Xx??-??|?|??`U???+,??t3\??a?????J?????[G?h;MB?????r?_??W(?6?$?.????? ??5u???-,c?&o???K?????`9]?n????Tq|??8?????b???E??????"i??P??5?4??U???z?(I?Y?V?o???????D?9?Hre??y?Cr? +????j?o(?????p???rB??L y??,??de????,|????V????J?????&XU? +U?u???(y?Ht5o?\???"???5Y?{#?a??o?2???D???w^G??G?q?~ ???????l}??P]??????{??vwT????\??r?W????*(Ke? +Q}X?PK?*1?*????9??x?*?????????Hc1????e+C=??6? ??9????? `?tC??e{{?Z?>zs?.?^?????????s?????ej?_??,?? ??7D????????\???X?f+V????S???Pr?l?S? ?P7????sQ0??o???c?#t?zZh ?6??????C?k:?;J?S?%?,W???)h-?-???-?`?F???%f7??????t?ErZ?'?g?4????? ????zQu?]??z???0?[@??m?]qj????T???????}?;??'? tQ?7}??=??????8?+!b?+?XB?7s?"?v?$@? ?z?f@?Pxq-p??????,???V?T???cN,'oJ\??z?\?gX?Y???k  ?$$??  R??H? 
2?mq?]?e?uvF?YgtG?u;????b5?X?????TQ?????)?????}????w??eZD??F??????6!???@]??_W???U*???*|????~r?.E?(?{p? ?C?/???p??^????????R?]Xa)?6?t? ?y???AD?>????1Q??p}??V??8X???????~??O?>?W?Y???????????rB?^?i??????+?{`[)?F?????[)U??H??6?????T ??]??????8??Y +?W?A{?e1??_s=?????k?i??pz?u?KS??sC/O]?b????9<??????U???'_? +??4?dh-??~`??T?)?\?D?o?????W8?R?=?)@D=??s????????H??`?x`??? a?R???B???????????e?^O?$???@G??????g?HC"?gRH??B?9?'? ???????vj?|5j??1;L~0;N?7;M??C????i99??"??k?>?6?????2$????+>????j??Hn?F?x=N?R?HM"?d +?f????? I6=&??o??????vRCV{?A?|j at t?z':I?????%?? ?Ox??????V???k?C0D? ??v!b????J???H???????@??$?D??9?????uXE??[G)???M?;?V?_??~c?G????{et??w???,?-?DwI??W?J???)?n?B?H{??!?Y:?m?"?F?d?B???l?*e#O%?????/??B?q?8???"zP??y????s?az?????L??6???7???^?S???Pg?U??6??U?cZ?? e5??l??=?:??A?j??Z?xOT??s????\?l?%a? 9?Q?'1L????}??^? ?{f????83????o???o??????p??6??>w=?mt=?mp-???^??i?rk5O???-?4=?\????~4?? ?^?L?9??????E,*e?%k"??=?G?i????x?#p1????? +Yo?0d????????? +??? ?gZX`Vx??N?y??e??z???e??DW?'?J??#??i?B!F?o +(?F?K??|;?L??j??a?? '????1i?????s?jR????????M??[,o??*??'( ;(??] -????bU??_???/j;$??w6??c???H6?; ?? ?y??M`p?9?Z ??T;x?:?????c8?I???3?o??2?>????????D?1?Z???*?????9{l??9,-?:%=uIVu??L?c?SQm??????Q????~*(?4??~ -?Bo?!????.??k@??O?Y???8?wma?4u?????? I????%???????3??lO?????;?8Wh_[?p,?????????C1}?????1????????a!??lh? ?.??3??_??[?}???PN?q??KSM??dX-^):?p????l???om S~PK?bw$i?C????C?G&?UH???/??yo?s?? ?.y \v% ??)?|l$?7'??7}???+??d%U???v??e?$JV?p????+Z?dz??4???L???Y6G???N?Q?/??????(?/???w?.????\v??????'?e?????? ?? "??? ?H???????@$B !$@B?["?l"QA at E%??F??cqm??????Zgj;?j?????????9?97?~z???9Y?????x???????0???|?????Y?2n??????.???D??qL?f??j? o??.3????r?M?\???v? +??Se+[S?^???$??OSR?!???t???Y` ?^`5?e?????????v??L?{?9?????D?;Es??x9??G?C??7?l???3??x??Y?[??????6????]?]??Z?f5?kR;?U??U)cA?)?A)_+S^?????A)O1N? ??p3???? ?}???I???????R??@?????[????-n$96??-?dg?6?rW?eI?j?%?jN?oUF?_Ez[`yzP)?8U??D+I?K???Se?_?e???1h:?8???_c_?c???w_?g?f? ?8*?"???L???Z??F/l???????$Lu????? +r??<#u:?&?-pqxWq? 
+??2??P?5 (??G?g?Qj?O?qA?|??Zo?*JZV)Ms)?dy( +r?db ?H? K??q?&X??E?? +?? ? n??OC?9oC?9??#m:o???Sk +{???Wp????C? ???:?|?-???M??????V??\RQ???L?r.)?z?xJ ?$?TNJ???|q'????):?M?qDO??????+4??y +???{?.??U?? +{'r????,??QSM5?SE?UUm????]\???(/Ov.,?p/Pd{?JD?\y?/_V?U\?-???I?N?????X??a,?O!,???4?qO>???=??????15v??? ???AS?E????????????m?2?.{??$'q?UX??"?"oOY??-/?O/USX?Vj?BG?S2?,????B????NO?i?y??? ?Efpg?Q?4?"mh??P?????D? 5W4m?_???ZR??NT??4W??????g?<3?%?4?????:?Y??Py????=?? ?!=^?oZ|?{?H???O{?g?"?`?#]3@'?i???f {??A??C,i???F??l^(l???????i??q4.?&?Gjc?WR???P??_? ?Yw?? ????M?]???7???????????0???c?Do_;@K@-v??ns(?qY?A?C1u?1to?????????6??i??H[??=?9?]?????sgk?wLK????rt?q???W??????????????j2?`?o??8f??~]f? ????~?$?E??s??Cd_G???[g?>=?5?0??n???;F??1???)?O???O????2??k?????=???z??^??#)b?;RD?q?t???x???3@??? f?(=????d?8gt?G)????Y{F6?N?j???7?d;???v4?.???!r?t??!????N?? u[3x?-l??[???n???y?2??b > ??a???? C#???? ???4y??zH????J???5???2?????E????????gZn? ?"??6????C???????S?/????>????o??1???L? ??F7??????O??d?`b'?q? ?]??-?? z?6OR!?????????kaH0 7??3??? ????????Wt?~W?Z?????5????????KXOg?w??>???8@ ?K???t']D? ?s???? ?K?r??7?nP?zc ????1p+??+??{;?d???Y?w?L??????{?l???fw^????????? ?? ??1?:t??;?<???{ ???S?? ????????q?o=`?_X???E???-???.????XHpzRFX?TKtxz????y????????&.~?????yo`?????????c?lBw?-???H?p??|8?XN?Z +?/??? ?^Q???5??u$X??'??????Yk?3?6?? 2?\F?6?-G$?Q??5F?T?{?dwqT??IJ;]????S*???? ?Q?^gu^?_|g>?y?[?Z????R???y??NNO +??f????????=?^??O?3??o???/????l??4{?v??????0????A???~>h~??71?}?s??Nt8t?.? q?6?a??3~???z??*F?K?>??????bz???h?C?i N4???=?@????? ???F?? ?p?6?O?))%?&S3?@???6f?????F!V?,0?[ "??~?D?mL??0??`?^? k5??L??U??!?t4?????@_??B??O?>T +_P?Bt?M??s?Q/7?p +P7?8??v}T??W?=???@?:T;???m???FP+?????&F???M??""u???1???x?. +?? ?>?D?Yo???n?T?Qw????????w?|?Vp ??D]?s??uh?@???P?V3?hu? ~j ?&?h?%*#???l"?????"b???874!??@L h???d??/??x?m? ?u??????H?a?h?Z?h?Z?xi??h?Le???3??Y??47I?F?q?TR??H.1K??,j?M?:I?j???yG:??-%j5R??Hth?Z@u?L1?b??@?'??yz?(??C?? ?????2??b<?A??a??c?4?z? ???}?d??:Y!????Y+;??+???c?Uc??U-kg?%??V??*??z?=????2?G??:?????z?-??? 
?\??s/????~?b?O?S?????7y ?vr2??????????8?V?????v>???\???T??r*?U:U?*?yW?????j^r?.s$???@dlZ????? }?????Z=???9{NC?- ??88_?8 ^????H?V???d?#s??I???g???/>?wbL???t???,[?`?W?i?%?l?G>l?)'?w?~??r?i$????????< ?T?B??]P???F{?OF????y,???&?????????G???,S?g??j$N??n????????:?)?z???I9??%I?f??? +q??&QTfW?????=?s?1*?&?H?????s?C-?>?S????C9??? ???AC?1???@e??ly???,k??T??????????03-lrZj??8%?"I?a??o?$??N??????p?1X?? ??f?[?yU??????`x??F??Y:}????] ??z?z?J?K'`S?) +??X9?_?3K????i&m^51??_7?8?0Jf?)?tcQ?yh?dZ???fmA??A????????x????|sG?W?9?g\?>PNR???W45??+??*u?U!k? ???RW?WI????U?????? ++????7??EM.M0]+?0??K??m.??)?1se???}v^????d?v^?^?y1??????ST?]??A??V ?T?P???z?Z??t!??@b=?%?????sS?-????h?? ?????5? ?Fd?W?:?gK?T??RK??z?eU??/??????h?Qq??????G????n?????????:`s=?K-Hj#?????fD??@D?l?????f??fo???5??M?N?k????3F?{???sG???%So?5s??6u????? ?5w??g?R????????GNSv?R????r@?????& s?????]!??p??'V@?B?5??}?}?V?pV??hz?ENX???]?&?u?+?w?[c???V?y?'????j4w?????1c???&N-?'??:)?????????=h?????????o?$?(??R????^G?Y??x*-9???X??????????W?i?SH??:?&??????y???9?u<??????????????G~r?????f????U?\?????`|??????_?????yC?zt?GA?F?7??j???.h????(4n??????:?????%w?N???? +r? N????? ??e??hn6?????h ?A}?jw?22????@???=!?AP???b?O??d?w??O4???S??)?drG?;?g????????v?+??u at w?? ???O?`=?A??{? x?xF?g?1xN?1*?1:t/??,?^????T??tP??w ??h?.??}?r0? ?w????p?xJ???2?I??M?7????_.?[z?? ??G?}=???e??3??`T?X??9?aO?q?Y`? ?`9? ????{??-??'?x??x? +???QO?i?c??#t?!??0?p??K???????i?a????#?????B?????????F} ????:??(?A??????dx?2???pW? ??Vr ?Q?u?? 9.???Nk??? +?U???*???????2 k?????B?$!???j;?T???h@?=? $ @??? ???,\????A??A?nh???O???L}??AF???{???=??s??e?-:??s???m??-;???f???? ~3???????7???w??Oo?yXo?????m???3?>?????????8?sp?$???;o?}?<^??;???./?? KalIad??tfq +??"yxQ?? ?????2?Y???????V??f?~??>?j?? ???9j?? ?~?'?n????????t???~>? /??|P?Y0??w??????`??~??#?? !????(GVH?X?4?[?cx?GK=?SB?YVA;????i????????????{?GL:?'Mvy?b?{?????o???q?????6? ??? ????D?f??.? +ZCA>?#A?I????z??{$????????]?4F?+ {V?w?U????????b???????7??????????j??n?????m|? 
>??ck??????~%y?P??3??a8???K??V????t??6??6???B??`??????3[W?L?Wo??X?????!`??m?,? [??_b???g??b????]???Z????F?W???)?N!????pL` }???????-???D???? +7l?????'1??d&;B??????u?Y[?????TY??l?? n??>`?)x??2??ME????????A?l???/?{9??`?h?q???& ?x?w?(?/???bt?:??H?q???u?;b?F???-:??%Jn?9R????3??(??^????.k?- ???z??$??OQ-?D??(A??)??~O???m?I? 4????i>z5??J?ZZyJ?49?Y,??%???O??,H???K(??????78????2E}\?h?????? +?_N?????8|??k~Fup^?? ? ?? ?w*????????P?\H??X?W?XiP&???E0?d1&Zi?Y~??\????I??V'?fI*9I[I;??{1Y??4?:?&Mx??%?w?%?3<@o??????B??d? :??U??j ?WcP?v#T????|??3?P ?aF +??Fo?#O1W?+,3???3d????????-???m???L?r???? ????Ey@???P  ??Dk??t?P?? Z??F?MV??g??*???i?&Rm?YrA?yb??2>Oc?)??V???mr?????g????`a???????i?p>???Y??!t??h??,A1@?#*7???.???=????){KI??+;??]?????2!?p":F?4.??D?]PS?Q??6C?v?T$?RRns?r??s??D?|g?c?3|???Z??|~???>??{?????a}?P?IvV%?HW$??/M 2?2a?<*a??[?Dn???"???E?s?~e???????V????LM??x?nr??>0??}?????_???QGkp?sp"?{R +??$[????s?. &C???J,?T!*??2}% 3'??????a8g?\??]?F?;??L???l?? ?OO? H?N91??????fB?E??N???????#?:?r hY?????3????%X?,?????Ey?0???G"R?y??z?37??A?y?-?a????f?0??e6????en???????4A? |4?$c4Qz^??gY??{Y???,???2??kY??k?mC?#]}G????8?A?q????}?sN?w?1?F??G?????cO?aL?)?*l0?R?J5<*???r?? ????Hm\???p?A?M? ?fK?J?U?R??[R??_??*??lO~?QF?~??Ko?! ????~y?s@???@???j??p??`X???X????????h?6|?! ??i????:??7??1???0;_???0m|??????????=?9C$?????*????x??Z?? 0???(/f?????F?VP?:B~e$?^? ?&??-?W@vu9??q??g??tp???)?????:??????|?????????m??????%`P+`?v??R~?????e#2???'?s6?????!??M?3??????9??[:vs?SX??t??;???d????9?/?Zz??U0 ?O???%?: 9?)?V?9u??????Q???k?? ??????d?/?u?1????t/?{????9rpl??5????N?I???g?/?LBg_:?3x????? ??Q????=???r??aKl??P2?x?J"?K?1? k? +q?F^ ]????????/??g?OJ?'V5???!???????'?s???@D?????X??#??????`??W>???r????X??????t?IW {X?"??D????YO?%N?-N?cF??????CDbK?z ????u8D???;???l????????u-??$?R??IW]y??}??OC?I?jqp7??'??A???????QN????Y s'????V???K6????D??#a9}????l?k+]ite0??4??5?WJ_9]5t4??Q\f??.4K??Y??&??C4?Go_????X@l?7?????gf~xl2?F?q?_$n+?qK?7kqM? ?\?oG?|????*?G????%?(?? ?4??q^q F????+?????T?iSQ??h?O/qdC???d?n<2????m5 7-g??j?????\???U??\? 
?84+???L?y?.?S?F?u?ZD??1?YW??u#j???y??n?b?;??????@??E=?x?b?'??????????:t??????w??5?W?X+?_?/a?l?,?%fK3??y??????b9e?HE????S?`?%t@??t???? +?#{ +?Y???????????Dw??)?#?????o??m?????W:?1??r???-?]o`\????3a?b;x+??L%???H ????Fe?A??2?`???_??;?F????(?????]?C??"?6[?R?Ur1?Ur;?E??8????X????f??O?a ?)???1??.b?:?[?u???]?9?1??q??XC?M Li`Ls?5??? b_???PQ?U????nS?2????JK?????T??6??cMr/?(??1?nr???su??su?_st???u???M>?^??????='???T???|????*?;Z??~? ?? x????a??\????x?`7l?g?????] ?I+?????:m?????X??5=??5#q5?YNu?Y???z|U???J?G?J?W\???????c\??a?o??????Y??%m??)n??IOs??op ?? r ?p +L?r?&?{????1WQ?B? ???!?????h? +?F{Z??Y?<D?z?!??2? c?(?/?C?B_?R???doM`??D:2o#?????????G}y?u??#????i{O}?w??W\?&??w s??N?? ?ijk????? ??61u?)g? ??H??x?W???(? ??/^??+>???{}-{|???}?????|7????K???q??w?????7y???s??S??q?????pN??G?6??????)?NE??2??m&?&#?"B??hHZFH?hA8-"d?n?f??m?iS???fJ??o???x>??y????????M??y?d?????{Xz?A%?O)0?0?Zn????R??+\??p? ?'|???~?8???!N???X??^??Yy??????? +e?$QDo>?n??3s3G???y)| ??#?????j????jm???'?????a_7??}`[?t?????? Jc,??03?Bn???U???E????8fJ??It??????s|.???????'?F??????>E?? 5r????[?'=e????????\???k???l? ?3????V?????M?4?W?A?g??8?N?)u&???? P?.? u%j?FT??p??p??! v?Qa??v? ?;??|E??:?????ns??;???0???Bs?4:N??H????Q?u?????@?&????????&G5Y8???A? *4?(???????v?8?G??3?@??Ba???x???>????????5gs\tv@??Ah4']??V;???F??v>*?_??]? +m,???pX??C?4?nJ?;?b?^???T(t???4 ?\?K?h?Jvk?J????????/??Gt?r??7???H??U?sCmq????????7*??b????B??9(q_ ?/??? ??q?7??%?t????M?=?,?n]?4_W,?????5?v?]?e???w?~Qd???u?b??;?\??g~5???! ???h%?=Q?5e??q?????Q?.???X??5OR??O??1_Jw????????z????L??xf*vx?*?< +??yT?ez?2??q?|?????<~??????&?????#`??@??A?w???? +|P?7E?????7???n????7C?????????????e?_"?>.V?9.?l??d??>?>9}?Y???+S???^?J{?j??'VIc_??}-?{???@;9cO?G??Dn???4Y? +?&?)?:i?lK?yF?"??????'?4O???2eB?2yB?U???u??Tk?kU????????????W%????w?a ?1?T?9y?W? +u?a???T(?k????(d?? ??I??az??????(yj?g??? ??'/?H +??\?Z?&h?ubP?*!(_x?[\`??W??mbo??|L~??b?h?6??J(??{g=?}GH?L??????["7?'?#a[?;6Gxcc?Ijx?4e?4?????uS????????}?i??d??a?2u??K?? ??"k???sy?>??a?Z5?h?LB?v6???P? 
?iB?Js"?E?q??????5?B}?0/7V??Mr???b?N??????4%?Y?j???f/y?IZ?U??>???>???????z???z*KV?J???6??gY??|?[????1?&???@S?e?3PR???/#?p?@_n?+?i +6?g??9*??N???????k??Qi?f??e?gX?????C???"?\|5?p?*?@Q???Bo\ ?19?P;?1J?,?)JS?% +qZ??9???5??bBB?&??B?wl?>IL?????geQ?7d?yC?h?2*?6??????f????? me?&R?1???3w? ??????Gv?B?*?U"?4E???b???$??? ???l???/??M?K??p?{ ??@8??????~9?|??W&??d???Mx?|C?~?????????F???{@ob/Ig?;@/???|O???N1???v??[?a????P~p??!`??A?Ax?I ?>?????w6v????????n ?m??3??t.?%s??F??,?z ????\r??\??0?????C?UG??'|_??:? ??Qn???^?????sk??7???7? ???9?No:?x?6z???????????<9?? ?? ?7??w????0?;|?? ??!??bq?l4?m?[???z?r??????s??A?????M?9??|E?^????X?ry?????4.n??GA???)?f??Gx?F???.??!?DF??????/??R?*@?c??w?w`?m??x?a ?[\`?W?5~?+??0C?-??w???_'??d??????t*x???B??pU??????-t?????Nz??s????R???r?j?>???}?$\Cb???:uN???4?????k??@??7?)PAA?? ?J??]t??vN?m?\w?l7??v?7[?f_XO???p???y????????C\e???}~?.?}????3?????}?2?L?s???9?F??@???l? +?W]?t?1??t?????z??T[???gf?'q??>??F?6#??j??7?%? ???{??H?|?)?XOo(s\Lg +??3?\????`N5??????U??e??t?????>????????8??G?=?x???V?t4???Q?????????o?f???p\}%?_1??????????? ?5U8???I?r??Y?~M;?k6??f'?4?8?9?^?5???????w?g?3?v????g?1G??'p?s?i????F??l????U???Z?_?????a??? ?G?B?????p????`}A ??=!??#d?? ???l +1?;????L?3?%t???????Z?-?QZ?*k ???m?7?*???*V?R,??????????P4?? ????x??2????~??l?`W?8l ??'|????3< +a?X?"??? ?f?? ????%R??JY??z?????????-^????B|????? }?S?/???_?T????z?g???Hy????ct&??0? ?h3??5:Fh?6 +?,bS?]\?#?Y(k?,??E.U,[???v?Z??E=>U?}+#N?UD??+??y?W??_Y?w?e??a??????7I?A +?? ?c,;???'???Q???????h?????h???????>?&-???????kbJU1?^K7zW,^?[?x?_?a??????pMYhx_Y??|?_??x??x?^L?}?? ??.???qt???F`??5?$???4 ??y?7-?c?Z?I?N?H????%?.yyB??4???$???(??? ~?~?n?;?? 7????.?g?+? ?+?9?????&s?h??}d??w>??a???hN???S??:?R?QcC?? ,II?S?Rir??8?)/J.?*H???K??u'???L?J?ig@???*?xE?0>P;??P9?p$>'??Y??????}Hv??v1??4?9???1??MB?m:?l?Q????H??'Z??k?,??%w[??\?2o?e?ov?JG?z???C?a>?NOT?R???????U??g?d?0wY????t??l'??w????hW??> +????$s*?2g??>??!?#?3??+?"?dd??3r?????? [???????mSY??????j???f??j??s???LeN}0??X?A?f?pr/??????f+O]??j?T8????s:g"? 
?3 ?l???N???Rfv????R?g{?9?}-???????????$?a??~Qm??S3>R?????????!?b?/?y,??@6??d%??w?????D??U????u???Fv^(yQ??/f??%??&??:???^)?J?$?~??6eB???8g?*?yA?}?v????#?????:?=?(??a ??G?I Y!??UK??HutX???\?????U???m??j??? ?fY???Y??b????,Z??x?7 z?????+?Q ?D}?80??g>??q7?sB&??????{$?w?~K?.??v?z?YU-?u???@q?5u?PW{???????!?v?R#?T??LS?C??%w??~?1Cx?7??e?6?/}??????t?",??u at k:9???-????Dk?n??<_?TPTA???? w5p?(??Bw???????????5?x?v?TM?B??+U?]???2U-?????Tm?#x???X]? i?{?b??RzO'5X!u???-????`?TgC?jz???????y??i?*??U???*Tpq?*O??3W??????_n???p?K?? ???}?8?!?|?$?u???A??????6H1?o??E??M +???!2x??X??x?zy??\?;??^6???zI???ex??H?q6?.???.)?P*~*???9?W????~??z? ???A??x?q??q?????(n?~g????\??M??WK)??????4 ,?s$?5 +u?~???0???J????o???|u?/?w(???F? 5!BO???~U7??uG??z ?I?f???c??)O?eX??Z??E?g?.???:?/)???w?>??????)?K^?????H?M??'???6?p\?i"?i8RX?,?yx?D????b???]??????M5?_d??I?XJ?E)????????ZW?]?x????u??????g ?L?1]?XGYG??g??2jrH9xVj??h???)???}_Bw?Y/??A?w?????2???T?z?????"?v8??#?!xq?Ixf??E?mx??8?<.<??????[?v?/a????bV??zx????#?~E?;????K?[/??? ??:?\?;??BO?5??0?2?E;???5M?? m3??V?\m1-?&S?JL9*6??c?X?M[???_E?SZ[???????r??r??$?????:[?c? ??d ???TQY?p????5Zkg???n????A?l~_%?Q*6O??? +;????ZQ?G?j=?gr?*?q??H?"?????(?t??????^m?o?-?????AKw???S??=?????hZ&??2U?-)????g?ke?C?#\rE*7?D9{?,?s-???%???8???? Y??>nY8?8????????9??[\y?~????6FGh}?X?m?Z??.*??R?u???C???????rY')?:]9??Zj??lk??Xs???V??c???2dX?Z/X?7???????~??r7??+?';r??'\?[9?K??'??????2j????yO????k?%1 ^^????U??Ta6???F??E?D]E???? ?@?????_I? +?,n+???Z?/| z?u?????C?????I?l:??i?ci??Q?:$?[0????2%??b?i+?????? kx&?ib?5~?K?-??? ???????_zJ?R?(??^?zK)?????R?)?KLu?1??????????,?L??e*@???&??*????X?z???6???aq?1)?7? ?oxY?~ ?????Yy??%s??J???i?U???.??z??#?? ?:?`?"D?? a?t[???? mZ-*?,e?X?h??0??u[? ?6a?%,????u?????9??|Vn7_?????5U7?[2??q_????+?{d????LI????z"D????o????V?xyhv?????Q ??????,ow ????Z{P???m?1??6+????W?|_n????????zKn????-????z?\Gk??????!1J?1?????}?~~5||??l???p???K??:??f??w0???????Y??bs????????????+k???k??W:n?+?wd???i??>_??????EL6P?????C???? 
?????'?ro???[??F5????5?`u??{?kv{&w????'?7??\S?r?iY??U?????????7eZ?????^????Kn?(?L?M???^???kk3????} ?w-j??`??@?/_!?}T? 0????W??}????{{EZOB\?9&)iNI????T???TM????.U?oK??{i?P??I?E??^Z??~?M?@??m{5??ai??9?S??!???@1t*f?jM??????????=&*l;".h????/I??K??K?-?K?????^???w???'??7L??f???`'`???b??CZ???f??vB???p!J?Z??P?m( +??}?&?d?!????07xJ??????~O???????\?u|%????BZ/S???? B??J??!?????K?3h?EI???3P???}?D???C?ao?y?:??{??w2??????qv{? v[?Av?AV????d?|%???i?$S?1"B?3x???t?)????&?bb(???'???Lp?m?????Dv&J?#a??? ??<o??x??#???1O??1?????????e2???f ~?? ?:C??? ????wi?H?2?&??`7??2dO???????????????Cf?O'??1Y???u???b]2?????8??g?j? ??+?>???a????S?G*?>ZQz(:I?|??'?&?\???)*n????c?? +??Y?'f2?fv;V??a?l??V??Y ?ua?l?? @>3 ??$??B4u???? ???if?1b?z??[?????? &??MS??????b???? R,YX??ZHS?@??Q*?? +L??m<)z?h? ????????4G???yh????"???-?'????)E?QW~x?, ?K????r???"??E2??d??? 2?i?=O/?yz????????*?1?Y??QZ4?n???(?????|?y?s?????????}?????_.u??????????vI??D??? ??s?? ????l???;????9l?)?j?f??m?S?%??J??s6?N#T??<SC?????a?? /|??_%?aB? ?b???}M:?pr?)M!?1??4?'?? ?)?S?i>M2O??i?x??+??L>N??????]?????lw>?F???d?J;?8?:???`P??a?1??9?r&????q???2?q???J??n at 8x??+M!?i?:n?4x??n????|??Ju????#? ,tQb ?b??E~?18? &?0???(???{4G??~?6`?????j??^C????s??????????(F?1??e????y???r?g?????u??????y]??Y?t??? +S?%?v?Y?{}??&z????S-3n?L?>j?gzn{?]?L??????Y????? #?D???<83??)???J?]Kh6???U??????,+????????j ??????????p;cu0>????g??0????o4?????"-s?,??Tg??????6R?W'(??|s ????????#????7ua7!?o??_?:????%??????????E?,8S)?ip +??????W@]??T???E6>f+?R5?U?Y? ???? /???A]\&????? ?? +>8??%F?Tl?,9P??? ???^?}??1????????I?v???D??+?V?????????-??_?$?v?]?WoX????f?x?0???8Ypr???INv?Jb???S?h ????Md??F????,?\FmHKm???U???*?y4?6Hsl?*??????Y?????t[?????o??\????)?^?)?C???3?,?U?$?]?D??D[ ?5\?8??>1?!?D?i3?R)m at Ig??uj???[???^s:?k?????PMw????????i????C1BC" ?#?#"?M??? ???Wf ?]'4?(?P't?-K?????????6?,???O|??[?????@y?F??g?!?dw?6????? zn????jXK`?Pn*wm?F,?M????? ?h?!?5??A??E9??xK??p???#^???r???#????o?????Qb????????G?ES?????'<4?=1?=7???^?Eh6?i(?8`?*>z?G\2??????,?Ed?U??????>?? 
+??P??A +????$??NS??,?????,?!0k??[?Nc@?1c?????2L~h ??=3??WB??`???? ; vR&?g??F??!?????? +u6W?????????7?}??Twg??9????????T'?"u?]??j????????S???????-?????B?\??j2(??)??XR? ?\Y?e???C??X~?]l/[&?bd?J???{_ZGh]w????'7??i????KHK???J?8???????M??????*?b?UxQ?1?\F????9?=???yS?7?G??l???a??'??es)?8?:???c8?zS?_?d?kH??R???5????????????\??E??????9???S?c1-???9? -K?HJ????0+i??$?iI?$????c"c?dLa,?F3Nb$??3?"??c g?C???`???2?:?s?\??}?|???????,f-???k??? ??y8????t??LJ?0????l7??}?b?4??4??4?!?????Y???s??|???????9?'????'??6???K9rwr???;?/*????{??_???tLH?((?xAFe?/?a??C (hBA+??Dd?????Gv??'?U?et^AG??!{???&?M????A?\}.q???????1????C?e?d9&J?aL?#?| ????_??>y%"r;z?.??}???)?DX??Q?)f??8??Y??????? ?????6?G oq????????@???? {?d/???)K??2??m?*??OY??]J#?J+:?N?)?? ?E???2?f??i4?? {Ug????U?)x???'??e??#?G>?:>??x\???~?4?^?dP?Quz?Y?R ?V?]? ?jB?J?v?n4????n?_? ?&?F????9*?k^!]1?ueh?i????????C???z}C7??!? ??pDpN??%?F?]?k??{?????b???x??K?????????L???>????iZ?6??L?r?7?k*E?i?!t? ?P{?(?q +???q????M???o??V??Z??V????r?H??q ?%?oo$?s???n?????3?5}?????:?Z?7F4(??64J???????"??%??l?W???L?U&??9i???????;???????? ????:???3:?MY"??????B?h??@?\?Q??B#Gq?9?5??7$?m????A???????:?^??p??g?K>???t_?IuG?tiY?J?????"?T)]??P????nr??Hw???? +?D]??v:?+t?W?3/CyY??????u????????aF???? g?t?y0 +`?(?UU2~5c??z]d??'q?!q?1im?????d????G??????????Z?a????[~?????? #F?.?????s?????M?aT2??F+??F?m?ZI??$WO`?q}???g:?? ?????'???o.?e?+? X?aD??????Dx&??k?>??LC???j?o???W??[??3L?>#4??K?>?;Vc}????P?? +?????W+??A?~[??>?X????{?C???o???o??*H???*E?9?2??i|&??@[E???@???46?C?????1A! +?TPp????_??(?3???5*X??lQ H?"?,??( ??Z? ???????1?? ????????=Kl??1Q3??1??.7??s??xe?W??????????s??H??.MJ\???????EO>??Qt?????????p6???BLx ?\ ?Z??E????l~N7u???CSM??l?)??J4)!9\??1?KNTlr?&??)???h?]? e^?H?:E?w(3V?Y&?d?)8+GAY%?U???e?????????3??d\??}?=T?k???a????5V1?EiY?N?Zb???Q??k???2????y???^??'?b? ;?;?7??Li\?4z?????(_{_ ??? 
?j?}????`OR?{?^?????T?????????????????U????z??U???T#?xXS;* ????????????^v????]?????????eh?04?(????$O?`?O?=?}????????#?0??5^0OM?5j??Rad???(??%??G???q?$?x?O??M?p??h[ ?????~{?H?[F?!-~?_????????x?+?K???+ ?$?0????4??e?B*\??s?203??`?????A5?t?????w??>?v??V?m??????tqz?????'? ?5NOG??'3???~6???~[??F? +Ul???'???Nj???l???d??.n???????zJD?+??}>????!??5?*??ekF?? #??`d??iql'?m???8?h!?V?z??Zl?_?l ???:???R?F????o ?{????|{zvt$??????' F ?$Sa????f?h"???c}Y?r?Y???R???*?|?,~?t???7????t??\???m????q????Gn???Ny???A?F?M?HV???g*9????? ??,8??T?v?V?] ?\N????tt ?v2uN?Q?>?b/w?>i;^?9~??????J??iV?}`??Oe_?ENq?>?|Ra??#?a??????? ?r?TeX?E?UZTah???r??????}?~B??Y/??~?o??6?????Q???M?/j?G?y???9P5^???+ZK??U??$g???dj??I.??6?3 +??]??zTc#?H`?%@???$?%???]6;fs???? uj?vH??u?4?$?%i:?N3]????h?i2i?L'3m?3mm?;?????g$>?????~???????.??0?4?x?}L????o3?|???gL~?????O~???w?=^?u?????????????s?n?d*w??zlX?? )?l??hJ?S?J-a:??????L?g,????8?i} ? 3?6??,? ???????c?9????ex?N?M??:???????v???%?H_?????????1?"?l^???t&2L??2?Q?pF)C? +?????76?0??k???8 at W??????E????y??????????/??N???_L????p?2?~??/??????v?VZl94?l4????yh?????????co!??C]????<@??a? ???*?w??F??1U?;????'???????^G???k.??W?lr?hw????B??A??L??J?Q@?????#??&X????j?*????O?/?@e?U*?????x_???6>?=???w?yN???W{@??~??eK;E??????ZG??N???ZW.A???'U?2??K???6Q?????O?{??e??xd????r???e??x|??}?)?????R?{?b?wq?t???F?E??#???8??? ?K_F?J??"??,W????6??I???N???W??P????o?????/???E?n%?k(?? ?R???0p???9?O`?)?C?G^a??7??S????C?g??g t??J????A?? /?G?^?????T??????#? a?1;?cO??=???.??Y?0H?3(?p?C?S??* ???t?d?n?@??G??????Q?w _?^?K??uM?5U???y?????x?OpQa???'?e?N?"?;O?>?????M???{p|F????G? ?_JF??.??4:~??w?qo???????d?a??Zt?@???????A?????>?up\?C? ?????n3T???R??*?J???????U?sR?>?????;???F??????eh??l??Y?j?y????=???a???*?U?F??7A?~I???T?_??????L???*??Wa@? ++?`??6+7? ???l??????^?7???qt???qp6?3F1?????S?Re?? /??k?!D??J ?*6?TdH????*0?+?x???TNp????(3x???,WF?&????3????/?kr???j'?/??#?3?_?`&?3?;?U??B????* {D????(,H???+??????????Tfx??F+#?B??Z?E6)5r?R"???Q???J?|K?? 
?s%E?TR?]??Z?????IM?=?y??s?WP?24?3???4X?&o???e ?X?Q?F??F??L)J5e)%?P???J2??nn???V??~^?? J4???| ??5?spS????J???[?@X??U|.GK?E*?^???W??Q??$c??"?|?????d?42;N?l?Fdg(0?@9?#?e?}??????#?????F?1 /?(H?????eY?]?UD?D??(????x?6??&??Tb???4?fL=b?f?j'N???????0??????????Ol?G??&?duh|?^Eg?%?mpE?????>x?]???Z??p?@1??p&#?d?????z>#w????Q|~?&??(.?^?? ??'*??P?m????e??e??c?5???+??G??G???Ly??{[?9???P]po??!]????,? ?E?>??????????i\?\z>'($?,?J?Ip??K3??Z??J)?Z?1(????\??\n?\v? ?Z?\- ?%?^???A?E?y????!?j??????S-?Y_?????b????:??r????2#??kT?4bud?i ???q??z>?o ? ???Mm?????(\???6??5??!?2?E<3??K?^@?????Hq??WI?F?? ??????kf?Z??e??0~??o?rie??2dZ?E+F[q??z?v?b????(\??*?\  ?|?!K??9??$????I? ??6?$r??6?v?x?:?c3~l????E??????wR?N????o?c?~p?w??z???? ???j@?sH]?N???A +?3? ??t?m?6S??? i'K?.?x???G7?75??!?&????P?=?*b7?f?-U??|g?d5H???L????] +?s???4?????n??f??,?? ??0C??0u??G{?=????\??????gA??Le??F???^H???7????1x???,?b??p??oB? ??ptH?N?S??4???????e?????Kpg0x??7{/?d}w2???8m? o(??80???:?cp +PV??a!?????.?F8=?x??K,E? ?~???\?C??\?????w*??G? ?w???&? ?q??i???D~?w???!??K??U?\?O>???Y?q????N????7!?I??N???#??P>VT?t=@?}??????-sG??=?2 at 1z?%*?& +?:z?o?k????????>???????+8??.???????0]???#???? F Z???ojh>|K?????6?c? ?????U????:8Z?h??V???.?@?S??$?=~??w?????????/??_??n(?X?f?7[Fc~?e?????K????????E?x???? ??>T?y??????_??s??o???7?????4????????h|?? O<Z?Y?@???&?9?o????s??{??u?q??d?0?~?{ vZ0=????(Z.??????p< ??/?8,??9?.?1?r??????cs ??????C ???U??)?? +????;?!#??z????}CmZ#?# L?? ???~?M?v:c"??';??????????V???.m&??O???6"?F?????D?I:???M?E???????}qPpX????Y???????t??p +?(!b'?+???n????6:????p???9?g???{|~8?}C???? ?????n??k)o?y???DU?4U?Ta(Py?C?? +9?V???$??Mv???xR??>]Q???l?o?????P W??????(????V??h?<|Yi +P]p??X???mp_???:7?[?????yV??V?? _???bB?JbL*???f??WR?=?b??n?o??9d?id-?U?}6??"7????A??R??????x?????????.GB???dO?*?:WV?"%&?*!?Y?'?T??eY?o????bo??pOq ? ? ???????A (????\???l???)?6Li?????l????$?UV?S??n%$(?Q*??I?:*&e??)???!??%gdL?? ?7e?????@????J,Z?V?eN???|> ?@HI"?3PV?x%8'+?9Eg?b?2??4%=_????d?X??????hWd?6MN????? +O?>S???&?~?N??????1??*^??Pr?|*?S????A?????12gN?)? 
???h?]Q? E?s?5C?Y??5_??*,?M?????w?\??4.?????(???6???%.f1?3?@1?n???????gK19d? Ptn?"?&("/R?yfM??*,????i?_?q???_???K??N??? +?;????iT?????rn+8?m@?*x????g( d?k????1X&C?Q4Xa?#??q?I?1j?'^!??T??Q7??y\?yP?????l??5?x_n?P??w?1?uli???Z??y7??Z +?|tV?RP??p????? +)? ? +??Y??????????g?(? /???????????%?B-Z(b3??G?*?????a?^??????9?? ?S?lG??F????`? [$ l?1?X:?C??\???G~%?l+=i?Rm?????V???|?R~? `&V`???-c?]vK????V +H?]-9?W/E/??`J??rxW$??I???k?c-y?#??,#m?,B????]\?=}?V]?????G?#|?e?%D|@q.????7??[???x??O? ??????????R???d??c^S>3pi^?W???H??/#?*??R??f?C3????)?/(?Y??[DS???^??q ??o ??????.S???'K??@=?????s???>??@?F??o?YE???|-?bD????:?o??5??J?_???{|???B????c :&?!;?g?4=yy?<^?'??q?8?z?7???Y?#8?W?D^? ?/?? ~w?Q??*????????p^??? ?A?? ?O|?]??!v!?:??^?K?k?YO?FT?p?V?yfc??Nj??????D????t???w???p??Q??Q??;v???Aj?_???B?D?????????(~?`0`" Xl?H?E ????A?U??f6`??????/?k?u2u3?t??L?&M????i??>?3'M;??N[??d?u?????;?????_?s?w?w??>?oe?C?b??a???y??X#;7???D?:??6???}???"NqO???????+E7???+OKsi?bb}+6???b?Z?`?vlb?v??????:??????e?8M{=?????]??##??????>?}Zb????('?i=????????K??m???????b1??^ G>?b? +qY&????cg?i*???ykkg???8?NO??G8?G8???%?????m??MZ??dZ"?B?????????2?O&\???????/????dO????????????rg?[?????k?3????:??Al???? ":s?0?f?P??,??X>S?M??????????Ny?t????????r??UU?7T????????????>SE?C9?????? ????eh\?a??bO?g????{-??-Ij,HW}A?? +,???m?T????B?\?????iSE?????TV??J???Q?]??+????Y?????J +?/?w?$?$}H? ?:???}??^???????[?\?fU?i+W??Fe?????QR?cDv??l??*.}Q???U??-?????|???Oe??P??+]??m??;?u??U??????V?i??{???6??=^?U??*ZW???T??du?RmWA?S???k??????fP9?#?v/)??? ??o?@?;?C?W???}]?t_??\d????QT???????:?`? 1?u?TA??!?l???????|?p?????r??2??7q0??3(???F??k?????n^|v??? ??F?t??7??fA?L??H4??b??7GA????ST???y???y?1????,?,??|{?5???.?"????n???PZ?$?(??g?X&v?Y?^???c??%?2M~??'&k?d?b?F^???*????-??+??2???????`?? ?f|??g'>??????4/=?(???????S???~??? ????o?pw???n?????3?xl1gn????g?????^?N??G8??m?3H??E?(zUOD\? 
???????k??J?t>?&??f?.c?U??U??{?@^???/#o?S????=????q?]??%???W??M?l???|??:.<???L???L?_??n?o?????]rt?s????e??d/?`?7^w ??~???q]?AQ?w8????ZX?e??ua9v?e??Q!xA???hL????c?????g??q??m:??h3??????????4NMk;?4?N???????????????~??????hW???Y???)?9???a,??fpG?????f^%6'??q??q?d??g??y??????16?Vr???3??g?i?7z[p?_ ? ?Mj}=h?^? ?Isp +?? ????????)?@?\d??@?] ?L?????w????>?7t????????G???u??Pn????i??4?'x?cJp~???? x?_???? ??????2????=?~???????z???]?O]U/N???????K???q????`??+z?D}???`??}?>??9??GSx#8???O?]???h???N???W?v?T?????V??p??%p?7BhFI????K?? ?l'?#x?0????LH?? ??~???1?????IK??-mx?c?i????c@'?d4?0??S????z???Q?j:?ht????X???|V????#???v?U'?T'?????C}??/??V????C??J???hu?`???T????V:V??@g&:???N?t:?????d?Q=Cmf?v???b\??2????????8??.Z?M)??cn%K??y????B????? ???'????u??#????Uj????F????%?G??O?1|X ?5?7lR?a????? ????s? ?????}4??h???E??UK at _?t???X?Y???0uEE?c?Q?324;2W??v5G??)???(?????j???[?1??3,_?U?nVU?U?~M?????Py?u?!??????????A?*??-????e,??????5????85???!>K?x????&?5+?J???[T?????>y?C*3????I3?w??x\%??r/?8???/g???%???jZ?a??? ??og,X(51??????S?T??$_?I?)9?L??"?D??????;?Q???*M_??r?? G?Fe??=??l?q?&dM??????????7?7J?1hc ?<0??+???c,?????TeF???rs???L???*1;??t?8?+GV? +??e?????L?Q?[6(??C??????&K?yeg]we??T/?z?Co%???B?N}>h???g~??Kc??[wn?\??r?$??c?='G?\? +rKe??T~~?????k????D???U?^???a="????????? ????)?????????:=`h?????xW?X?X(??????oKU?-K9v?,v?? =?,????YE?29??xJ??uJq??d?!???TtV????h?PI?? ?j??h??J???A ??O9?KA?6?Wl???,?Q??2J??^R??R?RK}J.m????$W?\+?zNq?m?uTL???.}K?%W???b????Q????/?I?fP????.?;?????S?;D??H??'(?<]?r???J????R??^1?9??X???!??X??*?W?g? ? +/SaeW^?? ?????p/??? ? @ ????;'?s??A?BJ?>?#??S?7E1?LEy???:????_??}? +??(????b?|???????.??? +?[???????N?[@?^??? q??n66?d?E?]??hI + ? a$O????]?Xh???ut~N?Z???_2?{z???/???A ???s]??]6??????r]&@Q?g??5 ???,? +?!?r??+????J]k?3j@??8D???1V??????mjc?X?G?L?MZ5?????6????????i?ow?v:??o??}????R?? ??8P?9\??!???"r?X7?k????????fQ?gy?cI?c?eXp???Zl?F?6???P???u??V???I*3?=????1?? ?o1 +, ?A? ?? `'?'8xN???a?qH???????1??Y?x????m26G`?y?a???? >>aEp4??>.p7?????x?????? 4? 
$?y}n3?q?>}R?p??m?;[F ;?o,]/|c?????????Pn?????@N???????&??Mbr???x?t???F^\=?????_?? +?7??_?K?c?? ??]v??j?~??~? ???o??o0?_?@???????5??!?z?N?(??k?'8????%;?Ev??H>?9?8|???s >???s????K?f???\??? ??G?S??&??z????0?E?w(???q=??X????&y????#Xop?S4???k? +;????H?c:(???N?]??F$?"??????????? ??w??`???WaC?!?;?~w?1?2?????Nt6 RE&B?????GEx?L???C??lOG??^e?;?n7?? +z?L\??^? ??????3JJ#????upO{??>??C????j??????(?MDv&?s?=??H?Fv-?"????? ???r%???(l??vP??xc??Q??h+D???q?Hg:'v??y)???\+rS?t?%???.'???^??d7!? y?X?C[??i??&&;?????~+? ??dZ???q??3???- ?[??F??????,?O???_F??B? ?n?7!?????????W?Md??y?v???3d?5p?P????qw????f???M?T??]??l6??K???I??H Szb????????%'*1?\ ).??6????5m?b??(:mHQi??zD?o)}??'??&????d?T??&[z???g??Q???Eg?(*?Q?????X?0?&?:??u??f]?q?????? ??? ???????w???.}??c~?Z??wJ???FLv?Q??{Y??i?}??2|??/???Cq?]??-X?????Z9*7???#.?|?*`??X?DG??q?4??|:?A?Z??8IG???^???7??%~????q\?|B?zLy?G???~X!V??????{???_?????O +???/???x t g???#??????~)?gJ?$??l??!?]?r??l?H???n?B;??R???????>??????AQ?f?Ei???O???m???c??~?????F?[ ?l??????l???????g???67`??e\????????ev??9?H???{??)????M??4u-$S???@?#??q0*?N??b???l????????R?=n?k2b+??????????)Gd???+? +??q"s?t{?=????yM{?f???s?? P??'d?????og1{????M??????l??"?I#???7????3?y???@?9?i?Wg?r?=??o?yF?0?F?$??~%? ???7???~;{??=?m???U?V???k?????>y????w??W???LkN???Dz,???F???B???X? ??h%????h??????.?X?V.V5?d????Z?n?eN?T?w??.??%7???P&????4?.?X ??Azd?l??=D??q8?q?b????????c? e??X??N?.t???.??d[??j?IZ?G????N?18`??6r????z?mU???F9??????0??S?????S?#????6?,??1?I??h??ip?sU?\91W??D?q???q?1{5?FK???+a??K????Y?????S?a8??*s??x?????O=?????9?????6?,?U?zj?8??*1?&t??????? +6B????? YQ???????";_???FSV?????????pU???????n?O?xvaT??*YV?z+?e?>Q?OL???N?z???pn?^d?X1?? m?????|??K?/??A???????F]??Hd???????]???Q?3+*s?|pkL?????51-?7J?v????1%?`?????s^???????3? ;?m?j?&S??????/)???Z???????1?(?? +?????Q^01? +J???"???????2?%??t???kb???1~?????>??????T?`I?@H?I????s?2?]?J??r?k??R5?*^????? + \??+??>??&???{X?+?*^%???????I??9?\?V?r'nb???? ??F???r6???R?@?1?jh?Z?Ob?8?G[??xtM????#???Zt^ ??? +MG?~+??????n#????s????s?~??????r 0???? ?F]?h"YM ?? 
z"HM{x??jB?4??4 H?s?C????r?????? +???Y???c3q?N?!;b?|?????;?k?Yv-=??d??+  ??b!Z?I;u?I]D?E?XD^?|Dn??l??90?B;[?????p)????-??%|xK?l??s?5p^m??).O8???\]?$????nr????????0??x?8H/????DeB??lk?o??????? ??:??Y}???\D-Az????Ml?'e??????!??? 9`? P??c?Y??r=?u-3b?5??ci??&?????j??q?,??????W8W*>?/[?????JN??c?F?? ????$v????J??0??Q??????7??8?q?|??[9s??.?x??\?? j ?Vm?T???K[H???{?c??d??>NS?'???16?1z?f?^f??/??O?^??????X?k????s?K|;??{?"?-????pX???~??$/R?23~I??$?Cx?w?#?????5? ?m?'?????w?:???z??? ??G ?_?????~???g4?Oi??P\oR8?"??I???i?uU?WQ??q???^A?\????p~? >??'>?txr@???v?6??????o??4~\????? +?s?.??(??i???? L??Sh??H??xs??E? x ?????W??Z?^??s?ga??%(U??Xj$?-??9?y??9??i??S?q??>C?????K?N!???^???????7%?Ip???]p\?R?\,?I?M????2???m?e??Z??n;v???-???~GcP?eR????F?Wl??K????lP?wa?S ????^????m ??+??????R????u?????#t|D? ???Oh?????:V???:z$??A&?????+?G up?#?tm???7k??&:h?{?_??g????9???????h??S,v]??????N???>??6:??ze~?????#L^Z?K?9????????l?&??H%5?1?to=_??????????oJ??!+@??*?B`;??-?d???`xl???????=?v??HHx?'Z?d???8i5'?"?U\$Utd?QMm??p?U?????k?K??????????F??I?? ?????F???R8??W??:?2_*?P?,'n*?EM8? '???r2u?L'??]????V?e??? ?????.??_|?W?????<:????w2?%?I2??d&?L2????? $$????a?%,eQP?E -Rh)?RE,?? ?jkE?Z%???R?X????2??PO?=m?ir?=??????}????`?8??? N 8 ?x??b'??Q???L?e ??x?g??Y?n?8v?8j?-??s2??? 13?eX???w?^.|???}? :I?4???$Ug? +?8I??c???_????*?n?8t?y?!*z9?7??:??????oG|,qwZ?b~???d????$?%???xC?????%V?I???/??GZ?Di??M???8X??1b1L?a????)?NL?;?????:!?8x?????h@??????gI?????wcL~??#8P??!b7Z?f??c?X?)b ???? ?.??* 6 c? 2OC?< 4-?S?h?;D ?+???L`?;????????k?D6?ipI?Y<\\p???(??????j??? 1?Lj?K?9V????h?? k ?v???r?"??6J?lSEg?xFp"X1\????4pGe????wwRD{"??T?'??'?E????U?5 at B" d ??*v?h?x????>*WtQxX+??A???Gc?B?3?~3?^???r????M,!:?Dv8??n???%????:???b??KH?&G?h???? ??1.?^3?&5?f????b??c?????? ]? ?pp?!?U8???? ????@d? ??}\?? ??N???h? r??(?8?;?;a????H??)??I4+I4)I?B???D|l? ?88?NH5X???hS;????N$s???????mD8?uP?N4????4?X?),.?????????4??4?t?4N?4? 
+?C*?8?L??xR?I??`?;Y???K"????D?H?B??g???{?6O?h:X?Y?&????x6??C??C????C?C??=??h29?3????J;-??t&???d(&y%u?ED???{,=a ?["?????E??52-F2?Q???^5????.??G ??0??`??k6?)?????B?ze^?[??al???.??6*??L?0/??1?o*?y +;?Z??3?4?@M????>?y{M:1?yh????9?w???}fn?W0?#}x??? ????ae?Y~rf?(?f?8?9?b~C??????????x??t?f8pD5$^??C???N???t-?????e?*z????o?????!wV>r??????a???S??}??3??5??9??????????zd????~p?t?????>???? >#??/^?t?2?_?z????m?=f?C??,M?e????&9Q??????{v??????_4?o??X???? ?????n???????????6m?8?qh?????C?oo?C??Pt??j???e????pFqh??q?????????????O???k??|3?'????c??????g???<[U{?T??} ??~???+? 7s??%???_?QM]y?c;G???c?yzf:????mO???9?AE?V????@,*???M?" ???.(!y/yy ???????@6??H?27A,L???3?????~????????!S?????o`??j???J(B?_? U? ???z}}??A1?o-?t?8b?>?a???H?`?????J?!(V???c???? ?/`??|?0| 4????Dz??G%?2TC??M?? 0???F?5|c?qa?? :?+?XhC?rJ??S????V?D?? a??G??Z?)8^??w? ?? ?? ?????????3??? ?f? \??S?n?;4b? ?????_iG&6??!??? ?q??\?F?eK"G??J?}L%f?P?(??)?PK?c?R64j ?o:?? ~??p,?3?k?0??h?;2Q??*Xu?KNqo??;k??4,\O)#?PL?1?/???.Z??]???v??-??j??_?b>Z???8d???Dx?P?????!?5?????+9?? Jd ?D?TI??????&?????.v???W????~E?Whl??8?'?fpO~?eoEn??????4M ???I?:?&(??;?Lj?W???.?h????\??;?/?????B??1?G?5CO?????G U6???0????;I??3?F??o??a?0m?6L?? ?W????j ??S~]??s?Z??s????[??g?$??)=#?7??? /?f???h$??U9 ~z?`? ????c ?5?c???`???\-? ??6g???D>?0?@?A?1?a?7?dPC??g???7\'?D&>??*?t=?n??j???e??a?z???{?P?7??$?? +?t??????????QI??S*?Y? ZI0l?x???4T???????R??<7<0*???\?X?C??)C????V*??&2?pg?u??A??Z?T?????t???)??J(?Q!0 ???????i?/?????S??>L- ?i9!p[S?*?l? +?Y????nb???R?t????S?????????0?)?!?P2?B??PeG?Z?????? ??s:U>?:????J???Dj???9?p? %W?)H?d????+z`?]???4??J????*?t? ??F8 ??|`(?Q??v?????C*???T_????Y???*????ka?0x7?m(????9?K?{`????????E??]???)V?"3X[????FF??d???9??????-?D"??Ht_????w +{?.05???,??83?%?Q3??)?R? O)?5!?&??~???; ?H??? 
?<0??????7?e??^?vw?????g??8i?:N_??4?????m?-W?~?+?]??.[?i??Why?????jD +????;?e??????=?????uQ?u?c-??X?U?I~?0Q|?Nz???y?????f(??Y]???t????K???o???Z??>????t????8????????'?v??0???%[?????Rd ?$??Yi+?#????k?%??????N5??3?+?> X ???8?[?????G?r???????U?6??9z??>??I]|s?6?p\{??$???TR?G?Vg#S k??w??W???n +????t8{????tgN??=??D~?U?]m?V5??:bN??2??9lL?=hL?;`???j??w1V??9}l??????????" u?H??*b??3Q??????8/?? ???ko?2?L? ??i??,?C??00???{}fi?2#?Zj?Xt7]B?[hE??g9 ???u?ty?QC0???>5s?X????4p I??.?5?1??;j ?58?~0???Z? +?????|}%:???^?=???a0?? NC????b5E?5??7?U???kx?6?}?a??^>0L?a?U5z??#$v?9C%0?8 v?t +y?a?e?iX??4l????? ?pm?J???~???r?????N`0rz??A???u?\U??CxP<0?C9??`?H?? ?{????.???b ?C??????0v?hZw#|8NI?_?2`? ?C?x?X?;??i??G??ka?h?F/ka~W?eD???#r*???XbC?r; +z?co??)?=????7?u????q??>??}??i+j??????3+??.UWA??*6T?;F??j?R???\"7??$?$$$??s'$!&$?@7 ??N??T?L???v????i~???'>?o1????Y{???u\~?dH?????????%?\?????? ??wm??>?;d??????8??W???W???Q?@Z???:G????q??G?V?$???9eV4>oS?9?-??9??2IO4a????c???8Nt?#???$^To +k|&?gL?8U???b???G?rp?}Cp??*s?M?s?????????)M? ??????$F?b??#??,1?w,qM?? ????[n?q?s*??zoH1????????:Hp;]?>????|?t|??????????9X{x???[??l??oX?J#???????k?????J??|Ryu?Y?????!??????????=e?A/?G?????>v????}????.?????????K7qa?wRC??_u>x~???????4??u??$????i?Zb??????l ??????R??3????Zls_u<_W?S?+?z;????eE?2?U????k?B-?????8mU?P ?k?nL??vl??s????&???i?;+Bal?K?????? ????"5??Q????? ]&~?????ov?????? +?zY??|m???3bf??R?x???????Q???.????6\????kl``????^?????e?????a??,xa'?_:jx????"g??Y?+w??????("??l??d?5 k?H?@?,?'_B$?d?,?@?A?!??~???c??0???w????I?)$?r*?fO!??'Xr'??????%???r?+C??????M??s????R??k???%? [??f7_9oq|Q?qs|7!?%?r?o'7??f??DU??+????^???e?&7e??c2??[??9???????I?/;x????? ?q`J?c0#'???s_<??????H????IU1?Ur??0?????? ???Wp?6???????X7>?W?ZjC?Z???&???@????Z?+??-?>7W?1?U M?j?Y?T??j????????????`?50?>r 2?!9??MBn?d??_?|???8p5#????w??)S^kl?1????4?l??X???wz?? j??z?????}2? 7?k????qHNDz???H????b????Y;??&l?????9?h?7 Xu C6mgq-????vU??1?(?K?????z?A.d`F?A???CZ?KK????(NR?`?X9f?BO???a?a?m?-?????%M#??i?,?e?c????lhy? 
?E:d???;?C??k 2 B??x????s fR???B_????vf????#??g;?l?s?upe +?Y??H??[??F????#;?h?5? 2?!?pt??????z?s1T?P.`??N??QZ??2Kl_?????d;??n??'??'q ;$n^?d??1? +??a?? >?????k 20?~??*/7?????:???m???H??????vi??_Uz?T??ueC?yD??M?3w??Nx?????2p#?'??A\?Hj_?S??,??*V??????y ??9I?????uEms? ?????O?8D??[???????O ??1/*?'???'a??i??9`?*??A?lX?j???t~V??l????????~?.?_???l?[?=??@[`?`?? ?~aX??0?? ??D??Ib????mk??d?!???&?? +??W?0?j[U???T??e?'?q}<?????d??P??????@??d ???.?~6?????>.??'???????Ry ???&q?N'??M?0?J^?G????1?????0?L:?G??wid?6?O?h??#AH?!d???? "?>??T????uE??L }P$r?????d??GJ?M?P?J?e]t?I%L??g???:??? ??(T?v??p3?l?? ?? ????G@|o42??xT?V?5???zV???????t???T??VU??????o??l)?Oq??/T&o??????^?p3?N ~2H ?2#?;c??????>i??hc?????vs?`~>Z?3$?)?????7zN????+c?z?~/????????n??h???}?\ +9 ?z(???????D????j?L?????LK?t?T?25??3?NWM??2?NO?,???????}AE at A????,*?[?AQD  +&?$??????&?9???}~^?????s6??{6?Ev?`p??kh??>??vz3z?OoD?.o?n?y6??k0????|??????5D??rd???*???Tq?Y ?39??J?r??`?2?X????r?<A.???Gl???UG?wC???)?W??g^?]?nCy??????Y? T??0???????f?%N???0?YHU???g??Y?G*?? e1o?????\?R6.g)????2V"v8}??|?J?=?+???????n?????3D??5?^&\Gr?+?=????Zc??L?:I?fNS??gs??4x5M?U??2g?2f????n}????z!????5??)???????????'v}"F?????R?=????^m??-]?,4%???4i??,KO??\ O?? +?(m?t??f&I?U%h3????????B??NZ??|??9????rGt?%????F?Y/??!?b[x?itW?5?oIb?2?f?0??3?b"? ??F?$?X+A[??6/?????????'???'[??!z?????/??g4? U??? +??l?6?lD?qk?f?-??ZM????????B~???FY}?:??? B?AqZ4??3???/?w??{m??<+?????y?7??=Dq&??Vte?????Iuq?????????6?? ;l?????:?j?t3????????W???/??M?p?? ? /????4??/6?}!?]Hn?A9v?!???8Z??????(z?????=l1b_?`7?4p??p??1????o???|?e ???:p?Z ?~?ny????~D*?D&??? BQ 2#??9??&??>?f????????????Lt4??  ?1???f??/j???E???$p?_Y?G?tp?~?? ???7" ?!"2?G?h?????q? %??'zL?;?'??TG????? 8?Y??D!???\p??Xp? +\?!??s4pv ????{???A>^ ??D??d?}?????.@????. /????? ?p?h?? h?rT??`??????????ze??f?$??D{ _????;?e6v??h?]I?XQ}BK???%Y6hIR .&k??Q??? ??#?n???????=&lh? ???6?????????(?AIu??V???g??)?6b1???g??g?p?-???????%???R?|??2'??iD3h??3?P???x? ?`Cl?? 
U^??&(??:?_G?)M??????h$?*???B,??????8?f _dB ?L?b?b?R?KWKg2?R9N-g9?:64??7???y??r?y???????0?~m m?"??^?f=???? +[???%b???/??R#F(3?%? ?\?U???*?^%"8???Ol`???M????????P?>?a?vmB??X????i?? ?????/??!???>;???????I????:?%$??hd?R)5?5????????N??"(f%D??p?bV?75?C?R9??&??>??a64?????"?x?????6'?j???Y?AwZ????????\??gN?r?UZ????0???Mj?Ir??,???Oj:(?jGZ?h????? X????7??D????0??a?7???DuYj?Le?1???I?~f????Gur???E? ???i:>mB?+??;?????z?h? l????? t{G}? ?x????Uao?e?r?;-??Su?D_V???k?s?m:u??AD7 +????qC;}?I+]???'t?d? G???gh? ??n?NW??^??w???aiWQ???d???[[P????1}5uj??B?????IH??;?$??GSK???)7??e ????????'C????{?????C?????s???K^ F5<???t'??yh??????*?W????bKG???Z*^?2??? ???!52KezG?h?? ?`6??????????G?+?_?????4????????* ???????????V?E?Y??:?}'~??5c??=???5?I??m??7? Wf)?????Ven?????"'?M????d?n?K2?a?l?c{??5?\???hv???Y?eG??s??p??vN??]??q??W???8?>???0?&M?X?[????A?W|?@?????@?}?????? e"S?? ???N???`?2&?i?W;??(Vx?gI?9????Ha? X@?E~???@4???J?? gu??x?,W~?@??Pw??????!?Q??s3???'u?H?????KW??????&?O? +??A2j#E??-I?$ym8q?S????\\??]?^?d?n?????s????dg??????V???7?????????\uJ?/?d&?]Q*L1U{?Ao(??S?????:?D\=NA{??3_x?? n????a??Bi?????7??_v??5???????V&O??? l??????kv1 +N??K? ??#'P?g????_~ +?cG???d08??????D ????zx$?~]=1?&???h "?)????7?#?>???R?S??9?%???EBy?[???gc??e,`??jQ?,F?????{M? IJ?????????5???????!^k??e?(?s?B ?w;?/t??>?a?r at j?%??[rA???b,j?????y???????5???X?????+[~ i?D??J?F[{?G??bF?8????Z????T} ? ????A?????Q,?"?F!GS??d?J?JR??????!?]???:??|????g?- /??V_??0fm9h?{N5?????x ?}??&3?D?X???O?X?????X???>4IY O??.\8(X????co???t??MN!?\b1o\S??YQ?~?WP%??'D?E4??%? +i?S???i?????Z???q???T?? t????????a?>Hn:?A???_9??_3???4???"???# p?H??????????(;`-,?w5??k%??*??L ?k?.K????t????F4?????!??????/?pv?j????+??]???l}?* ?B?Gj?;.?m ?w;$??Md??Vb??fz&b#/?y????N?qXk??_???K_ACm$???@m???zp?P8{???????????jOg???0?vt?hq???????e?@X?-?C$!$!! 6?????Q?:j???" 4?8?? ??????????????w?^?5???%?Sq?#?~??!???`?ER?? -????0X/?.C??%??"??q???5??x?x????TW?:????;?Bm???8? +? ?G}????????Ou?\????b??l???-?a?=&F!0?????????7??????#????h?0'kD?X!? $??? ?????X? ??69? 
?X?#??K?X?<?|u???Mk`??&[?aj??????m?=6p69?????g??dj???uh???I>L???A???9Ad??I??n?/???e??????? ???`??L?Z?t?,6??Z? ?7??y?~?o? cP ?@?? ?v f?????c?????2V??u???S ?Hv0F??%??F ?H??c$OzO-c.???-???????s^???V,?r?H?x?F???F??a??8"???@:??? i?R???xI??hE?Tx??eH??y?????{?c?#????K????.3????????+?????D+t! ??A??? ??A(??Q?????yIX?,?&m:?%x??%}'????TL??*yz?\C?W1@?????_1?U=?????G??F?Zk ??A??.?k?A`??&???QCm??zm}0HH??4H???=kQa??V?/??YM7??????Z?1^?4\x? ?}P??U?$?.?*. %?????3,????????ooL?o?a??VpN?s?????%??M?Z??n?B??.C??U(??-???/????n?1??p??t???txQ????[5t???8?%y???4??S(W?3jZ9g;k9??*2~?,?=?(???)y?E??????-???? # ? ??????j??????S?G?>=ko?????a????J?????KU ???e{>??I??T#:?^.??.?=]$??](??S.|?!??? N? ??5?CrH??4n^???G??uo?m}~??Hs??t??????.??????5(??!R?Q??T)??UrIsqnI?\?v:Wz?L????9??????2q?L\?ZY|?.??^}??????????5?????V?eH??????tYC???h??T???????o[?I??-|?k???c??? ?k?t??67q?=???;???7C????Y{?,????W?\T?Y+???&???Z??]?+-?9??L??tYC? +2~Q?E??ukU??????c???>?y??+?? n??9THi?:?????#???????s?[???M?J??$e???????+??+???????????ea??+? g?2U(8?l&J?+|n\????*???Z?6???nt?V??e[?????fg??8x?k_???A??g??x???yw?_v??[?~????NO????+?=??%?J?_??3tF???LA??S?K([Q??sH????!??7??? ??J?:? +??xe?g N????{?I?N???$????H\?_/p}?S????}??????Om?EnE?y?e gg???sHo?,??my?v i?W??i?Z?????Q??Q#??sj?E??&??:,?O +?z?3??>?~?e??Y???0????v????????V?yk@??B&??H? +yzk&i{?(?_N???`^?73??q?-6??'??&h^?N????P?$;*p?{???0?sd???)Oo!C????M3HS?"?>?jf??????i?>m?? ????5-??1p??%0{?\??s?QS???9??L=B??$?OT%???H???&???}?o??<=??ULH?uB?>?A? +?bZC?t?c?f&v????X?D?? +?[???p????????`'????f?t? +"??+??L??XL??X????????U??&B??RH??9?;??B????! 4?? ????@?'%?4????r???"??da?B +4?'b????:0????N,M<`?????lh????j ???1??l??R]w"?m.{lG?\???4???wR?OH&fQ??`f~R???j??????}?_???L?m??r ???`l?k?????cd?@?]1???????C??.2?t?o-??R??????d r?r?f?0_1Kg?@}>k1?izj????&?,`??6k?A]k?u???????w?.?6oB??D????[???'OE???/d???$ B?Afp?8??yJL,?M?r??????a?#??????0^?????z?&8?? ?_??Mm-?V?~+5!PWG??J?j??, +DK?_S~[?k??@;-??!??? p"?????Y? |A+??CU?o?`??'A?~??????g???W?(??`?>????`M???EC??!\??? ????a???z??M? ?????!7x${???'???yp? ??-?a?$?????(?? 
?B??'cL8???O_??(YC?b5???5??j???>B?M!???'t?o????????o<8_??Q?????????]4m?'???3l??)l??%?=s,????H??b?????(?Bd ?: b?? -????p5C???\???xx??-???`8???? ?%??S?7????G?"?0???=??_z?E>?|?j??!??")@?J|??!b?bvk!?^1L3???????q\?;vV0??B???z1?Z??????????8?? +Z???u?V?x*W?Z?UQ??'(??!+?? ? ??????@ +e(???????? +Yl??R?{??w???^??|?x???]????3?)?Y???5~?? ?/p????[Bg?[??@?23?? h???_ p???. at q>R???H +?q?? ????A? ???"??w ?/????f?;???????c????% ?&?^???@i [?4/ ?lH?]?{W??M?v?[?8w?]O??y@??0???1???T?.?m??X?????4?s?e???2?5B5zOS??f?=!??????f?@>?G??j??s*??97?E??q?:iB?T?b? ???0v???@?????.???????1???#2/??????L?W??JMm!r?M?\~Cba?t??6?S? Z/-M4H5?D?5??pc? +?x?7.2?'C +l ??? {??8????c????7Q???????P???~y??( +??K??`?h ???:rVVUr^n?????(??*?8?B????? 2 ?(?????\S?? ?mV???????:????#_M???3T?=?????n?_x???k?"?M?8|=O?f??4^vI?LR@,U?M*a8_J??rI:???Wd? 2!?(?Z"?j +l o?????t??????.s? ??Q??(\?$????L?H?!?g???\??B6s@?N)`?????<6]?`? ??4??jch????? ?vZ?+??j???Z?=????G??4:??s?lS?5???????l?2??????"????)gg??3?5*??(?3RHc????Z??S2?z9?i?? +????6.?-??`??7?v??k??e????????O}?r????:??JDIiX?J?J?Ua8|9%?%???9?|??R?=.? _j??Q5??U?ptJ +[/???????????????????L??n?????{}?e{??}c??????hS?????9???;^?????+???Hl?$'?&0?Z(???L?_?&???C??d??%AFIH?Q?#??rM?W:lH??lA +?/3?z?-??NK???5?4??????5?FO??<{P6?|F0???|v5?@????????_#?????'????????\? ?y~2?v?hBX?(??u??j?q5??????????HM?? SA?? +!z1 ?R?ih????"??Qx??E??>4?J??#???E???T?vBN=!??cs?Ax??>s?? +?VmA?????H????.M???L????Lc?4=??X??E?PV8N CqM? ??1$?p?-??r? +U????|@??????:????Y5X?T ??l?? t?1?$????vH:x????HB~4 ??X??G?L??? 4S?????G;s???z !-?Q??G?? d_'! 3?? {#;c?s!???\(B???9??LE?w2X> ????pm?????nC L?????}???H?7??{??M.? ? 5????!u]??@?? +???b?{B??????z???%"?}3b? aQKB?Z??@?c#??l>?E?%?n?{G????BD? 9.B??h"?!:?})?cAh h??CA?_??? ??Q?(H???? ?8??H?????b }WR???q???S|?/ +?pU???@?Z???r;?c????C????? q'9?@1?PD9??:C? +Y?;?t/???P??PPIIDf?k??????#Y`?C??l$_#?5??%???V?[jM?????)?t?C?? ? ?l?{??:B?? i?+???CK??.???`(eQ?k?????`{?{??8,D?11Xgs?vI +??fR?????? 
F?l,yXfI???93??\T?p?j?q-?N???#????;?k?N?[?gl?????.v?????sQ???b i???0U?????+???2??[_%y??![t@?)?R?Z???yJ_J??K2v??1>?????&*??O??)&*?VNT??l?{v?S?UN0Qw?v? v?.????M}????+#??1??]#g??M??Y?U???]?B?????P?c?1??W[???1?+?SyW?jk[???????Q?U?????]?|SR??L?????j??>.??5??GH#?gR0????T,??/3?=?f?????????]u?????e>?? +?}?s#?????j~Z?2??*aK???BEz}????4??T&N?1??"?Z(d? ? O?H&!?pH?KA?|2, OV,???]????????M??4?Z??q????r?Pz??? "?ZW.II)f??xb?4?H$??????r?????????pXj1?9.?05|m`??@80?((?C?V4???ZHi\Fy?j?s?????&??z???????????* ?c??) +)??#p??q6?Z??u??y???W?L?h+{L??R ???6??@?????d??>???#???????I?Q??yv?g???c???2=?'?^M?l??7l??I? ??`???Y??D?k?? C?P?x9????A??V?%?v??r??x?nO?q?? ? >|?1??~V??@^??`}???>???$???}???t???????l??r??J?y"? +??M??~]?6??S???Y???K8#;?0??Z????D{9?? Gx? Sa??HC?_TE8?T?????1???S?c??X????k?5?~1?M? ???*??^T'87??? ? np?t???/P_{?RG?f??^?T0&?wc?????!????? A??B???|?E??["?i`??????~?n?|@???J{??q}??ra?]'?]?w???o?????? ?? ?bp?(???h;#\M?\A?r???CD?-?2G?????~?`6z?9???}?o?????????}????#}????f?2?????Y???5C?<2",t??>?5?m ??l??B$q?#???$;De9????gB?1????L?S???>??|?????g??.g??g???3???3??????D??`?NA?&???A??(???x??????B??Dj????x??x??:6???_?3?????[0r????????????k????????7?C????????????p?o?4C?!??yQD?MZI???G??dn ?]6??N-??O??%?x??)}??T F? z?:?k {~??w???W??#^pG??????Q?????+??Q^?? ??d?Kt??|.r???O??????"?%? +8k?d???3\?3???E???q??g?z^LS????Q}??1??]?# z?=|u?>nECt5???H??$?KtM??.????`?\(7???QM???-FKY????6????i?g2?????G?J???????????q=?_D n$???M??I?%?$?.@??x??' ??)g5?H? !?5??Y???J?R??(?????U???*},_Wp6.L????b?f+?=? +??4??vJ??????[?gRo?:??%tK?$??\?#?(?*=??Uz*????????Hd?[?????(X??2K???5?U6?????T{??|{M????e ?=????????-???U???t??3?!?r? iGJGV??+??????x,?T?????/????>?>??(???Izd?2 ???`??9S??c5?)o?????????[q_f?])?w???Pf??* +???9?? Y??2~?>-9/kK?)???_y?dD?,?????k??????9??E#$S?H?^9D?-5'???????<\?F?u??~?a'?|????l?CM?^??Y???(? /?&?)?????f??6???G?~U??OaC????????????9S???iYm??FS??h??<32?????\k|???`?%?E??n_?????^?n?slo??=:U +?y?*r?'??[??????9G +???????x????6oH?7O]?G>???O??O?U???V(?? +?DQE???P? Ru? 
!?)????N?P??i?\??6 +??????3??N?,???mho +u=???R/ +k?M??+?Ug???:?B_???f+?I?? ??1f4c?2?N?#"K$??Y?? I?$?A()???-G????q???{J?s???b????{?s???>??????3oEyi???J???????E?%??sK^??L???? +???y????B?DL$oH#?S2 ??P??Z??d??gU?A?r???R8g????a~?t?]??p???????;??????HR?gUTH??H??e??d???x??IY?????Ry??dZ*)QH??$3?)X? +?+???z*u????d??????s??j??+?d?????+_????<8U??_???oZ?U6-Q???V?L????;????6* ]????%0?k?r???M?%???N_???`???H??-qa{??be'?vH?wf??e??U?&???k?????S_3Y???T~R???:????r?????c?[I?/??~???N!?r??????z?u?y? s????b??]??9??^???u??:???z/t??????@?/t8????6???\???F'?????7^??????a????v????f???+???w????3?????????????^a???Z??Fe~??Ev?J'????????<(?B??)??????[h?~?/???Qo?G?5?~F?&6??g$??S??l?????9??????_??D_L???3???-?Ckl?Za?5??Y?4????S???n?g? 4??_2?? ?=?????5 + a?T F????i????z,u? :N???Q+lP???W[b?>?a?Zc?l??????>~Q???A?u??C$??? { ?W??S????M ??4_\??D???j?h???? lP?pzkZa?"6?k???%??????k?\g%?]???/??z??D???&2?U???E?????q??y??F ?????-???4eX#??A?? +TMq??ZtU?Go?U???F,?A??%????2M ?X??????u?2?d????~??L?>x?|!?@?Q??E- +?Q?q%??ihD[???????5C???EE?TT0H??j@?,????J??. ?? ??????:???@g?&:?/F???h+^???U?+?-? ?}?Y &hq????????????&?>&^??h????d?9g????h6cF?O(???h?j?@[?G?:?4P$?DW?-t ?F?mzh?s5 +? ????GL???r????,?-??O?b??Cb?x?????5??e)?h1?B???,?? ??7????(?TC?u??N????????h? +tI]??%?h?????_??uM,?nc#????^K?r??>???s?g???Lp???I+???E?OVh5?E?2??CC7}:3U??B }y??^?=?pC??l?=???5????p? t?f?]#m[???tp?????\?? ?y +?O??????????>? ??????????,??k??'! ?8[?RqX?-UZ??(3???@H !!dN?'???!@?%L2 ??^?? ?,G?\q??R?}??=???????g??M???> ? +?u??a??,?!?????@&.??@3?Y)n??-?+??+1o?????????>??h??K??_??;>: ?}p?{?t??????+?Y???S??????w?Gn??\n?X?3C0???lt?o???h~f@ ??'?7~o?????_~?m| +l?{W????????m??????3????~?q????3???????S"?<"??}?}???AX???@Zg? F??b ll?N?????f??'F??T??w?D??? +?WAv??J??t?/???_?qy?{???A?I?1????]??????s??[>?5???`?> x??B?jC????ki|l?] ? ?+>?Hoc??^19V? ????v??8<%:? ?p}R??aH??Dp/?~????A?{? z?q#h?k4???@??C?0 +$}@(?W??@]E??6??j?????.?^K}???m|?cZ=????I?5?p?KW???,??E??????:??w????|??|/`?2C?H? $C?BO?C? ?~ ?lN?(3?W?|=R, ???b?&?a?T???????e???Dq?o??np??F?J?f???? 
+?aF}?%z{??/t0r(?|?m??%???S??H???AD":? \@^???=??;*Y???5?/? ?VF??;L'?+d??O???z"???T?tY?L?????G:[??X?( ?"?u??/????zzVd?&J7?n??~3R???b>??s*???}1.?Y??????? r[bNd?? ?AZ??7qkD?xU?!?A?}~??m|?x%b????[@]???rSd??w??h?`!zR??W?E???L??\??k?????S???F;w??y?+??-?????lzm???K*?T&????%??B?l\T({-,?_#^???2??"M??9o? ??m??0B?F?^?1?Y?]??6?p???l?????m??s{??Y) +??N?T??bTf????s??5???N?:i@??t[??4+?M?*??$??$A?>??e?3?va??O@</#??:#4Z??V??????zo8Zl???tj????u?J?2?Q???)?[w S'Pe?O??f???)?_I??bE0??T?????F??i4?B?n??4 at c?t??;t?i?a?!?e?????Tnm)?p?/fy? +y??|)E????W???Ujx??U +?f?H2?ei?1y?b?_%iY L?AF??X}??????.????????W&LU5Is?????d9?2y?WI??yQ?>$) ^? ?x4?9?] ??]????f?????M??mu??Y45?n????????V????????HEI&+]??&?d?$"?q? D?????W?(o^$V?@??>???G??(?h?V????? ?g?LP_?z|W?6??N???{,+[?m??Tu=k?????Z?V??X???????jE??????+2v?K)?d^?-O????\*By??!4??`?;s???????p?V?C???z<7?!Y??S?mQ{[??M?0yC2MT????i???j~tM??^3,?U??u??? +0K???O?Gh:?ql#jD???B????????"!????????????k?W]?pW\? +_????L/?T?.??0~????}y?d?_????s??5S??? ??K8???U?ex? ?t5??z?9{???*B??????ZB6?b??7??? ??? r?ew????$????|g????+ +??+Sf?-Sg????Z?5!?~R\??M}Gr??m? d??@?9?LC?G?Q?????P?????52??5????1?O????g? 7????LH?I?l?y??3X?5??5|?K???wA?;????%#w??~?a???3?9:?O??7B???O????????? ???????$??C7?gjG???? ??1?I ??TH?P"?RAC?(??b?????(???? U???w??K0H??|??W???? ?,???/h%$???4?H2&?8p1?3? 2RB???;???d"F??/2??0???R W,?t?z+Gl???a???dc?6??Bgp?B6S??O?FBf????Qw;!?? ?!?0!a??83??1?!` +???F??'?F??d?`??l[?[??? MXXe??&3???J???L??????!????<%$???????S0%??G?? ????Z8pc8??a?5?Ls1?7b ?? ???"(?,f??l2?t?? ??H?!?C?0EP??Z H???Wb ??6???????o???N??? ????.????9?^????r???]F?\>????w???w8?@?!?? i?1??*7EX?BS,??A??B|???#2?*???,???Bh? B??A???9C?????B?? ?}??;L9??????G?^????O?????? +ZC?7?M?w?J3?k??4k$??BS?8???Q??}8??7??sPY?????O??aG}a?ozA????????K?.?P?y?+F|??B????U?'@ ?A???$ )|6t~?? +1E???$???l??B????n? ?y}q??{'m?~???e?v???????_D]?*Q????C}M??j????Z?}~M?Q?#?T?5?t? +??A???????B? 
y?N +?&(P????Y ?f???????+u?I_???~-?LY/lO???w??.?Bq+kW????3+#ku?QWt?T5?-??t??t??g'Cs?rRMU???#h'21???G?\#????%bV????E?VM ??????3???x??????]?AM?w?%????z??????[Q<@`d?"??71?@??? ?`8?n?? r(??????????;[?????v?????.????>???}??O??~??m?U?#?y??5O2?c???Ph???Nz^? E?%WO?sa~%?"?\?xIJ???-A\??d?+?i?????c?h????]?k:????kKc}???Z?>??uD_!??b??J??9????sjZj??-/???k?DR?eqL?C?[QLH5???8??OA??I?s????f?&?F????? :;?[7?yn?m??^??G?@w???=?5_?Q?Q??\??L0????FntI?@\2)?`? +Ez?!??q$?m ??[X?=,?~??$?m???????Zz?Z?u?m?8?k_????-?Y??qU4???7J????????35Z????%0?q??q>?h?????????5 ????e ???M.?>?? +v??6??n???S???dN???r??6???/c?X???Q?RW?D?'m\?C???/5?&?/?????GQ??0"?#r` +????? ?.v? ??o?P7 P6 Pp ?? d-????k? ???7G? ?????????g?E????)?????{??}?;???;???{?x]C??/#???O??op?w?70????? S??f?????%+????fGB?????||=?d?2S???,{?9j'??@2g??_???]?????g??w??Yy??~r???]Fn???1K?q&??^?|?&?_~??(????1A?x?~??????ZQ??? zry?jKZ?nxm??,????j?7/Jw??h???????=w8??.????*r????Z2?;????3]??????o$.H??G????{??n??A"?????k???]?qMX[?Od ;????@6??C?,??Z?????zZ?X??z8( +??s?:q`??\Hm"( ??^?????????|>????Xfp???*????3?y]l????H1?M???????u?P??? ????T??f?N~??2????H??u???Xx`N+@*??? ?????!b?BG?(A????~??m???k??< ??-?&?????|?/??Jf??o'??H??6?h?????? <?\?E1x?5x???????T)P?1?f?qt ?N7E-? ??R??[?????!x???????? ?\????@? =?????t&?f?PR?F?R6??&6p?|????<4=D#~&.?>??????????jX(?`?t??p?n?-m???????f???????d0f}R????`???`D????????????j??g?}4????G???????rd??r???;???P??&??s&y\t????e?y???|?`T^i??i????>???a???Q?VvF?FvE8 k??>?H? ?? ????Rw???? Mu?RBCw{??Z?h?L?0o???cp< ?9Tsx?}?G|JC^????0??b??c????F?JY+?S?*??=????w?/?NE???r????;?h??D?6Jt?? ?I?4 'u??h??cl?F?Y?#5??? ?`?\A?ba?????????6???_i???N?.?~??:?N?I????fOU???:??T????%?~hs??Z|???/:?B/K???*?b?????q??f?]Rs01???????1|?.??*5yYn?4???I??????a{???Y= +;n?0???Ah??5???nh?????!(??????|? ????A$?FHA?)??)?&???y?R?????]S???M?4z?_c????^l?]K?N?;1oG?K??OKo???????D??_V?^?B?sQ?8??3??C_r?*??(?bL? s??Q?.?????????x??f??????(???Vb??Y?xc?:????????w?uR???I??/%?P\H?? ?t??S????@??:???{1~_?wK-??R?Ri0???8???????Li???????V??N??*w?*6U?O ??? 
J????l??/???UN???????>??r?ykq?M?l????u?TyIU?i??9~????? %osz? ??C??z<<;?JxV???????T ?J??? ??_0L{??K}?????9P ?C???V;iwTyM?Yd???V_????8n?R&vW?x??g???d?9!9??????? ???4?:"U?4O=???Q???G"-b?D[?>?3??w??{????.???A== ?{??u?.?=?^>???kr??rPEwe????I?[??&??H??`??l4??![-?? k??b?????kEF+W?F?Y????????Y???K???,???=03???b?'}b??>2I?/?>?&????fT????N??R?K?%?/?@y??gL??z?6??FzDX?"?JS"?[???@?=H`DV4??R ??n??n?O?Z??N3???N??N'??v??veF{?2?}??V{??Z?????S?i???&?AqB??m\????.L4????V?D??3ae'?????????)s&I???? +??e +??? +o?^?[???????????????gFg?SFW?O?F?F? +??(>4??|? *w??z? t?1??L?_LM8@`Fk?/|??????:???2V???t?s?|??e?S?Z?In3??N?w7?1??6???=???=? ?pFi?q&?G93??p?0or@??*Xr}?/+?q}&xt8?@???l?g1?8???{#r#?P?"??????R?#;????E?mT????????:f??>j?G???6o?q?? ???s?7?>???ye-h ??K???????+@??????K}?a?YoS?+O+??:??c7?C??=qm?%]?W??? |?{V?Y??X?`p?????W*|??h.@?X???7'X??/?????????????????"???6??o???7g??,:|??q?{2?u}????M=????8"???? Z?Z?9?R?|6?}?`?z?1aZ?Qqz}?e????aE&=?+?C8{?[-vo?Z???>????V?A7?wMZo?`?V???[?up??.o??#4???????x???"?F$K??t??t?I?X?h??Z_R????(???$????]??+;#K95????Z???kmk?~?m????!??n ??m ??MC(??Cam](XcV????????O?{????d?P????)??h?H?P??ZoZ?vwj8kwr<{W???=!g??8?EKl????-???6???~]??}M?5???q??????`W?o!#?5??(??o?g?kR?;s???? ,C? +V?? i??x,???^?33???c?C?b????iN.X??Xn]?Po[??_?-?{\X3?X???$???8?1>f??#4?s?x??i8o??W?q????t?":Q??s?{e?J{ ?????i???Z????f%?5f?s???,?%??????????.GYb??0q?)??6??????????n>?@?MB???B?*?g?3% ????2t??\???V??D?h??h?n?o??`7????H%??l?ey??W?Q?/Jk?K:E???????\?R?a3NY)?? 0{????y|?3g*??o=?????}3????`?hh{?>jn???o?S?jva?5yk????????X???Nju?eb???\?gT?Y??{?H $H????? ?? M$R???8,xpD?up?8k;keA at E???k??ckCi??#*? +???????wN>???-??nb?????B???G??-9mm??ikZ#???0"4n? ?_?L?p????h?,&???T?A????-??L??9??d???7_?>????&8.J????s?[????s?/?$l[??5E;}K?n????I?K?&n? +???5t?1?_?'t?Rz??Hu????P?O???d??????????}*?j????\????o?d???}O?????A??s???H????D=u?:?I?R???/x\?????J]??!Q?]!Q?1$z??!???4???????'??????XO???|,/p?%???EZ???Y??(?L?O????7?e??y??}?=????{\?6???\??9??G?? +???????#w=????t????"?????? `?!???i?;&?Ev???n?t?O?S?7N(-??c9???)Gg*&??sx?K??????[?# ????? 
-?? +=??????;?j?T????knu?????T?#??@qgl; +??v?%'??~(?BB??,??e????p????? ??bJ?,?J???$?G?,s/???/?RR??-??t??e?I?oT?Gw?^??,(?(??P??Gv???$?2r'?>:? ?T? '^???.;???^0??? ????>?&?4?4 3?h?3?l?5??nH??/?b?[~????l???%????^????9t????4??????b?????? `????h?C;????4??[N???]e?T???:?V? +? "??d#u?Jc??T??????Cj???U6SU???[hIX}M1??Q??1??S??^A???)~rO-w ?&@H5??"??/?:x?`?]-???)?&?3r7???>0?C???$??+1??????4?qA?{?w?7??J?Fn ?G"F?x"????U?b;?N?l"?? ??8?%???G??P ???B+????_?????7?@)??|0G?A e? ?? %??<y ????????XOl!??,??? ???O??,?K?j?=?vh??t?6:e??i???XdhC???3C?!????e(GYG??t???db)???/|#??S?G??>?? X&? r?!?^A%?? h??t?g?"???I??v?(wd?`????C????C?Gs?????f???I???b?????,??5??_???W?,?.??^??6ik????%?lz?=??A????Y? ?s?,P??S??:?c??z?0??z?2t??????8??qowX?????5?E???W??g? ?T?????&?Q?(/? ? ???&{h_?????=E???#?:?????{???d???Z????p??a????????mH,ov?%?N?r?K"?&L???<???op `?k[?n??hp37?x5;?t??q?*'?J?)82?!r?M?:??HS?m???\????M?sa]wf=?J??G?_?}?!`?w6.8?????b?T?4U?R-f??6???????c?\1i4K?1???6?q???}?K????I??t??Q:?????ng^ml???e?k{??????? ?C??m?r7?a????????Fs?1?3E,?I>?f?0?AU p??;?'u??mt?????????N??g]?m???5s~Mk???W????CPo?K"????????!???c?Uf ?Q?ZA??M?G???&Z?J?m??s|??_??sA??YX???k?n???6???s??????O??o???i???? ]?p???:?a????{?9n#??{`???K?E$ +?En eR?Z??????&)a?sE???>,??1?????O?!4???O8?_?,|??X????o?4?> x^????z2???? ??????9?%K?n???7W?:5J??u?27Y??????G\?I? 3JKgK?kA???"??J???t?OPL???????xZ +??[?:?K??\xN(?a??8 5Q ?????j?2okH8?T?????v?? +??5B?*??S$??)?j?????????A???'?E?????? ???????- ?w8?O*?d??[????uxOU? ?e??~W?I???pSE ??????L?9*i??^Y/p)???(??Q??-?5????KXd?2??n;????k?????bR?{P??l-nO???: [????r??&?W???'?Q??i?`d?k????^?k?c??=MN????.?wg{???:?{W_u??E?Al?? ?.?0 at 3???2???2?T&X?????p??$^????)q??h?Jo??????X????????????o???Z???Mj?%/v???pk??????>?? ?? ???g? ?*'?????? ????}%????Y????yCy?A?S?@%??_?????LP?R??%???????/?1?o?1]?OL'??APty^???9???9?{/ a`?AT??QtO?????P9e ?S? ??M?6,??2??L0?N?-2????c?R???%^??7?N????ZpWm????t~?:Dx???x?? ?E[ +?????r s0??z?4??/QE???@??jo?Q?g?C?f???a?????gH??Y2???1W?v?WZ???9???'???w???/H??o?=9I8?? 
?A???.?p?`h0?j?=9 00?=0 ?G?\D?B'??@???W???*X??z??????q+???+?&+Ef?+???5 +?`M?U?????f??????~???%???H?] ????5???_?????*5@?????a?*?@?-??;F?a?d???B?f0?j#??j?Oi??ky???2c??v???g?U;mJ???y????k????:q??6a?v?8??:?????y???w????????? ???^z?>@???@?9 ~;$?=???@?y????????.????5??R??i?;??T??0???(eJR?%d?c[W?????N??1I?d?h?]"??dt?w>^?}????O?S??~?????O,????x]?!?^L??_? w?tP?RqM?R?J???P:?z?g?~s??u?O?n??@?!???rH??]???0?#$???????5g7?[X?u??t?a?)??? D?!g?r??n?J??t?w?0?3??C??@?Q[(???`?? ??a$ ?pO????'??{"??????????tO?;? ??L??e?????p?X??jE= o?B??-?#?RK|?W?? Gox??ao???r??]%??{????d??????Hw?E???+????z???L??????A??/:??` |l|?h?|I?M??(?0?????h c?E?w|i\?4?8???XB?????????p??mHg??D?d~?_?z? ????6h???C?^?u???tOB?G +??t??Y???R-???[m?n?M^?????oQ???m=n?Ri?.??????kB?}?g? ??o>??:?|7??b?q?? ?=s?)??]qp???-??????14?h??1???b?y??V???l?LS?D,?_7([??????w;e??8g?W8? ?s^??yy?pZ>@8. 4????????*?~?? ???=2\??x'?Kh?]??? !?-????#b?s?F??;?j???6kcf?e??:dE/q???y??\?????.|?mITy????n??^?.?.?UZ?pn???ye???]6?}/??I?g?T`_?=?????1~???A??I rFE???`??8?:+a?zU?t?#?:.?[?j??????9? ???sH?????????y1???E??s??qi?????? ????????????2?'?`?O?M?"?0>P?>9? {???c?,V?I??=?nY?4M??????\?2Z?????7b????^????fV\??????3????8?>c?pk??????U??9?=???h2????]?sM?D?dwl??+]7????I?U?R"M2&?Z,??&=9?~??)? ??l5L?????]?a5?{?V?7ZU?:m'?xJ?K O??e?????????P?*?V?dT?.??]???q?c?^?\????2????z~????<??7?? ? +?)??{???}???;W/5g???? ????q? ??i?"???B?*?S??SvJ%???/KeI]d?7N2???H.H?#?G???n?`???o +??B??T??e?{??PX"F^??RV??jz?t??"_??y????c +??"?c-C?Rl?ss??9eYv???????2;]?3?\?38??q H?$ ????t???C?{?-??????Y?1?x9??rdWX#?B*J?pWVTLU?/??S??n?r?aXi?qHI?y`??JV?m???T??l??_?V???>}?8?????? +?>9o?{O??G?3?1???n/gH5)]??g%w?U?H?6??F????J1??U"?g [5G+?j?n`?C?????f+2,?*?F?T???U??vf?~[?? ?????z? ?z ??E?x0??y"?m?N?????????,????e????d:????~???|???|????{M????n??????ku??W?=??U#'?FN??3'??N?`??Q??Y?????:@?o??????M??5?a?f7???!??<]yn???o??!???4g6F?LoL????g??P9??a???M{M?7?5q?x?d????????:?b0????|c???9 ?\??70t?l??;?9;???l?????23[P?z????????;?n_???!R2?? ??} ?Q?)??????R?p?7???[?r?a?S7?[`???p?a?;???0???? 
????K???1?i??O?Y>A9?3?d0??!?????0|j?b???????vps?L?^ ??????p62?4?I9)?;Yb,R????j?????#?Z???v:\?K^3??????5?????=???w???[?j????W?o????????7c???u??j?L??Ll0?G?qq?#q???8???&???Me??Xm(??2????w????1???1{??f?Ox/@|???z\0?}&??p ?H???????a$]2V??2?){??%???\ZO??nKU?w??m7????????s 9??=????}??U???WS:?b??j?B??R???XX?0?2?? +?Af`??7?W????r?9G??????2?s?*?{?*??!?o??????[x???}s?? +???x?+??.v?-K?*r{Y?}?*??T??b*?r?+?1 E????W??a? +?? _-??d#??dPQ?? e?????(?>?U???3???? ~?????k????5??]??`??A?v l??????? ???? +?? +??'?????f?'??GX \?9???G????R]?q1?{???i2?4?m?U ?Cj?9t?RMY???p\??p?X?q?(??D?6Jj?(5r?:?r???? +??u\??.??9?~?????}??q z5?[??? ??x?4??S??p??Bm-??zJkT?????'?R/?_SIy)?_???????;??c??>H?a??&@??< +???mv??p???'?'?!:? ?x??@;_?v???\?l?????kx?[v1???f??Y?e?u>c}???F ?A??V??^?????'Xr?'{n????^\|?!?2? !'??\?.s??? ?d`???.?O????=?s*??????N?7??O??q`?I?]>?H??????L????jnDz7????A??M?]n??????0?{g?-?????t?z9c?? +Lg??o ????O??g?????pv???_?k? ?In??r??? ?r??8??~a>??????L?s +?w?!???b??????c?d?u?W?[\?z\y?d9y???00??G???G????`?n/.??p?,pC$?.'=?P?z?????^d?%?$?L#s?b??x?$g??c6/????\7???b??????=y?7s?,?3_q??`K??YL?2zV????3??M???SHO){?v-????y?8?(?X????1?#???rxr( *~W????????1?7??h???5?S?z>bD??#T?E?tI?J]?h?.S\??????? +?IwHR?s\????????t?O?M??`S?-?tC??)n ??????1\???? ??O??&??8?fG@?Mq@?l{?IV???????6???6w?m??QO?[F ?l??"h9??w5????St?j?? z?qGu?{??????B?De??????%%? ??C??C???]m[06E??i???#'d?bKp?bsp?2+?sef???A?+7 +??@A???.??&??K?????C???q2T??????(??G?a? ?E????L??s8?y???????|??W???r? +AJ? q?????r?|K?3????A????k??9?K??%c +?h??? 7e,?k??s???8Wk4.U?9n?4?G???????.V????( +?????????77?K??????dA??$H??Wy???g?Z???? +2"??R???????<`??= ??@7???#B??My???????y???;s^?C?>???W??jGs?< +??=?YU?]?\????f???~i?3~??/?????j??P? r"? ?Y?????y???????? ???? ??aUa ??!????$&?2?"c????2????/?u??/???????Fyzn??:?O?2g?_J?i?????????&??'g +?.???98???Pl'????Ii(?K\Pi???)0???t??h??+0?:?K??sJ?\u?Yn?"????r/Ua?,?hQ$6?&??\QpJ???2.??O?^P?? +r"?"?? {p???&`+?!????n9g? +?? 
+UN??r&?+??r+#$??h{?9?1?"uTzE????&a?m*^?c?mf??a??DL????????U??????????m?$?????[nJ&n???Qp??????y`-??d-i`+????l71????m@?`?k?s?]0?_?I???? ?A??qQxj c??0?w???^??C:?>???B?kX$}MKw +?????Q?Jg????^]????ez?B?g?Tf??J0R???P?Pz???=??tK?]-?K?Ut^????lz;???????%t*??N?????&:???B7???A:??FCO?H?e:????~J?_???g2? =#"????2?*J!???o?b???`?8+??????g???+)t?7467???-?Ca?4???a?t@?@???O???(?i?b +?hHq?v????????)?a?)Vo?O?e?S0(?????O??Hak@]????????U?EU??*W}X?C?'????2?i>{y???!??8?E??????;?@}?_SO?|??,??Jj_??j?h??5??&???K???J???V +kS?????*u??b?.qy?1?;???4?CII?????D?$&???L?)??!??F&?=?}G??????? `_?k??>?6f????7?E???u??F]?A????4?k3 ????? ?Ez]`??E\??????)uj?d???2G?2{????????TFdS3?????N???hG?.????????? j???%? K????UgPsj ???2????v????R}????,)?m??u;d??#????rK?{rs?2s??R???y?3??;? ?????????}???C?{j1??1o???S?i yM1?*S +?"_?-???????J?lB??$?8?Zd?m??d??AyA????? Ay?w?????????F#?i!n??/??? ?D???;??|??"???D???TUI?*??0?]Z???,Y????? O?>b?&?N!??|??N??=??Yk??*???c?Gn?B*u,#?#?]lWs??L???+(?Y?fkq?????%?? 2?e?\g ?0O??i ???'2m>#M?c$?;??b??ky?v?P?k.9]?(?"[I,????????W6??i?3:??9k????:q??M???*????Smg???w?????EOd?BF??0 ??b???r"?6? ?cY?kB???/?w~&?_?D?P?H?8????????:?a??F??r"Wz?*"Ku???(??[??*)?&??5????v?7?????j ?????RU??X?_?YW?GS1.QU???Cz#K????6 ?u?`[??b4??$?m??lc??6??h?f??~?#6;??w?Z?^Y??#?vzGd???????Q??tw?????????yBk?`?}??WH!?R'Z??UR?FyU??T?N??.l,51?%???uq???\5$Y????&??&?$?p?'?-?i#?%xg??Ix??" uJ???^8C????.??V???F`?}M? ???E?????F?t????A?I??8????Ax???b?????\=???"?Sq?I????7???}@?{?^?? +t?>?????*?Fw?2????@?n3?[|??Dy???p?0Ch?`?h=U???Qt??q??s&???O???F?L??'???rl?MU?t?7X?c?\c???E???Km6?P???Jq[%M????[??k??+?9f?q?r ?*h%67??f???eO?Z?kG?A?j7Z????J??r?OUE?Lm ????mL??@?6??$?T??m??GE??UpI??n??DyAn?6p???????? G??0?>??=]y?6?????:???K?*?&???0???&??*?LVI?t??????* +IUah? +Bs?Z,{?S?!??&??V[.j??{C??'C??m? n???+?????,??7??????#???A?B[ik?WU?3B?a *?????*????w?5E???????Q)?5Q+ 9Q? ????#?3#]???#????#n????????a |O?/?f??S?d??$??D??8???l?j????!?? 
+bc??????Z?F??VN?d???????9???E???4??x?)=~?GZ\?Gj\??????)?_y.??????G ?????9?????\???????C??r????'??01@??!Z??K9??d?Q??????eXi?hXa?b\n?iJ??7?&/?X?????l?Z???kaR??????????????????^?????6P?????:???v(????1?ul????5???rk?????? ?Sm????? ??? w?O?'???6???*?plc??????(V??d |?????????[ +??L??fW?,??LZnu?KK?+?%AUq?:???CS`yYk???+??0c?5??????/??fAe4 ??|?}x?:Gl?:?<dY??-V????e???O?D??3??????? +G??n???????&-?W???k?eM???Vu^?V???O????X???J?ju?w4?"AMT:?gX???s??d??~??? ?o~??Mp;G???A?+v?\???`?J???-*v??W??????????B?[?F?S?Y?U?W??8?M?h??W4i?w?i??j?????+x??I6;?{I3???yh\???F?2?XX=SP?yo< +???????k?y,b??\???e?5?????2??]???R???^RuJ????:?y[?\)?W??d???s???6?5????????????'C??1??' ??F?\??'A_? +]???fQ??&N??$?~?4??J????X??S?XwH???=eB?%e???2?FP?W ??jAq???j?#???????S?MKPZX?;h???a??Cfp +2??!-????4??????E??`?()X+^J?m?yM;e?7?&????lv?E?????q~A?{???uju?vZ?5???R?rP?~3??B??aIx????0< ??8,' !?????6??p)???1;?(?^'??.?:(y&tB???8v?-??fA:-??~????N??*R?????J?????k??k?D??ym*???F\dfEb1323"??I?3=b#L?T?G?&G???? ???&b??!f?-Q?ZA??>C?N????????????0???oRwn??O???T?ue?s??????:?????E??????c??#?"??_Zg???%?o?'???az?L?p?5??g?o???q7?R???W:?&:????h?l?mD?5v?3??W8?e?????L? :?/???3??cH_FD?}C????]?#??fj????^?o?Mz??c??:???X[?&??]?3??g|p???~? ???]????? +#???w???5???????Q?9????n?[F?\?Q???????Qc??d?c?g???? ???w????E???\?{T??????? s?8?z????G?%D???%? ??pI?$\AAQ??XT???v????N?]?vm??\????;?j??g?j?U?????s?%^gd?r???|???_d??? gn*~3??,? +?(????7?HG+?_?i??????Y?QN?}????'x??EN????nq)?eD???K?k?????a-??9????r/2'5]??#?1??(?????8~?o?????c?2??[??7Y??8? ?L????1????g?f???$O?o??M?@?p>?????M?(???Q?K?s|?'7^:??????cq6????i???.N1????8?z?3?(?9?????^R???????? ?5??a)sZMW2]z?Q@???r:?Ym^???vz6??O?v?????9????Y??|??????a w???-??Ln?\????[? ~s.?/0?(:5?IGO=&zJ???????Z????/ [?_??}?? ?bL??=?O?K????Q?'~?x?????????{!t/.????x?J?8?x'??c?????1???3?19??g?ab?{g?1??KA?????A[0? ;?'?#??]?p?? +?????`?#6???Y???XngW??p?~J???'????j?~#U?})?V??\???+v?_1?w?????d????~?X???5???????PlH?DgR,:? X?MCk?-???%??M??k??d????????RM???:??????\?xM?H???|?]?c??c?_rh???????5????Gvf??g?)@o2???@t?.@?.?i1hN?????? 
=2???(??.?Z?Io]??RUZ??"mD.?M?v?????????_*m)wek??U?WX????\???x??2?d;??^???td?@[????_???h???'[ ?!??\????P&T?bev?T?????7?????-k??8?5?%????qSi??BiJ?/????I?Wyy?m?????V9M??~h???5c???w??Z?*a????=???=*G??U ?1&?)1???E?*?._??z??m +??)J??r7?Q?D?bJQwS??L?r)]'??=z???^.?[-z(?B?? lr???Y?2??3Q??=?????bi? ???o????(???j?&g???Z??d U.??,#??%^?,??h?+??Ru????r????n?u?????eL?Z??Km?4?!?xO??????f?t???R?Y??Rf at F?_?????E?n$3??&?\vV????mec???bM|L!????0???????Y??:?g>???Qe?? +????V??Bzu??u???V2~@#???C?|???r?0??.?: +O??? ???k???????t?}???P???14?g +>O\??; ???????^Y/9?J??G??G??-)?4???q???????cM?. +?.?.??? +6???k??} ???B?c???x??-???3M?M????U???????>8????t?!~? ?F?(?8:\???@?7?h?x??Y?7??G????????"???; +?? A?????s???Gh?O?0?O?<? ? 8IL>goN????9>?:iH.P??$?/??q???????T???o?,??{[?C?9?????s??vO???????9??3p??t j )???o|?&??&??-??m??e??w??^Rq?E???Q?F??????o?f?????$??gz??*| ???mC????]??1(<"?iR?P?m8?f?ab?#?Y???A'?? ?0R?)??l"3? &?4?W?~d??AEL?&?G??????jt?I?.??m&??L????n2?6?K????X|???Y???????? ^?W???04?A+??????.??f?[???l?q?????P?d????ZOh??l?5?????X???_?L?#>w????????A?-?????o???Tl??v6???S?+ZH*T????????hd?=O?#i????xr?_??????=???????7#?gZ?Cc,?'a?g??H6???UB?,"?KHW?Nj=Ge Q??t>?Vyo???????6?_U?v????I???????4??~?`??#9????vvK9??dA???????Ci??4????O???-?h?~?????-)??n??????p? E?m4?(?????d?g?4?9??B4????v?s?6??)?SZO?;?UGV????(ol?????+mG? ??8yqD???? A+ +?7??F???[??z?D#M??????)F???d?:}???ba5Ee???Vv????o??e??? ??k-??8#?9#???f z???????c????OGg:?D??BN??*???2??&RU???%koq??s?m????????+?8??%/?w?????z??&?E??????vohM???????vHPM??Z?q????V?}??:?i?C?,U8?T??&????B?2??R??*v|6-ph?&W?wJ?7? %m7Wp=?7??:'?[?.Z???Vw???)L??F??y???G???J???p??r?Y*s??En92????X%n5Z?V?"??*p=????q???\/?\s??r?????1s?0?p????????=|eq????E??c?y???s?L^cU?5I??I*???B?,x*??L??????Q??v)??3????9?w???lz???kh?"!?jaM??>???FK}?T????^?2?????G???M?R~@???+7p?rS4/0CY?y?hVF`???+-`?f?7(???f???????gk?u?????\)?????s? ?;???`z???8}:??_??S~P??G*'d??CF*+d?2C?)#t???&+=4]i??J -UJ?? Yg????|?0-??aj?M??? ??? ???r??L a????P???z??????(???vW? _? ????A???Qz?p?9Zi??J?????$??LSrd??"? 
?",?)k ???'?6??hL?dH???0?fHk?~? ??Z;???P?g3k)f-?/wpG??r??!^J?TZL?Rc??3L3c?Vr?{??????4-6USb? ?b? b*??1?? ????C>1?9o??j2? ~`?????e?d??????At??6????uM?6M??i?hBi?jZ ,???4?,uX,0??(???Zg?aG??Q? Q at t6Qa2?R|????$m????s??9o?tO????d?m???Z??~)?kd- S?yi??wW?u?????????M?l[??m??e??/?De?>??U^?\???hg?4=?M?u?%??fq?????0???MgJ?t&w?:?????????a?Y?k#/b-s?,????u??}Ua?r??sU????9??????Q??L? ON???i0??3???%7{???????u?????%?v???????^C?Yt?9?El?J??%???XH??Y5?+?]4?y????hF?(y]T???bW??\y\n?]?U??0\???t-6y?????%???????%#??????b?m?fG?KdE?h????)?0?R????U???T?????????P;H?k?4.8V??)L??A?F?t[???Z ??K?j?????5??i ?y]??X??jPU?????Zf??5?k=T??B*B?q'???4GJ?????F?~??P? ???????i???P?bC34 T?~??jV???k?n???{????n???^uQ????F?y???6?B4?P^??G?V/%c?&?????X???1???????%gmk?4 k6?[WH"??+??J?f???~??~\.x?????] l ?? n!??f????x??L????{?g~????????>n$?9??Sk?r)?|????p4?cQ{l??l???hoJ????f~?z???>????u??c@* ?\???(??0??0??7?? ?/??? +????h?6????m|??u?pm???n???Sg7?f???U?f?N?????e?ga/??? ~??W`?=r???zS??|?uf 9?E??~??=? +??(N??q8??R`=??l???;f?4d??^?v????????M8|? Qf?????????' ????O?xO??'I????)????;?C?~?U@???@?????s]???+?hi?????;?& >?O??n7??Kg1 +?L??G??2??=???\dc/????????????' ??a2? _???????S??S?QH_?.???? +??}??Z?%.? ????Sl?I +vB????O^???8v?o?@?z?A???wJ at 7M?????'??KU?N4????? ?_J?fJ????c??????q??9??#??8N?DYQ?????x ?J]???????Gs*???q???>?W????? ??@?e??"l??'{?Z?E-??? ?????bQi?ux?2?kscQg?T ?7?W,z????V*:v4 +??%???????? ??@?ezU+i??v?~K-~?=??~????S?oYU???? W?? |?.?????????[??t?h?????9"???h??^/i!????????? Gy G?I??6??C?;Fe??(????ss?????mC?nD/?8?&% ?L???Cv???????h??ru-"???????Z??Q??????[?`??S+?????=??? ???&?B?$i??I??!?0???1??l?m?v |\??6?`CH???4k??Y?l??v[?*[?-???[w??n??[??U????Y;iR?oOJ??#??'??y?y?y?????$?_??"????/??????d???<Y%?T1 ?d???????k["c??#??.?*??2??)??s?"wf???3?ss??:O?r0?????{]???n=?z??????????WF??+9Mdx?? 
??????ar????????8?:O?u?r??????e?Nr?N?u??z??U????id?`9Hbix??I?????&??<w????q??#'?s<+gxVV?'?#k??e??Yz?'?&g?c>}???>f??0#|J???2???5???2???%1???e?*??+??qiS&???amk)?&???????????rr;N???????)?XH??|?,?v??l?fR?!?rS)?A,?-L???D?'???J????dy?!K?????-[?|8?c?0???xA.b?jL?-2!Rl?XqF??)bH-BRa@=?>?z?? _BW?7?Y?.?5B??#X?12?z???? +?A%???-??|~??9?i????lF?d&4?1?)?hY?? ???0X???}?v?j? j????S;???e??/"P~?e??/?|???-??yC?xw????{??d?PKl???&8??O????H?c???0?/A???z3DC? .t???4????@??$??ux?_GK?+pW?W??????2??z???????y`?P?l???\???(?%c2?M +???B?Q??\???????`m?^????V? _?(???h1/?m????gg???F?/??]????n?a?o?.?/?y?-?????5K??;??#???[???{??B?Z??? V?l?mxm-h?u?c?? ??6?u??? ??????? u?w????B?Y?j6t???O??[g????*??#|>`c??wa?f;????????5????-N3????.49??tv?A?j?A?5.???,????{????`l??????* ?zY0??????y??j?r2???(5?g"?n:?????n%dCU? ]??????4??OA???? ( ?E~?_P??! +????'??+???????>?7I Q"?r=!?.???N??%??;z1?b*?hE ?D=4?%??C??b' +?a???????? +r??Gv?O???O? +? ????d?L=K???? ????;???G???=?A?#C!?x??5?iAb?0?0 ?CvC??U??????O$ ??????}??=e????d[???M?D??rh???v??!-?(I?Cj"??lO?????? ??cs|?gx?W?i???W(???=?;?Iv?2.?u?k??E m ?z??? +?S[??l??Gv?GUu^Q|bR??#??? ?<??cEDDA&???E?2FD?&q????T%????:?6???4 U?MZ?hk?????@l\??u?{????9?9?wv?P??p?6r?6??7?p?,r#ky? fN??p?]?????5?x(?7???q;q5@ +??????0????R;$+? ????*??f?h??&???-5s?5??7??????$6?q3?v ??`????????q ?q?&????\O???~8?l???M??`?v??????9? +w?.{X?????0??&???G.Z0???g???!? P5?k)?[o^uH?c???q??, f???%???w`/?H-"?!??3????P??t???P????F????6r?????g(??pz??K??+?? u?5?_???_???3?9??????\????+??>?u????6G?<g??>=??:???^?"??`?o???'(??7H!|Hp???O?>A?\?? ????h???R+A?V??????{???h?;??N??-??(?oQk7Q?7?1n} ?p?4=?:z4?O???a(1O?h????????T????PGS}????7??:??+??4?5?? + +???????,?e}I???8????|?>?*???_???/|!???I?# ?????~!?KY?*?/?~-vWb??%?DYl??4?E???y???`??~?0=????X(Q??z?=^;???? +??pxb?? ????t"??~????9?8???S?????wQ?'???'o?5????%?#0>?.????n?\F????Grq?\??.?* +?xV7????~>? ?]??*?-?m??Jm?F?_?????c+9??U????????p?_???G??oC??iA'V????'?P8??O??d?g? ~??????Z?c?k75?K??B?vr??Me;?Ft[??f???S??i?Oz??????????8$W??? ?'?m2?IT\?^?48r??O?Ep?c???t?j?b#???? 
sB?$?F?]O?7???{???g???'??-?7q?????;???L?p?a?PP?d?4?8????c?Bk?g5_?????r????????????????m?????~???=}8???$;bs???Q?4?P,O?'?I?Le????IW?U?|x??F]????K???????Lu?????? +\aK?????????&?m;M??>?uD?? ?s??G?? ????ZncQ?M???LVm? -????-?+?B?j???U??:U?5??v??m????Y???P??-???X?qi5????Q???+x?????~??????/ ???9????? ???!Z<0R ?j?C?j?T???J?e????p{?G WZ??R#|?b +?DS??Mf%??`JU?)[??E???lZ?(?:EFlSD?A????1?????e4>Rh?U??V?c??0n??@??(?~???S?t~O1?S?y?&???d?V?%@??b-Q?????"st????e??)??Xa??2??l>?? ?)0??#o)??H?V?[? +?N?o@?3n???0??;?q?????W q/).?e??{??V????J0)2!F ?_??* +????(???(H??(??? *?0??,#? ? ? ?`????????K?F?9i?[mb????Mm??i?M????M??%???\?????}???7>]???41?T???4.?Y?q?56v??g???Y?5z?M??|??v?O???[?y?%X?jP ?? ?????xV????????iI??6k?a?? ??l??D? M0$h?!M??EJ4?P???&?J?????x\!s.iX?M ???8?Bc?:???j??? +??? ?A???j?e????Ls??4oM0h?q?"???0N?X?T?c5???QF?F j?QH?r Mk???? +L=??????O8???? +{???r??\r?@*?qi?.??)?x?L?gV?1y*???Q?`?4??pS?BMQ +1??PS???g(?dQ??J???????AY???yT??5 ??????F?????5p-Ag: A???(???Eg??Z>6G?/???l??????V?%L?????L??%^?i`????\????4?U??6??{]?y??=??zg?g????d??V?U??2??AH?;??pO??E?=???]\?]>??????5@????i#?$=c??^$??j???X=?ur.Y???????Rt????????y?g[?cs]??]t_??? ?Zrl?g>????"i???y4?C?????-???????d-+X?V?????e i??m??[??_?e_E?? ?\ V???&???K|?&1?v-Agy.&?m at s ?'??=?;?A?}V? ??Jx??,T ????zI?Y?6?l?p?V?? ??\?V +???V?l?|???Mz?=?kW]e?#y?LCs,G?Bo8?^xw??`/??;?qrr?E???,%?G;???#\?v???N=?|:UN?s??B?c?;~?4????? ??]\/}??WW c???u???????i????z? 5y?F?Kq????r?\?????B4g?9???B?hx???8]~??k?u?x'?i?8????????,?????Wz?*???~m??AB;??????\????????{?x}K=_??;?e~???????]??????]f?/]??c???????|??? ????. ??w"?Q??y??z_"@4?????[??}??/s/?/?????p3_?pnsa>?u?z?*w??xC'?Q?w??>?:?:?5p??? ??!??;?_~???'???9??9>??]??p`Wq???Wh?(?%?'U?"'????{p??????3????????+X&W4??M??????9{??r?3??-??[???q +?{?y?A?&5yC???~????p?_??0*_??I?i??N???b.?? ??:B??@Cg??1%1??????&???YD?r?o?!?? ? ?} ?????w??6?n? ??'??&C"????|?u;?^???#??D???0f?SO????L?"b?k?j??F?Y?^d???\l?T????V2?B?[????w? 
]W?8?h,?`y??O@?.A??1B5OG??:YU=???g????T???R7?J?-*v/U??B?/??}????)????{??????z?w?]Y?v??3??O ???yr????vq?E>?ow-??T??`U? QY?0Y??W??y???+A?)?xg*?;O??%???i?????^? ?????????Rj??J??euy????? ?p$K??l?f7d7?nHv?9v?&??I??3?C?(??c?""?v???X??*? ??VDE?8?vZ[??0e?I????w/???i??????eB?r)?-A?E&?T??8?ONR[|?Z????????B5? +j0?TohR?a?B?e?2??b??# [y6? ??:P???U?6Y??? +??Ue?)h?S??H? ??*??Si?|?2"???J??G?6?Ra???*W??r?~)????J???e8??o??{5@?,??Z????0??Y???2JU?8?5'+U~k????Zg??:[^[@?l5*?5?m[?B?*?n??????????;????????????Q???p??f-???????V~??^?j@1U? ??'Vev?J?&y??????b?KG???*tT???$??Cy?N9?)??3e???5??????,???e??,+ +%+?C??|????|?I??6V!????????u??,?dy??*r?U?????'???|W??\?r???jW?k???nY?;??|V???J?;???KJw????9'????o???B?G at +?D??s??? +???(F3??r'*?????}?J??(??TF?:M?nS??M???f?V|?E<7dpG5????????e?,as??y?}H??2?^??????2e$*=???@??re +)9P???j%?5??X?I??????????qeTl?_rCq????Q?!??p??+?c?H????b??|?<+(?W?SJ??c55???Q? e????P?&??? +*.???"??^?1??????U??FT????4???b?D?? x&?N??g??????J???G??$C?@M +?hB8Nq?)N???E??s5:?QL??Q?z?/???N n??? ?????????=u???oPsKCQ??k=y.??k?*?]??w>??Z?\?????x??!?6Z??&jX[????iP?M[]???S?V>l??-??L?|????????????YA}?0??@ 9????3???&%?;??b??????+W e1!<:????;]?^?????]?????~w%?t?q?p? ?????~??? ??{????a +? ??.?F??v?? +?kp?#v?!B?s?o??????????6?Px??|lx??^??u?????N??s?u?????C?F??7?X8GP????e(D? ??ga CxP??R?????w;??v?v&v?n[??? ???!???r?????????Q????!g?? +????a??aU??q+$+?.t?p?(6~??????v?>?> +?? ??(???K{.???|sK?? op9{u?S?4x ?9?)?????k????=`o??&E?f?(x??`n??O?G8??pa?ar? ??????J???C7?F???s ???? ?&???]}|?????O???1`X????*?8=?:"?$??$??$?~?Z??????t?+5?g-?su????????=???]??Kws?2a4?????f?Yg??Yz??r???9???;?_??z?^???j?_9?R?tjo? R?? ??\????Wz ? ??3j????}??"??Q? +??w?7l|?`?}K??!??????{=??b?@&????????}?k?? ???^????I? ]?]?Y.?/???:???DX??z??>??x ??1 c?3??RA6?3???Q???/??e???\"?/??s???8????w?&??x????b?e???? Q?f????S?Sw?p ?k?7??&?S2?L?+????????PG?????2??_??k?6?7Y?o??N????SxL??????T&?o??z???t?E}^q??F?? +?? 
+?r?, ??\vw????,??PQ??????R56?j?????fl?&??t:????N?d:??6&??N?t?h??v??}P????????s???s?s???^??V???N?~??y?3?@]x?OH !??koa?)???b?;??9?;?D?\?m>???7??7??7!??=?????????+?$?????????H?vv???g?!?e?????[Xo?4??L????]????^\??+??eNg??????W??????i???????b8?0V?B!?[`??~ ???5?x?s????c;^??oj??????????RT?????????S??)??7(?#??I????MtC1p???#??X? +??r?b}?w?i?!?e?kN ?t?39J?????$Jfu? z????'??? ???2x?iy|??8O??S~??Q????w??????cF?5?t?3??=Xq?+?4?2? ?$Z~??l?h?E??????????x}????p???d??+?k???U+??8?F8?d????8?Xw?vB??????yLR??? [?? 2m? +3N4???)?w?????k?,?? ???|w??f??^&???C??Fd?????UT??2?e?X?RQ?/UX???????,???????YX?5???8 at a4??i?{P?????v??m?j???a?Q??@u?r??5??;UmoU??[??*????>??~?N?l???ko+??ZS??r??W^?#?WEu?O?y?m?V|3 +??C? ?4???z?b???u?R?3_U?Y?VU4???????N7?WacX??m?w?S???L?9?v|OY ?????)??se?i????q?3?ms?h??.??????&[ }?+NW?*?F??M*q??]?B?M??&?m?(??OkZG????l?^e????zE?-????S?6?QiM?Pz?#?tF?2??pv??'?P? 7?Z??6?????1??? ?7Uk?Y???k??T&lu?CYm????????1?4?n-??R?%%{n*q?O???%?}????JqGu?d?N8????>???6??{?:fO/?Nz?ni?/N??%Z?_? ?????+???+?????.?????(??K?}???w^?=7d?~_?????7?v?[ ;?!\????7? + ????kl+?R?W3???O??R??th?? e*i(W ????(??B? ??? ?? ? 7??5???????_????/u?n8? AX??v?6???ja??????L??? R?h???IZ?? s??)d?r???X???x0? b?e? +2S?F~???? c??C?????F~????}?#I?z?0??2]??Z?RL?9j,1??s????? m?? p&????AGx??d3?J??1??? ???????(?Q7?V|???U?\4??????.cl?? ?F???dL?p?HE?8???????d?9d85=?+?1=?3??????6l?R3?8?F??>??????k?F???_??(????_C?r?????? ?????! ?????D?F??w???????????1f??i?x??I3I??m??I:*5?i?x$?~6?L?q????}?{????} m??$?PL??p???`K?r??????? ?j?j??j?j5?????+5??0??9w~?r?F??????lb? o%???u?j?x????P?Xr_g?K??K X?A???=??c3 ???f?n3??Dn??M??K??s?? _???F?go-???:?^`??I}ts.=???K`??CQ??????fY>?s???~?P????????}s???k?q?g?!]?????`????]???<+?8d??o???E??O??????.Ar? +N9??k??Y???,D?????9?= ???AM?AY???FZ??????????????;|???)?~ ^s ?s.?p ??-???+??G??1t?(?+p$}?w??U??:$?6rM??????????`? ???b??K???????X??k?????&3?6 @?????9?u? ?b??Kl??????}??;~????????8?K?~?+?????8? ?.??XG;c?=? f?N?d???F|i????a? +T??0UbUa????????* ?Wqp??BjT???!-??,RN?*eoQ?a?? '?:? 
???+%??RF>V?H?%???{??g)??|0L5??wu?3 _???2?C??~??c?&?U???? ???+/?X??U???????lWZ? +?DlR??G???e ????k???K???e u?????1?k?< ??&??]kP???KI?????? ???g2)?dU?)U??e? ?a.W??A)??J6/P??SV?F?1?V\?q?b.(6???)????????V??,D&?$"?@=??@???O?????a?I??????D+???4?M??,???J?NT??NV?t%X?R?e?L?.?$t+*??"??Wx?U??a~?H?C=??O???d???jx_??????8^??I?dH??a?m?X)???E??c{)k??:?,????g???lb????\p[?[??-?????? D????gYc?~??{?????x????ffQ =@?q?$?8?}|k=\3??Xn?K?v??m???2?R?)???njc7??KQ???????E???=zc?m?HM?[???7?\??||??????>???W????P???~?????g??????=??????????5 ?@,H? W_??X?????C1????_l?h??4??)?{m???)z??????P?!Y?q??I?e?Di ?? 8??? :s@?????h??h??h?{h?;??[(??? ??:~\???????s????????x?B9?_??G?@o?*t 8~?5>?w????in(?X??d}??B`???j]?????<~?Ce?? +z?~??5??,Mt?B>I.??h??*?U?O?Qp??|?=G????????????9??1???????????qe?K?!? ??Lo???0?~????6????=Cc0????,?c?F?d?ga????3???_?=?6P???k??5????x??????6"n????~?K??????g4}?7?????F???9?D8R?H?????O?8lp8?W s#?????#?_?s3??j3k??<^g?? +?a??Z|? 5?60X?&9cU? {P?*?'?<8Ke??x????g]9?4?z??qH????J?? ??=l?????4|?J??e?HSQD? +M9?7+?T?S??MM?4-Q??C??nM???????f?i??]Q??[??RG}???;???gUM???n0????g?S??e?R???h????Qat??c????1??m??,s?????av)?????EJ3?V??e?????>%F?R???z.???>Sb???`???DV?$j?NP??^???)b-+??????????}E?#RPD?(?????.?,?]PvAV at XD*h|?cDc4j:?????F?Z?mjl;?????:ij?:1???N?8?Uc&??j? ?-?e?i?9???{???????????r??Ul?R?)_NS? +????+?? ?y??? e5?*??Y?n?R?(%? %Oz\???/??4?=?? W/iQ6;6h~0?w?@???gJ??????f$????|k?r?v????????L???uJ?l?%s?L?+?b??$?.M?8?????v\S???????????Dj ?T??e???@A?0??F?n?[???e???a/P??T?4?r?J???${?&?W(??Q?????????X?;???D???gT?q???d.??O??? +x???? 8A?l??#R?J+H???*?3S)| ?Y?$?W?5Jp?l?X?2?aY?)??Q??8???????F??U??k}??????|?j#?* +??p?B???L???b?\2\)?h%????;Y?iJp?4?]?q?)?sOW??Q??m?r/??:E??kx?^ +f9-:???4????????z???-???y??2P?ds?3???nv?Rv_?dB????Q?g?b=&?x??q(?S?? +Ex??iQ??Y?h(?FP????1?-U??????g?X^"u ?f??????!z????uD%7a??4??4??,??O)???k ???,P ??W??"??s?/??????#!?j?kx-z???38?4????s?~??g??7?MQ?H??x?A0T#B??$?????Vc!n7??????\?w?{61?U??}?WK?????RX=:wI#?\C4??0?X?\?b??Dd?@#Zs???m??????????G??\?W'?B?m??N??5????z4? C.qq?8} ???????kE??? +?? 
?I??Tz`?*]dA??R?>??\?Kb/??w?????Lp??y,??=????7|?3? ??S??1v??Bb??B????K??????m?[ ?[4?M??y??~?f??A? ?r?U4M???`w??v?O??4?g4? +?6??&{?C?N"??H??G8???3]Fg^??9?kp +????P?????O?/Kw?vw??o???.??QJ???????!~\??K??E|?f9O?MB?Q(o??7I?YA?P O?b*5???Q?1~?cc?I??????l???Uc*??T???:U?5??????????H?q??o?"?i??b*7??r?/?k???????-Nc4f4??Q??q???q? ~??32??mmR?j?/)E?Ikv?kv???U??F%?F?-?*?t?e?#?e?r-??e?? ??%?"[?Y??-??l??h+?W???I0w?????y ? mU?/???ZcUn5??jW?-Gn[? +me*???????*???,?le?(??RV?f?Zw+y??esZI?o????,???u?\???H??@?3W???|S????????d??++S?,???J??]??l?2??+-{?l????????J??????????8??? 2??P????cs \ss?1Ot?u@??P?s??7(NG??q?q$+??? g???E?;???j??jQ2?????|?;?????Q???????? +?}C9W????"????E?Ne?h???3/(??"??>????p???bdw'???*???dw?,?2%W+??I??2??Q???r??p??Fq2?p:?sR????Z?|?CR??^?@ ?+@?n?ry?^"??J)????DO?F{??q??)V??R??EzZ?????????????????[?iY|?????`{?'?R???}??J@!p?;n????L? ???h_??|6E?r?+????>D???Ck?,U????????$??ru???~x?;B???8?? ??v?????#r?4?Gi8?Z/??N? ??9?yO]?F@?bj???>???~_?B?"?????r-?YM%?xx#?>?9P? 4??fx??2~???>j???XDc_L}.?.at!?/??~jb?j=b>S??Kl?{?B????H?>???? y??????? ?9??!?,?? {??i?oelf=6m??C????????U#?? ????N?0H?-08?~???O?[ x.4??? ??z^`?Km??>_??? ?.??z????x???h<1?P?n?+??I$?aK????!{???l????? A'?,5?L?@?Uz????????0?u?W0z?>q????Pk\????TJ&r??&#?o~???2s +bS???Z?tX4?????c???}?:?g????,g?=?{U??|???c6??fp~_H?????X?!ar??; Bb??'8?????q?^? ???/?????Q&k}??d??????~?????l?l??l?kl?+(?K?{??w?4???D?????uM?6????b?x??!??????zW#t?9?&Z?&z?C??U&??Q?2?_d? ?.0???s??~????"??!]3???p???#?/?IF?# +?$?M??GqTc??3'??M?8??~?aAW??C?????R????L?g)O?{?edyy:??????%2?? c9??_?????_?u??g???rXC ??A????C????z??C??>~?eS????5?Eq=???X?6+??,?x??M`#?<3?j??O???7T?Q?Md??@.???????h?v [???O?v7?rU??v??c ?y-+?E?d=??????'?j?\0?>l????Nc;n#?? Xh ??w?]7V??PM ?of+?b{"???e???1??c9??kT?6??E??m?]08???"p?????????B?h???.?(?* ?d?l?????Iv7??$?I???]r]???? $?E. ?R( 3?????TmiG?Zg??Vm???P?UTf??P/Sl???-??L??????0?w??????|???|?1??????2Apb???:?7??~?????????m??/??gX??L???MDy?.??-0???9??:8??????vm?;??v???c???Q G=o?~?8?????\o?\???d???!Cs??f?p? ???2???????L?]??u??m???PG ???&8?? 
/+??1?t?F????L???h????N? ??ep??6???????????\#?7d?S@D? ?Jxj?i???Vx"?]?????Ex?K?z?[ ??N?Np??? >?F?????\????7?I0???b???OK??gu ?sPE?E?=p??????W7v9????ZVf??K??G??q????????t?F??!8w???Sc?lX??;?u?8}gHX ?B ?Z=??/>U7(%?$?2U??W??Ly?*?????nQ??S??~??G??^??}???.????????-??????=?O?7???d,m????+???+???^????x?V?)???L?C??r?zj?? ??mU??G/??K??0x?Ut?Ji7d9>???uC;???+?@?A?1???](??2?E??UKY5????????_??S?Ws?  s?`h?#??t}???mX?H?A!x??s??v??Cz?0???????c? gp????????????d?????6H?)? ?}?y"??dP 2????t????T?(??????S?`w5v?b???F???V?/U????]6?L H??8?i?q??T?1??8?Q?????G???A3? ?R *?-???~xSIs??YcD???b?G[D????~???|??>P} 1=?8jq!?1?!v?h5c????|n?_???????x??R??7x??q?a ??.??\,??d??>B]???5B>>GM O??R??!??A?|????_ ??y?????????6?;?dF????q?!?B7.??` ?V??*??$'k??5?62?xr=?S???q??*v9?0zW?p???Z???g?E?7??&?KL??sL?"g-??6p?o??i?? ?w?????f??V?lE MSS ????6???OX??b? q?_<=???E?~?w??4??o?)?????>???XF?P{?c?????#?{?? }?>?????0??r?_ ??O^#???g-???8??_????????6??E?nL??o?)?DE8??? ?q??N?????_3?~ N?3??? ?xp%?????7=???!????^aS?_A?*?;?3?yW??e?"P ?F}Ft???&J?c?5???D?R??S?W(?w)?wH?_I?%6?G=??????z?ob?'???x???6? ???[?g??? _?>@-????L???????'?E??????-?8?=?}? ??I|? ?N?^??n>???|.:????1[??S#T??6????=??C??.????:???MB?4i??m?6m??Y??i?v???u?+}Z-?????\q???9??6?Bec?C?s????r?S?C=?????0?!x\?S???????N???J$?R???n??r??|??!{???????c????Y4??u^?S?? ?/???hi????^?N?%I???Z?2"???TQ2?HO?-???|w#7MDn??L {??h+??v$lE?-xh?????? ?@Fn3??^?????Q??%[?wJr?- ?!??#S ??F?:?#w->?gd +m'???M??d?8{?8? +cZ?d??}z?T-P? +????U??[???,t/(???Up$?h?c 2{???l%R7a?mx??d????!r?B?W?? ?a??{R????m:/-Dk?jL???d;(??Tp?xbT?xZ??Dn^???h4?u???w'?Q??????wF??????? ????????(??m??u?b???>?'F{%W?tI\???F???d5k??-?#????pY????g?|???w?V???c?Qh???? +??????g?V? ??Z'3ns6?M???> l??????&q?K? ^????W>?M?? ]Re??aB? ?d?"?;D?/%TS/?Ke??_?'r?E?fi???@?*???-?5?6?RJ?g???%j5K?9_??b?6?I??JKD??z)??J??V|??Rj??uV?-???rP\??R??$????o???S???????q?8???w?V?7rG?(?S5??\?r?H??*?|??;J$???#,?q?:[????"??8?%?9'6??X???<%??%???"&??b??#&??????k??F?^??A=gQ? ?:y?}n???????/^?[A?x???????s??QLQ +?*????0q????D??&?~?????t?:?? 
A?2FF????tC??7?k??;A2??'_(?sqngm2?1?Q??)? ?&l?L?$?E??1??????1????n?????????R"v???,2G???A#??q?????\??m??? +7??$??[?V??"?S?"? ?F??D???e????? ??9??I???Od????n?3??(?T?Y???B?m?y/h? ??;L7???b?O???E??y)???i?Y????}?g?J???GL??f?|*??@cV?dC???"??B?m??*??W5??????C?a|2J%?F??h3???#???,??????gX???????##??????=??#????E??s_+-?AZo?C`Xo??j?0e??4Mf???!.6s?i??f????PB\N?????10??????A8?0Y?l@????y??}?}????+8?`?Y}??? Z??k+?q1?@???';?\f!?L%?_?N\n?F?-3,?????4????#???v???? ?wd'%???????k^8??????/???b?N??{??????{??w?????w??]k?o??j?[B?n?yo?9???|j???>|Q?????apDNO?????)|r??@?W??}?????????,??7??k??;?O??Ee???`?8???:)?b??Iv!8??s??-*K?q?|}?,?????c??G??7x???X(~/?????????W?v????;?a?????????5?>?????P??g%@[??//?h?p?Ul?].??<{????4b??t1??????.????j????????sj?!?UE?'??Q-?jyR????o??_E~E?????S???????,?p??m??????{V??7Wp>????????xPv???????,?cV?????(?x??????!??%???????N??,?8??Iv?&??l?????I?6Is?MM?=????&-?ZJ?9???Ct?(?d?"????aTt3^u@? FTd?Qt?}??3?d???w?}???>?????_?J(???9?Qk?}??s???5??J???k?S????M?/:??9r???????0[?)[??E????9x?L3m??6SMS?O??d??x?y?,????y|C??????????b?'\#e???K?g?}Ch???7u??8]/??y?:??C?bLdQZ?A?????b??)?>v??????.??Lb_???0K??????[?>?????|??i???Q??B?;?? .? ?Y\??S??8????&k?7???$??MK#|??)?#E~??7?z?R????x>(+.?????????8d?m??eN?y???????q????'???2?BJt?????g?a?,v?K?=a?5|:N?????????\??T???W??O??g????;GQ?&+?)??f?)?[b|%n???x D?9???]???.??K??2?? +???vu??x????$vle???l????????v????R{??uNd???)k? c.a??$??**??W!_?J?Z?q???w????mB)?W?}K???Zt?=X?q ?oL?,?????F?k? +?k??s5+o%?/?;>??? ?wq|?;c??;?-n??|Zc??X*?2??????s? ?o??????Y??I'?;?????p? +?c????|e|??Jm ?VG2??qr???wQ?.R# ???0 j?W??0???????},???z??9b}?M??C?HB?j?????W??{.??8?UV7??*?,'v?'??r?9</?????%??????:??t4????( ??`?????=?ee????,H?k ?X:?kgQ?dE?E??\?0?ud? ??[?'??8?u^3?u?j?Rj?OY???R#??l?0??b?H??9?1g1??1???yw??\l@54-4?,0 ??A6 ??&{e??aPJ??e6?6??^??Jyf??y?????K4??b?B3?p+??M??J??Js? +??r?h)?6n;Bm??a4?K???9v???j???&??]d??e]?\-\tkg??z???Y?w???uk?o??X??\u?K?z^?*?|Xm?q?~v??l?Z?w??????????o?K}UAT???z?l?1)7???6???CS?c???z??3???Fg{??8?SAtG|??? le??\#Q??*D?-p;;????+?;?6?????2?f????? 
???=??Q|gN?????|/?1_$Bo??(L>??t'??A3??!4???x???m1y?a??p???3M??F??N????JgE? g???????????w????(??z ?E?G?q?Q"????z?Kr??? ?,G??~y?5y.p8QV[X???Z?BTJ??(?D ?^? ?(?,?E?#???%H!a$@?? ?!??a????/z???\???&???"W??<_?$_5lQ??ax?x6][?????]QS??????R?rzP-??_9????kB??????}?[???l?]?mD*m3-?o????????t??*?b?p?%???? ?m?,-2Q"d??t??[????t??n??.???Fi????dQ?\oS]??V?A,???tK[????$!8?NZx?\N????b??A???[??vP?????>???N???-?E1?????7?'?%?-???O??}y??"Q/?g?(Nq???y????A???hZ????`u80?ln ?????N?$LFV??? ??? ?_ ??B?)Ixs~?R?????k +?z?????7C??1mB??????????sk~?? z\3T???+'O??:^?'?? ?"C??=?t?cS-???^u??Qm??)???????FM^?e??6??\??????FoA#t??6yUW4.n_ ?o??R?????G?Z?^?W???????}??5H???#??+&?Z?????9??????????a??,??}&??f(8%?R.??S??p?Y???v???m?{???y?D??q6i???????v??%???"8?????.$+?????'n?D???????X???9?$H??$?9??????m\? ?^?_??U?t???????l??6???q[.??'(H'v,???K????=t?4gJC?aQ??z???y?f,~????Q3f???q?]i8????I????s?kpl?e??G?vIm?\||?t?iI>OfwL?L???}E???~??pI?"{?()e?<#n? +1l?t.???(???)w??P{#??m?????7?6/Q??*????>t?w?;?s?O?:e?iInY??h?k?,r?: =a?8???=??C +???CA???aE?6[?+b?d?/3?????lm?T?S???L?j@?,?]???}??C?>??]???sL??qM??F??O?`?'3?'???6^? ???{?g??????}? E^?"Ce?lP5L-???? ???kAA??h??N?^Kn??h?E?X??O????????t?D???????(_??@?sYpY9Tt?@?n?Z???1,?|?? N^??????u????a?/?n5(H?z*7!??[?wR?W??? ??Yp??PR?e??@??C????&^p?l,????\l???<l????????f??{ ?????B8%?g?0 s<??C?????]????`???z?`y?? l??? ???CK?2?ocqk`mBT-?? ??m?pT'vG???! ??/\ ?????QnY_(??{XA? d?6y63?M??????????????E? ?????1(T??b]????f?EV? ???W?_??>??z??F??y9?2??'?P?R??4?j? ?4T#k?A?h?g}1???L????(?Q?]'_?\!?},??X|?6OD&M???f?E?O?%????w?d0?? ?[?.??????h-?a?7?7?[!??d????G?XEv?????????9r??6|??-y???? ~?K??u&%4p?heG?u5=??-??? +?:? ?F??![dx)2?? ?J???];?L??X??e?Pm???Y=??8=)?????????ZSb[X???uOrzh1e|?&???t????7?w??n )^-?&?T???????\?]?4?~?X???A???????=?g?G??S;*???? ^EV??W?`?k?6???f?^???7??^?(?,?i?????f??PM??B?M??D???( ????06????y*/??O??L???0?5???!??V)K/05?f??????Qcm&????.?=????RC??/? aeGw?e`x???L2u(.??_??? 
??AJd8!??;?J}?_=7???6?v?t??J?3?????sY?D&??RY????0?????3??K???{I???Q???????+??N????U?????+g???T???M?}??????^Sl3??P?p?(?D1???????TB/? s "?:??V B???????p??? +?s?+g&8?*#|???B]$_h??o???a??lr?(???{U???-?xZ?U??)???V ????8????/??????q<??????z*U?????%???*C6Qd?M??  ?MXa?6$?b$???2eHA@y?~???s/??^????????"C??Ptn?j???y???d????????T???0?????yaU?????!????????`??????>?????a*s@?6?v6?? ?g?-?_<2?W<=?MepB?n????pN?i?????13J??uUN?=? t?E +0B??i???j.r?e:1^???'??@??y?{6*d??pAr?]AzpDU?k??Q???3o?vh??Qb?? )??eyr?]]T?#??.???u???=?Z??l?u?v??q???f ?KJYj?{?????|?w?D???????F?e??+?!???q?i??????a?*\?C]`????????#??m?m?v??2??@???@=?k???=?m????(k}?????iT???H_? ?.Fe????OZ?=????r(?rex ??:??D??????M??9b?F?????????m?Qj?`????&?~?,?+??kVnj??>??d???M?"g-?B????d???T?Q?b???2?0??r(?a1D????@=?}?ql????_u????]P?w???R??S??9????? ?????????&??S?Z{&v?[} ?????????M???I??q???????ewL?`j?6v(??N??????|??E?!At/?/?4???u?,??R?\c??,[???.?u&`5.??RTT?B8?0?E? ?dO???&}k(?>??WgQ?KoI?Co?8}1|1|1??R?2$? ??Q?'?U????&????e?_]3????? X,?6f?k{?8A???*?$? ????+??PUf????z???j??? ????6??N?N}?5????D&V ?2k?C?????&?O3?????p???=??? ???i??=???@?|hW?lTh*??????Ll?????9>?"{?2HCIae?Q????%?O???Ae-M?M??????)D X???m????x? _: +%WO?k(?????????? K?t???1?k?`???L?-2WH?Wud??????V??' ? ?"C???sv???@W>?R??????=??c;?b?f[Fu?x??R??6?|?/2??e??jB`??h?2??8?????? h?H@??!Rt?G?6o??' ??,0t?????tK??$ z?Fp?@??Y ~?<>j1(#n?\?g??Q4?]X??Q]%????r???b{?u?}?U./??sM?@??? +G!]????vv?P??????+?H?b???\?4??:???0?,???Z$ 'Utyg??????? -?.??)wT???)?.?? ??=-jHm?L????v?S???j???R?1r??3]??t8?P????R??3??1?x?????;%?VyR?%?i?a5e?XzaV?????N+??H?????lb??6b|o???????z??????R^?]??L???I??O/uMy/??}?????d?c4?q?????????h:%????%??fu&f???l6?y???^???j Im????\??o_h:??s???Q????7????0r??XE?Z??t??*?Wiqy ?RI'??)?t?>????Z??<6'"??#*?????? ?G6B????c????s?&8?d?^?_?????????k???K|?t&???.?????*????? |i2+???QR?)fs??;?s?LAA????7??2?MKOO?? ???a?????????2?yg????4??c?P???-d??s|?R????k???i ?a????G]?4S???C?2??v?Er<:&??xr?[a??~%??! 
??m??!}g?8??????!;???0??y?,?ItsdcHSqe??????wTC?=???2?w???Q`?;s??&3?E%?@??t???r??????ns2???[n??????hu??k?????????Ku????G??N??_????8??E[?>??X5n?_8b???JK|oO??????z?7\??[??K?;? +o?e4[??5Yw?Q/??~e?0???k?/4????^?s7?3p??(?0vo{?cf ??7!?? +?V@?i?6??>A??N!?a`-R {?>I1?J?S{?AA2?e?@U?6h?q}%_???6`?f ??V??i ^W????j?B??1??^WP?Li?C??????!? ?6????(????{???!w?$? ;?v? d-?D? ,L?V??:c.g }?0???OI??u \`?l?????5??E?"`?F??? 2? ????? +T???? ?v?:H?????`vL????FZ ?e???&?W?j?}RP@??,?^Q????E ???5? Y_?-U ?I??f??0??h????4?r?o)?a??????w?"?:?}{?????~??Nz?? ?N?x?)? ?n????????? +???oH???4=9H3W?$??*c?3.?q9??q7??{9p1?8l!?:j>?!f?? ;???}????c? ??Q?;??????AQ\?? X????o?d?=??vH:r?er?W???r???wb?????\,1h6?1N??R???????Y?????)???#?3yn??>?1?s??4A@???G?????v~7d??\m??!???C!?7i??-7}???Fd????qZ??;? ?xD??? ????$j??Lfg2????#l?r? 7?????je??L5\%?? ????? ";+1d!2?7??V2]??dbG+??_C?7);V???>???Nb?}>uhZ?xvB????Ep?Sz?D????Fb +K????o??)??J?P?O???&?)? +??I?!O?@>b ?\doRw??_W?????:#@????f????e?n???}?u?C????6]?g?{]Bd??8~??l??Z1??q? Q?/?????kb?U$_Y????1??a?>}1???R???o?????x?c????=??Y????j}p??r?9X????@e???7??2U?|?-!V?.??Ip?R\?kT??~U?p??Z??T?@??(\k?7?&????K??[??Y?????}???~???Z???x???J/??R??\????????jI???A-t??{???uP?R????"}dc?k???????7.?jM ?&????ud[?S[Kx???`?????%??sO???M ,???Ei???j<7PC?????????k?V??????|??5???v???? ??R?o5?x?u??#??????g??X???%:??\??&}?qW ??!d?xtY?x?o"??5Y?q}"?^x??u~???E=?????%([????H??1?:??X?8w` r};t&6'h6?xe6??3C&{?dQ.??K???P????@? +??yw?}L|????+?o?Q ??^?e?n?x??e?????Y?"HWiiC]j#?in??????O{?3???????????l???}pt? .yD??tP??~??/? +dGx??(w@????.????i? A?2?l[5 A?'SC???~?? ????]??????'#???h?u.?_?]?A?????8o/????~????E????W?iHA]?????_-???????,;?d?=?xp? ?wG?}hp??5?l}?? |mQ ??7???? ??+?9\)?. ??3 ;;?y}??????B?????%$?ey?X?v??{7a???????????:TW?K?.I?*?t???p???J?_G????;?? ??d????T?R?D?p?rE??B?t?????????????y?s?m??U??o?'????????? 8???n???????v?A???@?? +?=????#U?41)~lb??bb??????? 0?Kg???yhXg??????g?i? ?o}?V???p?? ?Y? +^??w?>@?q???@?:'??s??????D? ??hb??I???tB4????`5? V?= v ??f?#?s?8????????? ?/qKG8a?N-? 
??v???v?[???o??Y???l=?q?6??v3?s??A????? a?t?[d???x??/X??g???SH???????Aw???????B??? ?????8???y??5b???? 3?y?d?B 1??m?0-??? ?????????b?G?D3?|?L??????bsP0z???k?#???????????!????????AB??p? ??cA??????W?y?`l??K ?? $y???? ???y???9BL??.2??6????B??oE?kc?6?zQ???^j?o???? ???m?? 1P?zqk?{?BH?m?}???? ???('??w?-??;DJ@D ????puL/???:?z\7???X??D??[.?p????y?7?xpq?f,??~7? ???!i????????i????S?#L qj????$^?m?(????_????p??b???+???l?"?J??w ?,?@7? ??G6?d??????????????[??]{???l/?z) +?P???K"r*Y???q??R?0?(F?u9Z??J??Z?6?97'?%;??2CA?J7????X?? ?7s??/9????a?gpZ?y?????#????}$????c??????A +A|9?/?Q??y$?ZS?????U?z???Lh?? ???@?4/?/P??Y9K???e +?b?YQ?+?i?!\?h? _h"?JcA"b`/?i?!?b??e?I}?VLi?????l??????????_V}????T?????Y??L+ ?%?????2???MT?s#?e??l?.4??>H???x???[????????D?&??|?H????j????>???? k??X{?/??:????A??????}????dV6?z%:#????s?!:i^?N?????_??? +}???Q?%??l???=??*??u??}?13f 3?03?e4?HJ*-Zt????A?Mu%r?[Y?$? c'?3?}j???????y?^??o??? +???k??/????-8????? 5???4?t?i???????~?o???n?x??4?3J^? +????3???'D???????????5g??k~?N???V?Qq?C????:??D????E}??*=???.z?~)j?????????????.'???????Q [???^????b??_.5<-??w??t_??9??k#8?Js??.j??Fo?Q?'=T??kIm????/?w??:????S????!???"??S?m?LvOAo????B?N??o???|R????????b??/E?@?b%:7?P??E???b?t?P?v?bY???q??????????;????''?????8nS???I??bYD????'?~??8:??\??[??>??zu?)??;???/cP?]M?P?AO?,F?#?T?Z??+?LJ?v;??? +??s??'?d?Q?PB?U?i?1+r?M?????????9Q?T?;?$?g?;?k??Z?5?????@u T?i)????????????? gl??g?I?u???z?f? ;V??a6XmL??"p??w[?f@?3 ?????l?0????iG@??"?/\?{AU?'< V?Y?u5?4 t??? +k4K`??q??? ? ?a??l6????Z??s/?Vad ;r8???L{????gM? $j?h?2??? +?^?8?v??a??)?????l?????F??3y??T1??X??&^??GlQFSti?Kv???n??????1???1???d )j??3?q?????'/?t4A??7?T7o?6?7_?? ??Z?z?u??9o???`??'????5C?k???_????????g?Ob??qo????=q??PX??,??(???o???n?(???m??n^??eP?[????(?7???u???q?u?KN?2??L??$???B??E???~?2Sa?? ??h ??TMm?!?YB????2k????W?n??}prw????????"?????d?2M?1S? +df>=???? s3?lr??N??Q??2J???B??PH?Lm a????kv?&?o?-??U;M?l????V?????????j?????W??"?J%1X? ?T????l????c0?BZ?L?I?.$??dD?+9?:Q@???)s,m Q????2, ?h???JKu?6?????c???8?q|g?????????? + ?(?rSQt=Y,?O? 
1t?????x6GHbI%xf? ??.?2_??? IJ?47%k?? L?4?5 ?k???B?s?F?????;w???]???q{g??cS?????`?????\??+n??hZ?8??)??L!?_ ??? ??Vy"???QIn??yh?,G?n??W@?j=([???j??L???^K??~??^???g?Z??????a1???I??B?2?JV`s?rR??!@K?xI?)i??HFd7D* 2?w?7???4 ds}?[???x??b?9??2??V??!??]????{???CO?;m?!?!???/?b?^K???'?+og'????t>??'???#?*y ?(?^QL?? +????Y?60?X?~???:????o??O???? +?3???u?nS???????????A?????6D|fs4??L?4?i?????:*'?6??V!?T?$ +??" +?????p.??????????G????]0?j? A??9h?m???????c????8 ???}?)?c ???1?n?&t?2b:ShQ???6?bGQ^Pg#7????u??0?X??? ??@?)x?????A_?>x?j?o?EH7?y'?#e?5????z???h????????x?~}???5???#?)7W???v"0? +??UE?F?? ????0?H;?)&xO??{??O?;5???r?9?.M??? +???Q?h???s?@?? j??"??z???N??? Q?r??vOT?"?a???Q?jd8b!)2d-?]V??b>????8?????Jp_P'????? ?A%? ?v????????U:EZ >?,g~<`A?x? +???-???C?O??c???????{??/?_1>??????|!??? +??* ??Vu??{'? ??]?[??Kx????6?????1= ??F??u?q??]f?????~??S??0?!"?????M? }???i????F??=?  ]0\?R/ n4J???b??9 +X 6EC????uG(???~{?/{? b??>? ?v?m?S??L?3?y??O??cN??V???wP>?d ?'@??u?p?a?4? w?]?{?/?CkB??E^??e{?|'?w?a0Vl???? Y? d???<22*???H?nd${od??fd?????? 63PH?1p?k-|?e?l?????=??j7??ro???g??B?@-t??u?h ?????*`?dkS??? Xg ?ZB?.?x?%????? D???? J???{s?".???)?u???[???=??@??| }???? ??? ?????/(V?k?m??X????????T?? ?????]???(?????dd??7L??DD??xx??8???Wt?a}X{?(?+`?8? ?CM???A at E????????b 7k`z??????@b? P??!A?>'?? d????U??W??^=2y??Q??c#????_? +?C?B?Cz??B??dP?? ?S??I?8X??????}????MS ??-y?Wr?A=I?7F? ?)>:??4U?-v +???C?>???*s`?????A?{H??A?f<?yL@?3?e` +z???O?????Lw??????.?????n?:p??c???????c??/q_?_D?9???Hy??go??]?A?? ?\6!?w?o?D?? ?>=\??n?$????v?s?HE?o???715??Q ?6??Kl+?5?????g=???%N%????~a?%M&6?C?!??4????Y ^=N:??V??,????W?f?~P?=:??o$tT?9??-!?0??h:?r!?q|??!?Ft??F?????;??$M?=???c?F?8??2I?f0? ?? ?i?f?z?9h7??????????1?S?6???::?99?E?"<???V???d??dN?$UxY??T +rd????Rn???s=?,?af?ufQr??t?PZoLhp???i;???????g??[;??????/?-j???????G?(?;Qa??xB??A?Q?8U2??BzRZ*???8?^$????_????????e?>kt??V8*2?? ld???B?2S(\1}?????????+[?_??s|Zyx???!?7???????-?Z??p??E??LZ???(???r??3?)??Y?;? ? ?&?'K??R?2"6?H?? +;S?-1?v??????;k??j?w????????~?n_8~??z?????G??kY?? 
??P?*???R???????Lu?OZ??I?H?=G??Q?H?KWH???w?1?L?"??3?????? +??u?g???b????????7??y_+ ?.?GW?N???1??Z?0/S,??(?V?T??????KU?'G?h?+?uE?Z"JC?~$# SP-4??E??2?i=5K??6?0y????I?????????Q?+?a?+???J?I??B:+??#??I?Z?B???N?(?J?\? ? ?|??\??+?????e?iY?@??1?L????C?B???v?Z????????8??MJrWlE????r?i$????s1??(?$?????BY?KK??o[G??\????H???\??????x??}????????-??m???\??????I?X-?8W%????;!?/???}<15?$-3?h???E%???????K ?????-? ????f?I?J??r?????EUx?? OZ??n???7[,W54? k?????x???}?J.?(? ??>?/?????S? e??cO?E/?YZ_^?YV??hW?pnd??????9?~?C? ?Ta?| ?QX??H?????? B?\0q1 ???d%???rt_]?o,?;w??}]???L5k:?7V?v???`?5 9 ????C???wI??? ???)5?????U??Y\]??{?#???m????Eci?y`?b9?Q?Wer?? ?][??????????P??e??????:?m\N?-???@I?_;#??G??j?? i:???{???,???&K??~??????h?g?,e>?? ?? ???W#??$h?X???j?j????O-??1?8???&?!??|?SW,??????????+??L?????_?y"??Y????Le?Le????N$??@_ ?????7Z??C]?"\???/?7R-}E?)x?]???#9???>?9?????$ +? ?????I???d???-?2?W??????????=???????????:???F"?]j?B??r83?nQ???z???v?????{??7??m????d ?0??? +?}y?Y??????C?n??!.#K?F?$N??C???(@W%@ q 7j??A?Sy8?q???? +9????G?W??????3?b6?"?|???,????:e{???n????Y_r???2???%??? ??T/?~j?05????? ??L?wT>81???????~??WM?6???l??????~u???a?7?1?z? ?Zp????m?$?:???`??#????j??eO??T??Sk@?5?+?D?????e?!????{???^y7j?Ds? ??J???????G?Z????g?]q??3F:`? ?L??g???m??-l2?Os@???K?? ???6-?Q?$?? ?QW17???X]???xh??F? +??rE??rW;??;??A??5?$P0n-??Y`?z3b03????'??????D~@j???Y?HT???2y1j/?????DcU7?Ps?m4?]JE?e???i?,-2??,??;3?_a??+ ?A?G????I?????|.??|????3?(''?%$??A?pF=nT?A?%??T?????h?b?6?FHS???j??P?????SCEK?`????l?? ?Z??E??FJ(4UAE???P??F_?5?J|2f?S6#7?9?&?>i???-??6G?-????j9??o9?xf9?xM??????2??>M?#y? -???B???5TBoc?????4M p?F?????G?T=? ??y?f?)3G?_-?}???*y???u?y{?W?=?4?)e??5e?}???]f5???|b ??8~??2?w?>????{?+?Aa{???????????~???@?{????+D?g>H? /?G?!X?_??;? $??$4Q?~b??\??????????.?B?@???Bz?? 0??!?H9? H?!?S?`?????"+?????????|g?????????\_O(?? +????????o?bE?zO????,8??????c&???????y!$z?a?|H??(C????????h3~WA??# a??????.?*?7Jo??=1s??????hG`?R'?[??vb7????v??G????tG??N?e?s5j??wd\H9u?? 
????Sq?da???O(?????{bl???FG?(???y??????B???$?$????5??????]a??y_?wxL?,?o??5/ '????????????!\!?*K?C??uR3?W????q??b=?9? +?;?k$L??oY??j?T???g ?r??5.0a? ?or?5~=?]??E/M???????v???Gn???????n1{?/?_?$?/1 ?&z?1??SG??[?-?????|_TIy???}dZ??R??DG i?$?t?B{?U?w??>.^?:??_????-????[???Ci???u?S]j2?M??^???()?<=????T3?| ?",?7?Ki}?b?i????9?)d??!???2P??W?? ?k?}k??T?W?? +??R?{????kG??v d??6D??%??ljFd??k???U"-?B??-???8e3?^??????????d9??l?3??%?H^?i?wJ?? ?W;}?\??>?????lu{d?]x???57 +???s??????TBH}R???? 1m^^ZV?????X?V??u{WvU??Rvz_}!?DUndP???1???.5#Y?2??|?6U???Q???*??I#S]K?('e?? +??O?\5?q??3?{?b???E??"O??f!?-#??]??]q?????J??jKC??` r??9YT|F&??j?t%_??,Q?M +??D%Jj????jy??Z??]? iZ$??p?4'(^?????]???? ???n?,7Z?????????#???~{*+?+. ?-?Fg??bRs??,S?)?????"c???V?b?4 C??nx?e&???i1+e???{ ???)u>x???Z\?X?+is??x;u\????|??????2????U??/TD?J???"Ii???yR? G#?d??,??j??I???8??D?iRI??S3>???H???xgt??H0V?????`?3??^?l????o????r???????z?_??T??*-?XI9+?_*b??U???T ?0OA(?QFt?? +????????i>w??d?#??L?{????-H??????MW?{T??[[??????S?-?N?l? e#.\R??? 1ktJM'?:Y]?#?VU???:aU????IYD?G?t???? ????`??@#=?????????zc?{??=^??G?d??????????????H?r,???'_?? ?2z?M??e???*???QP?}Q???0?a???8?tJ?qc?\??? ??n???u?T????G?????A????v?n??\>??@??A???1?>vtd?????RBzM??^ ?L_?????w?y??s|@?82????????m???????!@?? (?????L???????3??, ?E??TD !4?M$?BB??BBqp????????qz#8??C?9d:?h ?e??*6*?O?#zF????lp?EAd??0?fC339??X>????4???K???n8?8????z?@5u????6R?JH6I??? +6??7????)^???3G?E["??\k?&{"a?;?64h??b?4gM??u??`??v???=BR=??9U? !?M?h;QS?!? 8????,?p8???tx?z????f4?Mrs?3b?!4w??b??v?XNC?? +??Ql=HfB?j??D???YAe??-?U? ??A????"?$ ??(Wh??My?6??m???m??????C?c?z??4= ?)?AHe??A?iE??L$ +l!K??$c.? +????? ??C???G??w?9?>s??=???c?]?`?e|e}d??? =k???et???9$???? Dnf?1????Br? ???P??!Ow?4>??.@\ ?]???w?a?o?c??????x?^???????g?}?Fo=??O7G????????'?(?c???+DT???fP?OB??%???@??E????yH?s???6"????d???x??`?/>????cO?~?=??'???+???#????7t??|??>?????k??U?Ec???+???@?j??I??X"+t +2????g#M?4???`X??>????_???O??_????^r$?K???n??????!?qwB;D?}?k?mo???7???!?]??R?=(? bj?U9?BG?E????&#/j:r??G2N?? 
?WM?GO??E_?]????????????;?C?if??2?Mr+?5? ?Y??~'n?i???%CQ?RD??!?? acA??!$N&Hq4Au??X?o???F +x?K?{s5n?Yy?????i???)[#Z?~?jQd7%??W?????4&\?}#}$j???????k??g?a??D?T)]?B?X??5??[ch?c?&Et3?,?DI???b?,}????0??ai????!M??o?+?oRw????^?~??<&?K?+=??(???]q'c????P??????5g4????_I{????Z[??YsQ??t!???????d,H??]?? +?]?`? ?F?6?????X|?2???CCE?k?:???9?=?t2?K  R? :? +??(??v>G??m?F?????.??Vv at s????????[?x?F?????`??i?N?b?gg'?U???O?l??k?r??YC?d?????R????S?ae????CBZ??????g??0????!????(A ?g?`?????0?????x???G?U??;????????)}?????TGy?WR???Y? ??D??m???/????7????~???[JG?w???I??L?}???;r? ?q}_r+(?[v)=?;U!?_UG?V??z??h]K?^]?>(????????T?M?#=???????/M???Wv??v?m????6_%?r9&??e? <7?H?Fr#\ex??G??}Gd??????#}Q?#S?.???\??"?Q?? +Q???r??W?? `????4????8?5???????i???i???????h??I?s???{"???D???dM??d_???? ??????w????zs9X F[?.]????O1????Z?3?? 5? ?:??3;X??{??._???F?? +???\????f??w?o9???2??}?~??????E??? ?Y??M???[X????x????92?+??, w?bs?z?????d? ?-??6?1?????9?1??o6??5?zk??????<????.`???pC??H??`?YI4?Z?:l????????V?3]{l|z??????ZW?{???m???`?????`??????????a??d????T???z?y}g???y?"???Q?. ?c|1w???????}'Q???Q?~c4hx?`f????A?5???EX???? ?g??U?M??#?S?E?G?M??7???}???>N???_???W X>??e?? + A????A4?w%n??????<3???T ?l?D????;L??I????S????N? ?!?q?I?1?y?????C?\??F?????????[P??K???J,t at A???? ??>F????Ya????v???????B?-?u??FI?9?5>?!?Z?&h?1?@???[?7?$???nz?j???B??n9?Z??y?$=?Ra~???@?*nofG|g?????s???^?n?)m???2gc??A???@?"??????h?Y?|?6F?7?"/?-????Z????z????D?/T?=NoL 8?>3?[Y?9?v]???JaWAMlG~?dw???????]??e?????Y????7)??3??|??QQ?@??6d`?????!2?u2?4y??hu????5?F??R=?t?&??DU?????A??8]ek#;???k7?[???? +[?? +??????7?]O??Pl???N'???????d?d?c`?>2?Yn?E?T8?Q?r????m?0??s????????E??5 ???A????? E???e"U?&q??.a?Z????3???????J???????W??%??????Z ???+???0?r]?M5j?Fw? ???;????????Rk??iU???F??????Z?}Kfh?????J?? ???5??e??]??P?m?3H? ?a??(rh ??2FN9???8??!?P??1?????H????mi??+?^???[?vY[??}????~???????_P?????lln??0'?????g??????????L ?W?}??b?8<*???up?? +c????f?t????Z??*??#???.?????9??9???>*#g%?????}???? N???z?? 
+?8~???,??Zv8q[qI????2yy???????1?ym?????{s?$$?N'????N?_??`?(0?J?T +???7Up??NF?(p?C???h?O[*?7;h4??????7;VfW^?\|4???b????t^??????RAJqML??Vab?????? q?O??bvZ?H*?Q`?e)??E$?J???FI??Iz??????#?y?L???~m}????&?vyC?M??N?c=r?w?fK L?? ?s?????c?q??bw?]??.??,{]6??|%BX????x??,??JI5???? ??^?ze??W[????\cG??c???6oVqk?UA3Kvc?kzC?WJ}?R]Vp|m!?????5?1?=???????????sQ?+|A5??????9?=0?J?]?/t_???~ J?%#???1??8i?t{t??gw?r???tJn?s????G??}? $????? ?5uG6?D?7>?l???k???????}??9??"3 \#3???!??????C???d???t?????n??}Yi}. ???Q??m???T?ww?z?w??????oE? ?B???G???F???????????v?;??x8?N{8?ZtwAW?a?*?I??c$???d?t ????@?8 +P!{R??6?'O??J??U??tV????>??7??????3?????0w?8d?4q?f?d'?;u??nj?c7??? ??+ J?+???X?b?5?z??? ??t?u|!?>?????????c??? @? +??,x? +???9??8?T[4?Y???9r???"m??d ??????PN S?Y???????????[?UP??"?$7?8 +p?k???C?!????m? i?Ey?!??D 1S? f?????HBM%t????\)????KR ??(??c??7??J?dV?1?U?$???????2??o??9???s??|??/?U&??6?|??l????t?????K??{\??;???[??? ???n?t7g2??f????????\?-?!?\???Pt???`?? &-?A ?o?1k????}????mx?]a??Km>???????b????w??????-?????=??p?z?6e??m?r????swh???#f? ?Lz!D=H???L{?B7??A???y??S0w?{J??,)?l?X`9??z?_m???? B?????7???c????? ?v?>???{???=e7?3g?????a?y??????????k? ???n??????Gy/?c??g?u???f?????y6??E?_???????????6Jl??,?????^???~??????Gw>x? +??p??}?m?2H?v??7?A?j]HG?Mua?F=??u???]?>???cf_??I.}?+?|??j=??? +???V??q?j?pp????]?)??(????/??C?u?m??yb? ?{???o???!?jt? ???Ao4*jN???[???B????z?X??4??????B?=???x??y82}?}?{?)s??P??????? ;?z???????] ??????r?K??????a??s?pw/:??[? iKu o?L^????:?????Y[?'??~??X2?'?N??J?????0?:??J??E??q*??Yu???? ???=>gWg?w ???z3?'?_??Y????=? ?}R?,?@? +?k0s? ?)??? ?????\?? +\}Wn9????ON??%?;]??u9#u???z????OE?NF??ws?g?'c?J?d??:?S????}???g?????6HF??^??IFf,?E+???5?u?f??NgX?3?+&???bq5?m??=???|:Y???(????{w??:cj:xR[t?h?J ?:???8??;Ii?} h???W =m0???z/u?3??I???l/^???`??? z_?P,.?F???v???O?????9?)??I?G?H?????????????????a???4u?? u???D??xk??F:p?????5?R0V??* ??2+?m????b?? I?g$?v'EQ???x???4?Q????/&4??S?????TuRshm?????KtU??2?%?2?}pe?w?*??R!A G{??f??)????+?????`HenUb ?(?V?????h???9???X???|SV?Wcf?_???X???T?? 
Q?i??S;??????;???qfi?;Z??od"????&???B +`????U??v? ??vXp??u?/J??????2?????F??o???r????????Jy?*T!h /N????{Y??A????-M? /| ?e@?,j?,} L???l?[7???J?????k??V??5.K?+}M~.?l:R?p??????????i^*q?_y?????S??+h??z?T???d?f?epD??E_d??q$O1??Q| ?S?`?? &???c??;5\G;??????_??qlh??n??????????%???0m?T?v?k??7?!?SP?????!%?R?j?i??????N&??2?U???????T???? ???e???????!? 0?I?????$a a??"2?F?R??Tk?#S-c?m??????? ???V????,??????pz?!?????{?w??~ ?&yl";??!o???a?=???q??N)???bw????sN?Ze?q?\?s???T??????G?(?^?????7??;?}f??}????R???g'???N??LN????b????g6??,???>???"3(;J?? ?????? ?????8+cf?*,6??l(Ng?J?\yr???'2_?Xx,+ln????_?i? +??? + g&^?????t??/???O~??????Ho?7?D2p???$????[F??%c??????????? ? ??k??v+?3?y??9??J??[?vvupJ?7?Y?O(?[rT?)?URu?[}?CTt1?5?>????r0????????r??W??S??jK?X?B[W?g????|????K?%?.??"L???6?j?WR?J??? S?*?i*w?DV(??e??????+=???E?% ???7???ig????P?U???TC??zcX]g _?y?3???u?f??1???:???Y?EN ???Z???Q?-^j??%??|???L??{( ?????( ??cH?sk????J2p?d???\%??1??=????????(?M???iF?Mj??)???~h +??^(wt?8?+E?^{?QD???4K??qQGg?\:?0p??? *?aP??|??}?!2???3?????? ? ??????H????T?18 ?+??I?]?(St?0QBa`??**??0?0????????}?????c?g??0g???{?S??????+???W??u????=6?qkcw???;???n'?Mz??(s?c??" ?f?Zn???LT%?0$???K?G??|???}????????????????^KO??}Kx}?y=??x?|???$??v?K? I??d??L????V)r??>c??#~#??C2??7??1FkY?????E?? +??????[&?N?n????;(??x??X??,???/???U???????????7?????C?Gr?7????2r???hgOC? g?2PlYf?? ??0=m???/lkSW:?N???2?j?J???".??R??.?*??@d??H0???K? ?, at H ?K????# ?l"(??\??Tk??c???,?9???q????[????;??????????0??;??3?Cj?n7?]av?,3m???k??s?t?????vW?#?2?ZO3???? +F?tA???;g?N@???2M?-?????zl[O??}?q@?J:???q?f???r???z??z1???j?????V??????Q???9????.!?????v ???H????sVD?????I;e?????F?^???D?????[dS???XI?3??f?/$??\I???I,$L?+?'????c\.Ew??D ?GM?"oxD>???|???o????=1??'tA4c=?P???? ??v`?? ??M???8?Rc???v?1\?c?Wp?Sv?Ug?P?b??f8?%? +q>?,O?S?SA.?Se? ??1??????a???? ??K??W*B^??h???{???^ p?{'?_?X?????z?j????>?PA??U?u?2?L??e[%?8???]K ?? +??$?WQ?b 5??307w?!????<????????!???.(?Y{x??????;v??Fc?h?J?bak??co{??MU??4?Q???mTJ?}?"?E^?C,,*????Rr k?Bi{P?t??%Y?JgJ^3??4??? 
I??;??????*?A ??`?{W?6m=?-?F?Mn????h4AfU??'UUqgK*R??T???2???T?#*Q?? +5?[??!b??????b??7h9???I??w!?cZ?\?????~????{????aGu??>U??/m?? ["O?7'??4??? Y.??\"G##???Sk?q?}???????5z??%=J?A?Q#r??t???3??Z%???*????#?8?????p??^?a?j?c????yAo?Qqw???B?%?3?6?#???.te?x?????[?)??=?C??i?MZ??%-?q???|???./E?o? ?3h?????????]????#????x???`??$o?vP0r?;m?L?L??&??b?$.??J??^?7???7?g????%S?_??.l?t!??.D????{p? ?!??$?????&??gJ?L?h???d?~k?,qW? ???i?A?T??r???I??? ?u???s\?8V?B?r??]???}/???7???o?????.??]\?wqw0=0??{pv??????E???!g?(???p?]??-?v%/?L????s(?Z????????S????B???B'?g?????????k;????y^??4?G??????A???O??j???,?od????R??k?`???5f???m??m? Co?M;??????V'j??????Z????g ???-?W?I???V#????(?8???g+W??Wx?R?A??D<1??'???????a?S??;?v?wQ???X???q?%???? k?\?~g/I0? A CB??? e?!C???(?z??v? G??9u????%?Y?2I?t?=?@x??#????N???0? ?q#?7? KPMj?lRK1A? Q?0?&?x?W?E????? >f??3L.?09?x?"?????o??o&??I??aM~O?H?6?0?%(??(9J@?R0 at S?CK???A-?@O +?)t??.??'????Dqf??6?w_???_G?I?P???twn%?$?5?\?/% =??L??????@C9?RjBKxAD,y?4?sb;??4(???? ?T?%? 4[?D??,?F?? ??zl??Ilg??b/EY(*$JX(????r?u?1&t?2??43F??A?k??????????'@?ThlI?4T?d???1P??B?p6???Q??A??9h????????<6??'%?? ?)???,kLR?z??'sT??9????????2z?C??$?[??A?_eJCk???k????3m?Yh?FU<??8h???&_r'?Y?qywL??;j??1??3l|?3$;????~+?b?n??? ?^? ?+i?S:???N?:??j?4'???P3??????????????D? 6j?9h??;n??7j??7l??7h??{???{?:??W?????^+Ky??rn??n???KQ??P%??CK9 ? ???O??T?"??????X??=??qJ??uL???~#???W??,~?&G???t?? ^?t?????Z??/???????-?g?G?n?C???????@?9$_?????kDE3?????c?????!?>?8^??T~??:A????m??t^?d ?9?>u> lq.>?=?????D??E?Nu?z??????u?j?uk??{???E??)???wQ?GD? } +??)fA u?|??? ???\5??7Y???S?' 7????J??+kV????z???<???8*?s?)??????????????wT{ ?_?@?+(??@??S?????S0p&???K?(1f??h??HgF[?7?%4??/hZ%??N\???ww?V??E????1???OZ?{Hv?????????O????u?e???????>????|q?y_4? +???e??z?5?F?y0??}+fC? +<^nI?Oq??M???H\"??K???I5?}?g????&_???Zqn????=???* -?. )?w*?????ZuIp??XH?uq??Eq(*????0{z?)B>?qtNa at o*?????6 ?W?????i5?r?? ^J??T$%?)OX#??/???Lei?V???Y??????D?W?yDSq???? +??????f????E???G?U~Z???0???d??J%3_:??2??)??5???v?? 
n??@ayZ?????????sO$g(J?6X'n?>??M]??)??c(6??`?1???i???t???o???N???!???????S?+?7??IhY?Df?????? 77?? 6????8?? ?2?HN???KK1-J]?,\?????M6?)?????e??,g???:'??H??FK 3??NOU\?a??j?z_??=+?;?Z?C?????3??[vr???I??]??P?k?"?U$J$??r?m?aK7RTn????`\3C3??{???B%?Vjl??&t?s*]??:gO?.????w??r?x?????y???~???Q?4???,???,???l??? ??9??????]c4??\???P? ??R??2??r<?en?9????f??eR????????H??)????9?V9Kc?F?r?D]vG?pDQ????w?|r?????O?o?E??w?/?????D? +?R????u???n?n???_/6?I?4????/KM?,I????R??Y?X?'4???O? ?nr?-/^?S????n)?V +?8??????`??? ?3qf3@??b? ??????L?????J???rO? +?V?????2I??BQ??(/?"77?&[????????]S2k?y??? ?s???^????o???. ?D???0??I8???pC?8???_'>?Z+???J3j]??JegQi??????????? c?3%I??? ?TQ??/w???v??xD {?????"r&?"s???N?D 3zo#?N?q??????U???u9R???&??;??*????[????^^??U?%a&??? |y?U?,?.A*t?= e?/?d????t{?J??#n?x?=L?~??4????9???\?A??? ?@??--b?E9?????????\???L??B??-RV????????p?2f??i??w??u??)??[S?????=?d.7?.^?^??M???????????g?????,'c?&???K?Mi?o??Ge??;*?_?R1??QY??,?K/?_???"@? @?3?{ ?G????\??????*X?? ?4? ????kk0}? ??y??)_??T??M U?????NN_?????{????q??~S?|O??????G?_?g/T??&??<?7??????? `?/:???:?GV??????GsX??=,?v?P? ?O???i%"??H?J?R)J^"?~E ?k?/?? 0??;A?????????X???-??g]Bm???bP'?`1eb tb??2?A?!??|??j|4?!????84 ?????????1??????l??w?e??>,"j?|????*??F??5??oa"^??3?B??H?QB?O??}?ZO#?&T???J????eiD??F4??D=?NT??du?tB? ?tB??j=???v}?5??q?7?7 E]JBH???*????,Q0J?&?$B??A at dTAAD ?E?q?;??D????/?3~????s?{?}??a??A3??t!????&?????'??L????C?b?QC???'&t??7:??q?.???U|T???Rk??Lk??D{??X?+>??????,& ?j`?B?v ????y$h?z????m >?Q?Lb(??$?'?:n??:f\JeWP^??S^?k)????g??(OY??'??1?61l??b?RY??????{lH??7??&Z?2?*a???t?GLIP`G?Y??O9?%&??Q?l??/??P?[???Z??F,??[?? ????jhyGi???e?2?e?so? p?d???do???n???????/???@????Q}??tc ,??+?t^F[???l???#?:??^?/?????????(]??)]?Cz???6 +t??OY@!@O ??SLz,??6????k?\??????%??W?*?\?M?[Y?t??T???B???~?++??zV4?_t?_?ht ?5? F4? ?j???? ????Q]????GW??P$?7Lyk?Q/}?XdE????n?mO/??u??z?F??Tz???.?+G???h??5;5?\?h?s=???rL???????3????????y????I?? 
?jr??'???=pE??z |?%??*XF???????o???[M\?????????1L?kC?z?(iq?g???q:?Y?9?C?1?W?hx????(??P????d???L?????M?????????O?v?uY??????7bU????$`??$?l????|Ay??|????-???*??? N'??'?8?6????8/?C1????u?"? ??'U???o?5?ZlVR?- >hYt??(??v[`?mA????A???_? B? ?Yh???sF{po Z2??r?%???JQgR ??Tkrc?@?N?F??"R??k??????6(???~?N????2?QhQ??? ??:o?1????K?C{?3C?gn??? +??f?A??ph?????9????= ??(o [?@K?8??#??/????*V%{?UJ}????;%;??F??RNa?V???|???R???}6??K?#???"??S"?R"???F}?L??iQ?l!(B??z???[h?WQ???F? 9?MM9 p4??t(??Z??Z?2}?jY??fir s?4L?01?(?!??+I3??r3?K???*mSb??'??:$?\vL?D??&?~?J??\i4_h?9#h?@???2WO:?{??S?d???j?M??y??=9+?e???flX\???????m??,?d$???'fsS???[*?$?????x?%?X??eq???q??x ??%?b?)_? ??A}?????????Z??m?2?????DI?%?]h ?KC?(??p???Q???y?d?\?jL?8??{?6??D??????D?C???ID????4?*QJG?$???1??m?q?f???R2$5?=??~(???????s???????< ??>??L???dK???,MM???a????????f????c?v????oT??????Ye?z?S?????+s?B?zG??E?6,(??=8?u??(?%:R??>?#???x??G????????????aN??????]?????I?cU?4??g??.a^???0?o?GX???? ??s Y????S?>+??G@????/?fE?????????#O?????????????6#????????R?((q? ?.?M????r????????i3M???&e???????W +J??LLv +??m?????N?MI??6%?~???V?i???????#5??}x{?????y???@?vF????6WIk??H?r?k.????=? |?????*d???9?bN?2U???A32????ou?????/?????J?????~)?~)?nB????=? W?u +r?!x?a??#???????C+?????v|??o?Fj???=t??y;z??1?d??03????S??VN??`=>'??w?>???N???Vi?&???gk???1?'KXw?&??B"r???_?u?????h#f?5?D??4hI??@aE.?JV??dY?????k6s??E???e????&|?W ?B?????G???D? +w,?????H4? +??????E?x???Q??d^?Gf?dZgE&u?dT?A?^??? ???1??_.!??q$o?f??????7???&?????(:q?'????8??? ???{?_?? +?m?U ??c"?&"??4???>??5&??~??2????????V|?|?M????m-@??A???m{?ub??j????y(??N?5?#?vF??p?????^=???H??Lh?? 5&??70??,?b@??L?w" ?O????J???7?|?????!bB??%2?[??H?????#????K??? ?0x! ?D^'?\???%zLhY1?i????? ? +?@8?? Z ?@? +v` {A!?UL?qj#5??ej????j? ???pm?vz?'?B?? C??=?2a??2???E?Q\?&a-????e ?? +??'??>R?p????p_?????^Q5a/?D#|?i???? +`????3???0?? =?.?\h?b=???J?Qck?nd$?X?$???;Y ??w????$?????? +Yx?U4??N??=??K:#?L???LX?^?d??? sO&L????t??g0o????V?wz1??nk?Me??of?trX??n?R^??????T??3?u+?Z???D??uk?y;{?]?*]??? +???? 
??P?F$?????;?G5???????f?E???R?h???P??zEn??kM??g&;?S?|??? {d\??e5FWY??v????n??o)?%? +!??_???[k?+'1?'?? W}??Z?F??v_??A???:???? X???y?*?W?R?e&?R??{?<~Gy@???Xr[yJr????????????L???U???J?.?_?@ ??^?&ZI??Wo?g?9????&7?s?? ??j\y??"~?y??7?(?m?X?M? 7?Re???dN??r??]??e??????0???`?p????v?D?!\?>H H?????F@9*??E???????/??YP?M_??E???M??q??Dc??Rs??F???????@????1 ??=B??u?)? ???????k??<????(=xn +_???????^x???p???? ?????[!B>F??B????>?8_? +????(??{? ?i????^^"??8????k*-*4o.?[5TZK8????6????"??k%k?????^???]???[9??S9??Gl?i? ?s@n???%?`?Y,T)??*c?9?p????2a??NA????T?~?I#/?R_RbU[,???????Z* +e???ne?w>??GI???w??S?????7?B.???+???s?A???7$?c?\@??yT???x(w???IkC???ct%?$?Q????E??(?,?Z???hu5?Wc?pk?!?A?=?? Y?*:mfn?pL?????t?&s#D??#??=-?????%J??2XjI??dg??2Eh?c????jk??nc)R???T??>???v-w ???SY???f=`?K??E??y?8z??????A] ?N?Jt8?????`,j???Nh_?m?#??8??GKZ?L?JfJ????|K6Y?+??)??5-z!oR(l??ECv??7????/?{.?S-|? ??w`>??t+?:??|??hrV ?rO4.??W?]????s??^?-??&??*??p?? ???@[??J!?ViCr?_?}_??????`?! ?9?2?b?*Z??U?*??? v5*hj?????m??W;??>??8??????`=??5>\???????G?????=?9??9'??0?BOg?9??w??"`????? ?????????4??9????>/????x???yD&???>?yO?;??KwD\?e?????P??kZ??'????????/80?bs????6#oY??e?? +?????y???=u??3?^??N?@?K@?+??^ ?V7???_?S??o?[????????7???)??Yl ??K XpD?g??W"l?H???D???&!N?I????Bf??x?T??*???%???x?]x?? ?)???c\?#R????C ???h? +?????y4?]w?=3?z?1O=f?qo??<^??????}g?|b??3Q??????#?????i#"???T/Mj?2l???l?yT?g?aEP?=!Hi??V???R7hUT?????$???? ??.U?VMkm?,S????????K??Lk??;?7?93?v?????|?9????.???4nu?:??8?gUl?wEL?oyt?it??8??KT??p??????????????|?Q??/???????dC?y??X?k?C?v?V{aOF:?sl??/?oV-w?????^?R???Z???V??=?\a?,M,?)N??+Jh??????q?S?????;h??)3??0?=?7? ??g?o???y?????????[z?0???|~5?Y???9K?Ej??y??.#??:=??B??Z???V???lQ? +?>)??y???I????????rg'?%([??4G?X?????(?Wq/p??2?????? ?=9??s?n????Y?C??{??6;lLe?J?r?z?M????7?:cr?Z???n?5?J???????6YV?? ???`???<#????%?k,?????K??5/TX?Bi?,???P?s?cn?*'C?z?)?5+O????x?s >iF?????$C?,A?+(N88FR??*??? ???Hcu???G?? ???y`???????vV???(?? 
?VX>??(?F?H" +??B???y?N2?0?#?????hkb8?9???od7??m?)R7,?M?f?R??QQ??)?.?%?6?5?&?}mM??????Uu~????/W??W?U^??U ??6???? ~????(g <????? }?X?w?[??M???HF?$???P?MCr?\Qb?B???e?1-????c??iNv^?%cbD??mY??#|s??????5??y?q?'???O??a?????z?+??gN??UG????l?^??:t??"??(:?"~?bv#j?sX??????6?;#?Vv?sX????J?j\????;?^j??????m~{?????{??~?}??a?9m??z?>??]???r <$????7??,???qo????2????Q ? +D?M]?0?`??"???O?? d ????????? Fb?`??H? +??T??]?:?????+??lv?????????.???,??, !,?Il;???T??t?Qq? +??a ??@? vDTD?(T at a???-EEaD?wq???z?{?9????w??qz8???;?}????????-???o???0$?t`????h??zx???)<??pu#${?;???`?@O?=??????? `???q?#???~x????`2?D?x???x????+?????4???VLK'L?0E;???k??L?M?Ow?????x?w0?p ??qq%F(?3??7?V0?F?1?)-???UO%Z]?1???O3??vD}3\?p???O_?I?`???W#??D?2Z??Ko????x???v&?? ????????G???p????+?*?? +?x????&??:u?U?b?v?s?.?0???Y?u??b???l??s?0?%X? +.1o????C??&?S?`z?F??1tW????t????5~!???? +??. V?EA???s?v:+?B?;????t?? ?^?!??4???L?"????02??F???D?c??????w????t?OE7?"??4?.?k???Dgd?tJ^D'?t\?????4?h???Tt??b;V??~?T??>?]?b?S???^???a?{~????`{O?q6?MO&???{>tk?X?<.????S?ixb2 ) 48?LG&?S??b??I???e?>?*???&??6?~?v?r{??g??4g??6?[???]????d?Md??3?????p??????? 6?x??.L???&?P?4??C?????? 6??[?=?6??????*???????????v8vv;n ?u?0??p????ic?s? ?S{?k{f"?6??????hd6??0?s*.?PIhp?:?!???E=3cX?g&?w??9?!&???C??B??3?x??/?m?^??9???cZ???i[]?U=?V???UW-???u?g?&????Q|????O at 8??(?c?N?:????=?/|,?D|@?"C?;b?+"??%???.b?gmD?hE??????????Z???:?g??????#???9?>+gPs iQk?????noWKik??:>???0?z?????- ???v?&??I????$??D???D?`yB?p???^?.???????[%.?k?)???[???h?q?}?mi??????W??X? ?Fy?u?bp?J'?7???gv????N??Z??f?Tj?Oc?????j?W????-\?R?bTi2???\???"QiR?wqR??=q?oA?&i~?^???c???[2k???? ?I? ?x????u???5'?}???"???"??~????:?[?h?L??f?L ?.7?u?^??X??R?? Ju?bm?GQj?? ?B??p?$7??/'?????-3k???47?w?,?O?Y??G??=0???D?!????A?\+?^??9?O?Y2Z????????????H? +?|???D???T?=-MX`0{?l??Eeb?~???k?f?6?2???F??"]{ ?}?XB?U?k ????:?4?%????????I??&???4.?}???$??u+=?Z??K???^? ??# u>? ?}?k????/}??9???? ???-???Y??@????F??do?n?kJKa???K?(BK??v?:??)v??CtzI????????/c?)l???????? ;???#??????F_s????????????????? 
??9=??y?d?????CvI??????,`?(????????YH?F?V??t???tr%0?0??bz ?S ????{??u?P? ]?"?o +_K|,?e? OKTV?????@????[?????gt:? ?(?!o???-??b/6?6&???8?????????????? + ??u?.??}??O?3?J??????z@}??n?e?????/`????A?|??F??5??i????C????|?g????g????oa???^Q?d???M?7?,0???F ??yp?Hn??D??r???%/?lV??Q?r/???| >?'????_?sbw???M +???`?????q??s-?G+???Eoe?[/?"???2??t??{???U????>???c?m?/?W/??N +?'L/~"?H?t?u??q????0???????@?A*^!"??_?~!?????d*?K??w$?9??_???!???>EZ???A *?Ux?~???????#????t??v^??H??y? }?P?y????-??)?????????J??WB?>?*9~5??i???f??????~?????m??/???k?? F?7?_???????DG2?I?'?1d1?c?>??????rw?G??QP??+?I0?1?7??)??]z1?e?c%=kX??d?L??*?k7=?t?} n????????W?:G?F?v at t?????$???\8{C??????Xc?O???z?????????c???-???n?k=?tT? +??2N?"??mc?8?9??Yz??oG?L??X?$? +??????~??o?????q s6??Yt?gLI?gcXM?ZZ6???F????y{?8???8-\?I?|,??"?:??Z5????r;u?v???????w ?'?\F???_q?}.v??F??h?X?s?d?+?pF??S?,?T??c?vXUe????UYP?j?q?Ty~???ZP?-?Ly??KD??y???#???{??_? ?{w?-? \??? ??q>p4?t??SA3p"h.,???S/C?:??tSg??:G?8???`>???s?P7a_?C??3?6P\???:?&eUy??????2[??$???D??yx?AYW????? +????????_????7??M?xFkq +sgqVHs????+ +u???zm?V[-?.l +wv?* ?%??Q?UsBn?\?,???4??$7?????A1???p?~ ?/????k?1?G?n??&?1?:M?S?i y?1?fc2??4??*?c? +95???`?W?B??!???Ml)?#5??t?H??bT??nT?????w?mP???{?6??X?h???:?A??D??w?- ???????????X??\???u?4VuYN??L??4??6??o5V +?????????+-)??K&"? F? ?Ei _E????t??H???Uc-??+?hO???GI>ZG???????.?f?+?`_F???TcK`8l?????[??c?q??&~??.,????=?bs?Tg?'/,?????R???R???????Tm Jfy?<\?`???Yh?@?w???^???yi???9?j?K??\K??X?????:2C??|??JZZU?3TV?*k??v????-Q??eyG#s*>????4*????l?3Y?%(?Y?c?_?? ? +?;??"?Fn?U????E??ET?ZE?f2??1??i????}]????-???5?Na~MSDnu?8??'?t??2???i?O?i??????????$?2(??6??x=?I?mG:????V????fPE????W??^F???dp?3??S??f%??Y?V7s?????*???!,??#B?cP????$?5%Mr??&?K??J????j??Yn??.??Y????n??Y_??q?.?]???]@????k_L??I???T?????tfN{.+?M??f???????.~rK?0?e ??n?`C?i??????G??]O??:??u???2r0?'??6?sq/??a?4>??rA??O???(????|o?6???K?x???|9??16???}NV??;d????zh??r?g??u???/9????.??????q9? ?c?D??a?c??y? 
+??Y4 ???-?0???C??S?%??o?5?????F+?????~-?70??f,>8?\tp????5???/????e?A??v?=??~?c#?????DG?\cD?d?K?t?h?8??cDoN??shq`!- +???? 4???^ d?+ ?0??@ ? ??K???<0C??/H>???G? ??A?,?C???s???nh4MU?5-9N???0?=??I??'??8M????)???$?~???K)b:???H8?A?i-???:?H?????:L?? H?b????g~ ????`/?????Bs'???O???8K;E????C?C"?GD?_?.?p??Pt?(?x 6?:????? .??3H? ? ?????O??? ?#G1????????f?4???#?????h?e?yW?$?"?]#????8nI?>? +1??Q??AQr??e???6??N??t|?3?5?????r???^??k??>????.??(?4??C5?f=G??'q?dW??E?? 0??w??  +*(",**E'_?KR^B????x! \?K?^??M^ +n????? ?3?? ?w???W4??U?s!k??|????w2?6?y??,?4?S4???\!W???7?:?In?yA?eM~?????G??>f? ?'???3???U~"???k?0b2?5?%?wzC???+??\??"?VN?]?te1??J??GO5=?t,????5?m????v?p???u??????}n???+_y J"!??Z???o?0?)tz1?P?b?J?+??B???? +:j?????[f%=?\???w?????~s?9?l?p??C? !?D?C ??7?$?????F??3??#?@W:=????t??Q?G???.???????X????c???_?2?#\?0;{???o ?qz?? +{nN?????9N???y??MWs?d?y???e?T?????8??f-r_?G7??????nv??U????Io?????~i?????]S??R??-???? 8'??>?/NH?8*?E?,?eY8(??y1>??c?|??k?[??????hG?? ???Mq[?7??z?M??0?? 3?G??#???7????WT*\0?>3[}g??????g??q?e??n?ltY`??l????V5?lU?N??d???mh?:? Vg?6?:ZG?f?5Q???h???????x??/?[??1?h?i\P??>?yl???H( b8???}???x??)??4?[?%?3'?????????Q(??' +u??sF??]43?O?"???I?"?e???????H?]n?m3??v?E??4??]'?Lta??P??O.?'X???g:g ^????????d=???Q?????? 3????&J??????c"?I1zYBL?".:G]????2?E-3??}j??2 ??0 ???,,??iX??qX?0?? ??c??c????^???????+clEI?O???jd&j??? R?"9?K??(?O?J? 1???dyT|?22?HWaWo?_k??a??n{?<4 ?0 ????A?????f?[???Y?{I??U?????O?!+??i???f?????]&`QWk?1l??RV ??E? -S??%W"%??@T at EP/?+?????a???Bj?????f?-j?fjjj???????s???????????9??|???Y#??\?'?1 M?s?8?->q????'x??>??????a?????>??n?=?`?^c??=??O??*s?h?9z??t?#??^?????5?e?#2?3?O;i ?z??v????G???y ????????hf?4>??+ ??X?c1???OQ? ???)??n???byY?-??????2??}n?!"7??M?\???6??r??h????e? ??YW=?fVy??p>bk??X???6?uX?:,a ?? ?x??>?-?U??????!jgm?pk;??vUk?Z[_???ajeMRX~?K?|?K??????2Cp?C??????????r ??t ??t??6??1??e?????$??0?????[??_??#?\Z?)%R )??B?7??2???R???o?T??UF??<~rw?Cn?0?)2= (|6[E`?[\?j?M?b:?)%?k?q??x2?L?3 O??5?pH-6????m??v?c ?N???\?wq)???tw0?J.$??t+)~?l??l??$p?.???? ???j???F? 
?f?K???7_w|???k??OWJ?=??>??????y??? E?!/??\F?f?rA??????N`????S?3?Z?? ???w?I8G????g?f{????????>??p??'.\????? ?r?%??p?[??]????+??????8[_{b????H.?k ? :????4?? F??k?????/g???[???`???':???f{??}?j~????????1?c_? ??pv???U???yF?D\=?_?D5?xF?I???']?h:?Yz?=??5?D.Z????;???hJY?NP??Sv(2????:G?B=??????/??37/6???[?? W???vx??6?8m?????r??????rS?6??????2???????Fv???k?????O???e??}Wu?????yB??,%??z?R??8??_?E?????uj???????*????^?????z?T??Vv??Z??I???j?? ????B??Z?R?????e??4??^????e??T~??\'?s??SV???9???5?;?????67?WyPS?5Wqp???5!T?U!#T??!???9??up?{???$K??e????0?1?m?g?e_????h??A?}??44?-??????|?R??V-????[l?? +[????C?;??DT? ????p[Q?v??"???J?]u?BW??????\0?tU??W??Q?7???Imw~??;?J?c????C??y??????(?????M?????W??%?PQ?e%PZ?O?E%yB??-?TY +?,???k????E??r=????#,??????6?V?????YTx?? ?W?"?no +\?Ll??????R1??Zb?h?Z+????????#???s*s?-??????T?Xi*?*M??)?\?????x ??$??6??i?'?????F???p????&??D8j??WcFn???`?y??R??????N??:$O?>?H??P?~?4T=P*?S?<&Q??UL?=?^?}??g???.`?[??N??????5?c)?K??? ??v?J??`??????,2?yH? +&?WH?7?)??RC??lc?A???1????\??s???S????\WU?jf???w?????? ??^??lGv????1???Ao???? +????K0?a +$"5??M3? ?l ? ????- ?k?????_????4?? ???}???S????L??i^??&??????~?P?k?-?v^[8??"?c???+(?1???E????=Z?{???l???,?????=$?? ???`??rpXX<%????????????H????G??Lq?'??zGwq?pH7????3?N ?q%3???kB?? -???J?2???????`V ZraE???!?????bh??N`??),??.|??}?????????????;N?#?\?i#5lS??@I??t$?,????|?^???UX^?E? X6???\|'???p-??#&?s???3|?????}??{??9C?o?d????(5?lK?F?FRN$??'??m??Q??? +s#?cNd1???avd fE6B??2????"??J????}c\??\????)?M2??@?E at u????iX?_????8?2?k????2??3?I?E/?h?|????A?@?5;?UG?? ?|=?Zk?{????o]?7??tX???7Y??f?7??,?by? }?q??_???e???>?b/vs???/p??`'+?W?????3??J???\Y?s??i??8??????s?K??>???I?b????84???V??5??x_?C?X??b%V?K?Rl?rq3??OQ+??%?e??p?:=!Twp?en/?J??V?:p??????c????w?-?F`cw4????=??~???U=#Q?3+?g`??L?9?A?s!?8/?b??X?? +??lD??T?C??,?u z;0?9Zx?|???????x????=?G?0`k? ??????????&b?o? ?c?@3j?a?k?]P???w\??p??r?R??Va?? +???C??N?}?"??Q0?;??????@^W?:G???U?:T???}??m???RE?)2P??F??? +?+j1W?????QF??"f+Z?,?c??pv??\??r??/??(????f??p?w????]Q3??FG???(SND?R?yJ)?(T? 
_??`?^???J?^??L??Z?TP???!f#5?I +????gT???m 2u???????&??]\?vs!???????\?? ???????|?Gx???X??"?R????(t??};?;???>!??#????A ?:??%?!Q.?GB????c?Q????5?d?UR????0b??Y~'?~?GZI?9?????Co z?v????????N??h???02` N?_?49??W:??p??Kz.??q????E??E +???0RE]T?G?m??=???8v???=??g??W?????: ??c???9? ??\? J ,?X??\?o??7???;???+hH?] ?????????x??u(?qG??J??l|Z????:} +??F? ?\}9 J_? ??zG????Oa????a?3j?9M?E??Ar_c??0A}x?? ??!0?z????z"?i?~?"??\=?2??BX?4Y?L{?P4?????{?????????W??V u? ????????a2??a??g????????X??|???7L???2o1?d:???????????X?? +?????????A???????Go(F?s2?m????9?>????~????*F?L??U???K?E??????????~?]?|?%?f?F\???Ck :????+??/Y?/dW?R????X??l?????R]M m ;)?#??Y??U???Vz? +???+>??>-?~>?P???v~?????f?o$? ++P??^??=???bU/?????c??E3 +?^?3???h?O??$tRh?vtR????Bt?????N????s??????{~??y??N??O+?????B;??????kk?lm??i?%_)?W?-Au[???Z???F???*i??J?Z??n?r?.uXh??a??zAm???t??j?~i?\???/r9????s??Q>(@??|'????q?N?R?YZ?m?Z?|????e+?J[?:mUZn?j?-??-???UZ??A?4?-9??T,g?h?9o4?|h4?? i?1;m4?? 8??? ??(#p??V?6h9???????????:o???wk?=O???Zf????R{?Z?5j?7)?hS???fGJ???jp?5??u?g?%?sF????????6j?.q???TBK??z??????#??"?;??v^???u:?i???6?\?:??^?[1g???ejtU??U?zW???N???Wm~?Q?????3???1+]o????K???6??bA?|?????????Z???nb??~qt^??ujsg?????{???v5??u{U????B5?u?.?+??P??[??-F??!#?9j?y??? ?0K ?_????YZp?????i?O?=e???;???|t?r?h?]??o?}?U???%?{T?[???PU?bU????????????P?-.?l???????Y??,??n?o??0???K,J??2????>???&????|G^?|?"?6?h^???$- LSu???@?*?(??,P????J?? +?U\???F??m?????O???kfa?=???)I???K??????{?^?C+??E?N??C\XX?0/??(U??We?V???T(4G?CvC? +??* ??8\)?^??Ry?]???7 +?????!sa??? +??t?~g??>??M????*M?/???i{??=?F-$????n?????yu?T?j?WMV?j?J?f?(r????FndG_1???5??|fd?^??i?I??Q?3?????$?? ?WQ ???C?#?y??G? v??cS??????]???=?R^???x???#??7);?Ls??tW|?f???{L?????????5??.???aiHgx?0c~????B??+?Jq??Vx?`?3q?r7jnb?l?,e'?5'??] ?f'?eMT???Wf?]3k4#??;{5?}HS?_?m?????O5??????5?%m?????Fs/????6I??v?'5p???|k???GBw?????]?????9??:B?????~_Z????P???pdB?&??4?98 +hN??L&p??? ????f3???????I??W? +.?????? g4??B?R?'?"??????$?4?4p???????*??6?)????Y?8?????????P?#$?,/yp#????C{ ?k?kE??G????????????????h?9??,</??@??29y??? 
???4?o??h?q????????g????????qv3?v4k?,f\?(_+?MAk??d?_???:? +0??0ah?? b??c0k1 wH?? ??????`>b?}L??????F>yG=???(?At0>?Oe?y???jQ??$ ?7??E3& ?%c?? ?????E?z9?????.??(?+??j??*????'.L*t?e`d?GA? ???DE???K??-&Fk?V????i???=?&&ij??6??i????=???3?????}????????5~|C???}r???$'7I?-???1?t?g??????$dG???o??n?Z7T?o?/?f??J??2}??DVu???_L\?(??(?OI?'8?1S?G?{???R at WI?;?o|?s??X????B???sk??i????? ?*??a?Qg?|?9?m-?V??0M?????e?#U?RgH??I?U??????{t/??X?d?a??`??r???(?Q??j?????(??/??lwR?k)????.J??????? ?e5gX?ih?????{|? +qr??0??{t????2F??-?????'#?/??Kna??(g??0?`4rT5C[??c?7?i??????{??n?????[? ui??.??B??????/?s????q???Z|0??^????I???{??n?????Z?b??????f????m ????????????=8V8?^ ???u??;?9?9?a0??YX?????B8>8ep???????3?`???0V??P???j???L-g??????/c?/?[r?^?k?Et???G?Q|9?^p?>#????+V? Y??u??? +?1Tk??^+ s???PaZ?N?a;?8?- +;??0??G?jA?????;?vu?*G?E???c??q??p-???e/??????C??W???????S??w?V??Sg?:?????d???????Z????V-o?????? ???????~AM?>P?_????????x?????D3|Z?????cv??????O?r?u????Z2??E???68_ {????)j?????Y?1WM???J?Fn?????|N3#_Um?{??o?D??T?.?&?O?q ???i??Xi5?i#?V?c??p-6Dm??5?I?F?SsT??D??)?@?Q?z4?T?Q????Lc?j? Tc\?*?&U?i???*??Un????Tc???B???z????E??%\?;??7??|[F???{?5z????RS?h5??U}L??b?4#6O???????*6???U???*?Z55n???mP0n?????4?????J????Jb???q\????????????y???,J?ZL???????jf?(??L?6???d?4S??MnM5k??T???,?N???*IjW i??I???c*NzYEIo?(?s???L?.?????z?6?:?JO?-?`k\?6zl?hJ6???s?*????????*hIS?%S??y??*L????&?m?)??Z.???~???r???????- +????18????7?Z?^??r?i??????Q??Ce??U???|??*???c7k?=M?,?????^,WF??2????????rf??#?qeeVf? ??x}???[? {?K?#????{ +??T?~o?(?y????Q??&_v?eG??aT?#N??1?sX??????<9???vN??9]??ze8?+=g??9?esR???Ju????O?-C?#dH? +??8??????Vk?tj??vZ???#>????Wo?]??r W?+FNW??e???t?(=?P?????W???K??VY?????d7?}?oe?Mf?'???D!?%7d??z??|?Z?M??>5?B?~? Sh???%???????(|H??#?^???l??<Y=n??????*?w?????v(??E???e??U??????d??!&w?}?d?G(??q?X???d?[e?gj????@?A%j?????? lRL??F??(?I???5j? ?L )?8???? ??p?506I??E ??U2?????/?|?????hLp??#e +?+>h?#???Ql???`???U????K?????D2???I?*MQ"T??5?????U?]?nm?"???E?%l?~???O??0??o????=?y?i??w?Z????_=-? 
+??W?????QK??'OS??D???7y?|,k?m?Qg????|??I??+?Q^s??g;????{??MK??`/ ?9??/??q0?????n?n5???OFk?|m}?????m/??.O?l ?h????Hl??????Z?M?_??q]m??]?C?a??.d?]??\XN.?|>'ISy?o?????????:??U?\O?`???`?????>Dn?1r?G?U? ??MS?\??-?rls?0?e??e?%???C???F?bu??}(e??????,~?:?G1&??O!K?n???m?\?????? ?C/?\?E?"?K?? ?qa?1?? ?Z??"xm?fZ?WQ????Z??G??r?h? x??DE h ???QI?Q9(???J???d?`a+??;f?r:??? ???U1~?Q?K9 lY?f??`$?? ??&~L4?T??????z????v???4B;hvP?jY??B??????Sp??}OT?v)? ?;og?k8??T?>K?~'zo?.x?Z?^ +`V{\??????????G?P]$M2E????~?u??y?????"?"???IBg +c??F0>?o??}??A8,+????a????&G ?????$?).?z +]=yQ?Z?c????o??Z?n.?)???8??^0Z?u?[???N??p?R?a-8?l8?-?RK?,{s??t?f?????4? +??*?z ?W???????tb '?!|?Mo???_????>?3?)?R]??ih??p???.C?=?t?f????????=i$???????i??f?E???8S???Fe?s?C??3_?g???????]??;Lw????%an?Y7q??????p?s???s?????:?7HW????u!?`?????84????x?'b??V?d?&?? ?X??2?o9?[????????e? +??{?D?Gv at S?w?i???Tt[????N&?? ?xK?d?$|??L????F*%?J??B-?????^???@v??+T?????U???z??N?9?/8??i???B^???8?@???2?Qh???$4b???c+y?~T??&-D??+U????N?n?e?V?!???2???&J?j?K??O?zN?]???:?$???fd?zS???3? ?DK,:???A'?,??? ??:?????R?????G?????*d? 9???o?*??Q?O?;??v`%?]|?!Wt;q?^D??U9???4??DK,?S!?JCk.'??V?K? ?g??!v26?l??t/??.@?.xS?????????? ?Z?%?????\????????T????[?r?????U-i??V?*h=M?Z?R~?$-l?!{?l??.??be??k?k???????2]/(???Jw{???F???Z>?J??????? +_?????f*n??e?T?B? +??)?}??:?"??8?xLT??dey?i?G??Rd5?S?a?? ??jX??V%?+?pB???????fwz +?g???96?=??M????N?I???WeM????????JY??(3??LI????I?dN??l??????????{?:? ?????i?O%?Q????F????7??f?T?1N9)I?3????|?N??4?2????6Y??yJO_???-JM?X??????????T?G??]?} ???p??????3?<^????A??%s ??????YM??6?(?dP?)Mfs?Lf?2???Y???R3')%k???j???Y?????yn(??P?????`'z??^??[3?^?s&cW9?K???)?_s?Md?n????d??&Sv?[??f1?h?T?%W?V???Cd??VBN??sf?gN??r6)6?CE[?R???b,????Whlg??^S????????-??=?m??/??L??J? ?17D???J???!/Y y&????????lE???(?V?(?L??-S?m?"l????????) ??"?#???????I??;?+? ??k +???S???xM?o???? 8 ??8?5Mp?k??J?0,?{`h??s????A???c????{????6 Q? ?z?????z$????????N??/??~???G???m???qc?Y????I?7H??]?[]%?? ?,?o^Z?????J@??V.????#?VJ?q?N?pPWqTWpX?qw?8???? 
?{?&???;?h]??}??i8_???????@?X[;?BuK??????|??E??F?[?8??;?c;C ??x?S???x?/9?'???i???????????gp??O??C?'?b???k b??#???Nl'?_&?Hb??-???J3t? ??? {???????Z?+??????????/? +G?????go?u?????? C+?$tL???~????;?)Fg??P?E??MAc&'e!Y?Y????????0?p??,?"??Uvqq9?c9dYt?dQdWAX?P!Q???1??XEDcc?1I?ft?t&?I[???I;??w??N?ikLm???Og:???=?????????C???>?;N??e?{??s???.???q?p???:y??? ?????o,{K?o\6x?T?????}x?6??k7?E?\???e??%?2?,??????N?????A?Kx??u????1?9??g?=??g*?2???????dOn?S??6?:?? +?x?XBd??jm?{z???!?'???C??(?=??f?A#i#)t> M???;/D#7??^??Llj??|?? +b??wF???B6?#?o'????1?????M0t'??!?!?q?d???g?$fv?>sfp?>?Rg?~???????@???u?>???q)??@???"?i??N??1? ??/?9??4???b????a^????W?/????S?}?T?V? +j???, D?0???f?,??,?x?5K]???9??`?at? ????????9?? ???C??ZGf??????G?d_?A ??D.B?1^??vG?#????(.^#?8?!??1??s?^??_???Bx?-p?? ??^?k??Y??uX??5????:?'u?????????0?????? + ???2?3 ?6(??2A?7??[b?q?5Q??X?x??[???&1U?F??ML???Dc?6?m?qi?h?N(9????|??????????N??B??h???=4 ??pXp??`t???@;?-???1????0}0P+b?1???a??1 ???q????3.?K?Kl?e??I??ZX_L/?"??/^?C??s??u??uq?0????'???f????=0?????? ??/ns?n??w????C??????!??~?G??????????Swq ?q?p??0n?8n?>n?~???}??? ??g6?O8?ou^?u?o? Q?t??????s???/V)?+?WU??????L????I???5.???K?U???????K:??:??.?3???!??E??+?????u?d-????lp?D?+???S??@???m????1y|??;O?h???-??,?? M{?M|??}?????1hG?u?=?????`?5??>?#???s????n?cO???Vh?%??Y'??y???1?Q?????;x????!?>HVX?~*??^??]?`rc?ml?]P??: ?! ?????w?Cl/Qk?r=Ge???[x?^e4y??v{??=Y?Hx ?R?X!???B?Q?? ?V#???y???????^c??F:?????????Zn?Zr8??s?????? me??????????qm?_u ?g? zcZ?4=?gS?_?&w,????5?S??:Ujl?Z5??W??4?????%??_?Q??T?X????x????| ?C>?b?z'?Xy~yZ6???'wE???qdcIK?????M|,VM??????A??WmP????&x?FW???X=2E????H?C??"d??T?U>o????t??^??u???o%Z?H>??>??D???.?$?2?? +S??H SnJ?rR???b???L?RsdM-TfZ?2??J???b?????JIA???PR?^%??'???-??????+j????z,?????+?F?i??D5?Uy???y??SVfO?2?j`F?22??i?%3K? ?2p???#d??*?6Y&?l ?-U??Um{g;?X??o?A????O?l?n??L??Lb??9?\? ?F?f??M? +?????? 
+?%+\?Y%g ?9;U??V???5 ?T?9?d??V?D??g)??D??We??~??k??????????l???y ??Z?E,??F????????7P?J????*?o?)??DG?8"??0??0??#S?v?+???(g??9????E?E +w?S?s?B?????????????|?????????w5?p???3?aM?&??????'???BMJ?*???bJ?(?4F??&?+??Oi?"J???*W??J?\???5C?]???k??\;?:??\X???K~T?b?v???,?JF??S???{6?|???auR1{5??$S??????T?;Ta?(?????NQ?U!n??=???*?????b?L??>?0?????KY at EE?[A?K?bRf??4n ?(??????Xs???b?X?? X?H?uK?Kn|X???{;????U?k0?? ???"?F?s_|r?Y;???? ,]}?P9?, ? +4f9?('p???U?_?????s?c??f??Q.5[x??,?g???:r?I??/7|??a ???j?c#??p@]??H?4z A????r=[s?g?C?L??V????@????&P????????"l?ck2????}??~???s??7?T"6???@?!|X?-??8FLN????'??S,???NS?)???: ???]???#p?? ?'?{?h????Q ?w??.1?)??X?Y???84?5D=s???|????*C?S?S +?D???z]E??????9?? ?,????c_g,???e??G??g? !???v"?'a; ?y?a>?g?6/amZD?b{?7???????q??x?w??J???kF?f?;??b)??;[?>W??3?0,?b? ?????>??K?cu?e??Z??u?Lm4ji???? "?{???????mF ????????m~?sX???????9"???~?S????8fP????xl&/?T??2"Q???T?kt?aU?? +*???+?{??,m}?[??9j:??????y? ?h8??H?c??S???G~????G????Jk?????"2+??bNXD5/?6??Out?*W???? F??~?aF?^F???hTwx?t%f?? +"?!d;?8x???D?3???J?RB??Gq?'??t!+${ ??|?{????? ??X???~g??x????1[??kV?????y???PxF??y??I??XD^L?G? +??|x??M)[%#')?r???L?c:?7?J????C?=??q?+? ???????T;????f?'?r?l??k0\?8S(Lc???k???????@?????????4X?O??n>?G652???g?x?????T???"}?(?/D?~? +????5??F??k??P???f??C?? ?R??? ??`??@?v?w?(gn?7+_CZ T?~5?-??y?f~d?&'#?b?l???@7??($?????5<(\?A??_??(??????"F???  +??l????>?????Of??]?&>/??'??tQ?I74,??|??"<??]??q.?5?????.???l??EM?b?&???dW?1Q??W???e'c????4"? O1?'e??S??+???S??s?V ???A)'40???58???k]z??jc?? r5b???]?4'??<4????& 3???d??)^C?R?????? J/?{z?\??4 }?\?;??v????S??????ou????o???%???? ?l???\???'?I5I?&?M???'?UC2?`?????T?C^cx???\mp?OP???z?????oc(?? ??B at OS? ,l????cL??h?????gS;????? X??&????????}?e????P at O???E???)f;m=?????????? -??b7?z??? ??c?p??g???=?q?>??u`??&???1?????b?v?G?9??x?????S????w????????9??????????,o??Gm%?}??????R?_?6K?/??Uo?>??#?A?>????[?????? ??p?o4\a?)?5%?#???`/r????-?C.k??*??:zR???m|Ap???h???Z?p???{\??*????V???G??L?G6??Bf???7m?@??c_?? +?8?;?O?b`? 
?&?R?????x????G??`?????+?d?Lpe???l?B??h?S?2?4???4??29R??IMu???:_?????u????????(??ks? ??S???]?QQ?W?,edd?v?A`d(???MqA ???5j"??.,??hN??Xj??9????4???I??&??$m?X?t ???9&'??,?{???}???M??^????j???G4sD??^??????^6???T??<yi??tM???$?Z??6*?w??}7)??W????{R>R??5p]???y?????tW??iV?l +C???\?I??(M4?)????? M?S??b???Tjh?RB+??B ?v??v(??3YM'cz \??4(k?????O?w/r?? m ????????????Yp???E?@?0????.[X?R??????l%FLQ|?t?E???\'?y????5??-?? E??+?| (2rP??!?r[??!???????????"j??'N?i!r(+?M??>J?2(1????(????u?M?h?bb +S?(K???E??6+????Y;j=???c2Z?(??C???K?1C:?3$?n,Q7?w3???r?? lu?2?J8R?? +?n&{??%k??bbC4>6BQq???????|?%???P???Z?$.Sp?Z?I?PP?? +H|^~???????+(aH?L????U????>?!?VP????l*?*S???r?\p$?????Q?H???q2?D+4%A!?dL?Q??X?,??+??T~?5e?&?>?????w???'o?W?M?F?f?G????:??????x?v?4?????Rz??ll???l?e?THz??????aVPF?2l??????)?u???c?? ?X%O?O???8???4???2??G???$y?b {?????` ?s9???\?2???=I??Q????)????,??py?Z????yy?M?{^3?????1???|4}:6???????<?"??bi+V?????f~ZO=s?i???$j?@?b =???????GE?qx???X?"HSL????b~T\V??|?(?,LA?"? ?a?????{)wTwD35??y;A-?(?'Y?$????'F??;?MnN.6'??A??@ur?:Yd'?qBv'w???????y?0.e(?????(??A?#/?:?g'Z?K?? +4R?????c5;??xx;? sX?O??%?g'?9?i??????e?L?;5F????aV\???p?/E-?E#?h??(???N4@????[????X?????????E???fzV@?z?SI=???????h?1FSb?a???????LZ?I+??????????ja?- ??M$`????6???>7???]???v??^?Pn"*???Y85?S???[???h?Da?h??v?Q;];?h?m?? ?????rch?? ??h????.?????jy=L???N?????[????q???0??H?8?????????u????3b+?f?um?Iy????????A?w???6v?????@? {??1Bu_?0??~vd??~?????C5?=?????~??????p??h?????q?p@o??0V\?.Cp<}??H?`g??/??~Q??l?gE????8/??:2???h$?\???A\ q????q???1`??G?s.?2l?^:?l^??3V????K|? {?? ????]?a?)?/zK????_?V?,8???_p|???R??????3??U{:q+?.??2J?m??3u?I???u=?????pG?V???g??i???B?????} 8?????????y?c?e&O92??O?"9?[?G?!v-?RO??7???k???????????????2???h1???n=????????y?r??c!G +?'? +?B????~5P????P???M?o???????????}????? ??E?? ????6??W??? 8????Z/???&O$9???F?lV??)%? ?????C??j9??L??? 7???n???oz?????.w??rc;?6L]???h??[??A>r??'??t5??9?C??w3?Gq??8?\?????N-'?jmc&[x??j6?? ???r????dl????B??d??t] +{?#?BJ4H?? 
+/8k?i??n????????9?.:?G;??Nm???i???r?????:??s?F???qt?????MN??Tp2??z???:???M?c#~?D}t???????z?r-kf?F"?@???5ta}Z???[P?kV?l??62?# J0yTCe?M????A[????2?m8?j%???????F5????[??*??j?j5??J_I?W1???????r?????H=y?#?(?D?Ol^???M~?? g_1`%??????O>X?`??UV5????u?????O??????%?Q?L?????~?o?{?Q~ 7?????????e q??(^'????4??'??+ ?|???? ?`?N38]*&?ET?jb1U^@?/?r3??G????`^?v]?x}w yp?#????Fb????C??s?j?,xQ?-^??_e?l???Z?????dv??U???^nC?? ??5???qe.(?pS???n?+??S??n??5???H??P?#\??{'?{??j? %??f??eA???????h?U?"w`??s?T??????R??u?0??e?"g?O?!??rAI?o(?x[?=%???? ?{Y4^J8Zs?-??o???A???M5????,???m2*w?E9?l?29?aJS?)K ??r?y?:?R)?Z%??)??-?ePv?Q?Y?+?rS6?mY-+????????? ??????(?n4?6y?v?#7u??JdC)?[?? ?+?$gx?R#??????t%F??U?Q??G{gmQ?u?l?E[?i=?p? Y???l???g??~?????????v???%???????Z?M-?R???*?S#?x??c?r?,??l??q(.????????LQ????oR?}????o????B???h????w?g???????e?|????=R?\?????v?h ?5??jd 5?CnR?M??_??!2??+,5V??$??????W??X? W(?U'W? ?m??B?? ?.?? ??N?D?8?{s2??? ???u T??x_M????r?An?pT?l?f)8???L?????Ps?s??]?Y98???9??s}?>?Oa?lN?l?f?'???????"K???lB???~?????6???U??????G??????$?(?;L???r?M?.??(?Y?.`?Dj????? v?V??!??.?s??N???H?.?#?n~????????,a???Rb%1`?z????p?x")X?]1????0`<$?C?=l??????o????????(???????????3r3}???\7#??K ?,c?|r?$?q?d>??%?Z1??h? +F?V?0???U?h$?b#E? C?? ?#????C??5?)?????[9??qNa]????+?K1????f)??)????????A?e?z?????0/_??5,R???>"?F?????????+j?}?{x;}? ??n??????R???F*???W(q?#G?????X4?&b?D,1?k#?H5?l?h`??!i B???"??K?P?Hgxu???k?^j?i????k ????N\???Ol8??{ ??p? +d?P????E'??`?w??j?? ??6??????????zhy?S??m???????????K??.?w???M ? ??????}??g??I=??0???z)?j??"????Pl?M?? ?z?#??{l??????/S>????T??&>A?????3????}????c???S????)?A6???`F???nG????IJ??q?j??}? +!? )??2??f????CH?C1?f?3????0}????.2?0???_5 y?}??a??jl)?,??]??4??i?f??i;3m?c?%7???5:?????(?Q:zCE???_??????2i?j??,?c???$?(g??,?? ?O`??i???;??oP??i?F???C?????N?d^?G?g?a??????8????16?M??c???@q?h?c0??/????+???????a{?????m3?=?N/^n?$m7N?"?Y??lc1?&^o4???s%??u??7????.kb?????1??X?P?c"=????>???o?[??????p??}y ?`e?c +?R?+???????o?yK????c]??hl????S??>??Oa?I????=2? 
{?z??N????X_^?\^MI?2??Ip +aL?L1???G?7?X?E?W@????f???=???g?? +=?'??v?n?u?&??????zx-?^???9x??3??0f????h??x?8+te?.8[??????c?????K?t??:?Xk?????&X5?????G?#????g ?;????/z?dg??t?x?b?????????Fu???:?8??]???jYk?u?z?49?e?)\f"???B???x?????????k??3?l?(??????b*S??W?c6?FX?`5??VX?2???->???&i?o???s??Y`????G?5O??N? ??sl?Ix??????Uy!??????k +?b1???????j???&X??????O??n\?????F??????U???z?????I?????3?>????????????MQ?6s?\o????x4?_????yS\k.??]??? ~???1KO???j?????i V?G? ?????*}??;??9}8I{?w\????r??????s??e???U?T\q-?:????V.?ei??U?? +=Qn??L?4}Yj ???%^?b??E?h?s????a?d?y????? ???X??m??????>1=?Fd??yQ?pUT%L??????8;???e#??I??4??(N?=?&?????(H>W&???/????#/????x +?#4???????>?v??;??v9Y?>mj?Xmn????DUZJ?H?????q???(]%???(?:???GaZSL??co????E??]?????FV??"s?K1~???Yc??'???3?? ??sg?3?Ne?Sp-?????\J?9Ef?????(?H???eF????2cJ?g?F?????????yq??????:??6Ef????}(?f&????????Y??????? ?????ig?q??^??????x?Q?e?d?i?#5f?Lm +r?#7;=r??c????=?(???rf?????1????cro????"%??H?;#??????#!??H?}+?&}???A???YG??G????8??wj??W??=R?6?H?????????2#crn??????H??:FOi???E?\?2FvEB!?T?~+??9 _?????Kq?9v?8B???v?~.p[-GK?f?3?ANf?M??L??qE)?:5#R??DrQA$M?????P?t?????K??Rm1RJ???SOt"???z????"?k\?7????~#s????????:i?e..???K&?#??!5tT?A5d?M>???mj???l?@???[? ?V????;?v?r3Q??E????}????v???R?l?????Hn??O??Q?????????_????G???????????i? d???eG?????k1?'I?CVG??:|n>???????W Z??c#????3? ??~???????O?>??????w?????4?u???V???$m?Y?/????l|c????3v??????+ +??5??D !$@ , ?"?? ?,1l!,?-??\? +?` "?Ttpc??A+ +h?u7????U?u???????3???#?3N;?;o???s??}????v_????%q?z?V,~?????X?M?h6??f?Xk?P?&??;????h???v?-?0%L???k"??????!K??]qH????8HU??0???????c????2??_Rx?O??hBZ? B6P?&??0{?a?O????c.??v?q~BC???????G??p&?9/y????]?I????? ?.K?Mb???$?????hp??xK??g?)?????????\?r?SOc???/=??'?k*? ?Av8?????O??????o?3???Wi??????????g???8N???8?QwO?? q?j?Scq?\??{?;/E??P'!???? ?T*?V?9?B8???,6??:???Z????+t??_ ;i?v?B=????????????Ac?]??????????S&L?8]S??????\???K?_-?u?sS,???[m??/????'???h??:?]???\?????????\??,}?e ??0F????Y g???y?z8?]?+D?????r??5c?l?6??L?,?3Yr?q????v??I]6????J?b?????p +???@8C? 
?3J*c?8??G??!?t?Z????4^???E:?Z?'???l?Ib8????????????l??u??>{-')??h?[;'???3?t?bgm???u?q?&?F ?:?G?L?w?d?T??12>??'l???L???s??~J??:?J?m?u~??q??m???jR???B7X=a?k(?1?? !V??O&D2?j?]??H?^i???U??h?}O4??i4k??e}?2??{??v?????9??????6??Yj3E???zd?????9??s?6?????????G??X????M??h?y\??2-Z???f9?+???E?s??7B7c????q?s??? ??_?.'K??ia?yP?j}RI6U??`?S,'y??C?V????s?D?s??E?>??+>?M??"??c???s?????????i?"W7??K5?????WR???)?C? h??!??4mm?w{H???M]r??????????G????????Tj?5ua.?`G}X@?p'??^??/5cuc???????????PO??)?RW??-?S_?Qc?3????3`?????l??5(zD-zJ=z&R?J?M??s l\u?{??s[ ???U~1? 9?i?}?g??eqL?a|?????r?*??j-??PW]?8???AC0C????)TP????<S????RQJ2c?3??/??h+A'M?6c???6v? ???/???\?6|?????????.?[~N????%(???(h??%? yB??)???1$?????????x???ua???wqIIii????????R?RH!?RH!?RH???w?????Ky????]A +)??B?(?X??'l?B"'l~???? +\?G?A +)??g??C??)????f)??m1'V???W?K?M=????N$nHz??!J??)???B????c) +??d???????Z????y??!D???o :?p?$??u??@m??bW8a? ???1?0??[?????F ?^'x.p N?at????$)?????? +????[.??=M? ?(?u???K?W/ND??????r???Y?N??+??5?????2l????U??M?[l??DFLY?? py) p\???u?k?Y"?s?~???w_??(?qor???RH??GY??????(????}???????????*??Z3R??qz& +??Td??$DP??iP???n?B??^U?M???>?d?????^ L??????T? b"L???$?j`????8?Q`?T?rC?[??RA?+;boeiie(??x}???;??u)??????\Q???Uv?t?o???[?R?E?*?w?]Y??w??l??U}1?????????C@?;????v??j.??!^????@?N" e`?`>????v@?-u??q?n?(?d??3?7??_???y? ??.?????u??zz??????`??????*+j???:??????????????z\?=???4?"??qcG~_??zIW>`??$??#?? ???=?"?:\?????0??4a??|@?y?>?"?????{?4oXWMUM]?U?C ?q???8??-%5??og?J?/$?n?.&?J[?6??v????{?{?????6=?J????Y=?X??????? ??9(y)??L????Wu???F?G?x?!???/ +1?l??`??>???Y???p??@6?0???? ?F2g>d?0?!F?FC???`??R????V?? ?q??????O???+C??o???|? H??G?5?w+#;e????*??kj??a?/s'#?m&???F?0~9LBz?)?8?R>? u?[|??(H`t?b?-|???9?)?1??k ?B@ ?"??????? ?~???s?|? ????????? ?]?H?R?f?:]? >.?%????e?)>#?o???{????f?e b???#-? D????5xo[? +?3o?A#?8c????? ??$3?D>p? QxC?#?pU?c?~1`?$?ld4(?0@&?????? ?!?h?rGk#?9?ND?V ?m??c?????s?,??0ce????~?? 
??c???j'O????#???O??Ud`}m??\??Y????u'?c>???u????|#c"?cJ?RB?X?#?B???dJ??B??B?RQB???%?e???R"SR?L?T?;o?p?????9?y?s????????~?Z(C?@?T ?ph?t$m|??}1????? A? ?%???0?=????C?p????? ? ?????t??e??5,K +X?K??J?=?M????U????3?sv???S??1}x?//? W?6??_???U?r@ ?V?"9?????K7??g???D?lL 0???b?!#?'?C?T???a??%M{? @?}:>?">`????/"P???YX? G1`????.???j?????N??jdI???y?????tg?3L?, ?????78F?unQ?@?*??'?w:??v/???\???5c0??? ??bvI??07?1?cibX +?? `93??[(Q???y???G&e??U???(?w?#?)?=?, +??@?F?)p?T????M?k?41d????&?BO?"w0h?????@ ??`??????`???t?{w? +,b$???????iK?".??$Ee??.?KM^?>?Ow?eK???]??z? r?E??dAr?'xDG +l?I??B(?=?u +d?i???$V +:?p??kI@????tr ??V)? : A?5? `;!? ?w??C?e?C0v???h?0t???????????=?E1??T???,*??_P????O?xj????:?55z?(?PR??S??d??cp???<?wy@!x?????8;( ?_ +?X@?!?????; ???\O L ?$???X?1?(D??G4?????6?}#??)??7c?????????E/?? +b?Ly?S???}!??;??l???70??o??????g??`??9.?>x????A??' +?p??a\mG? +o? ????I?-??3???????wo?_??????{1??V9.??mmiann>m???M?????^?t>3=--=?rn? ????e9?l??XC?I??=B??!Cf?'??%o???-????$nN???~??=:???@?]?????&???9BR??6??fmx?S\?$?u??"7?C???Q???k?f??+?p?a???[?Z????U??V;hY?????C??Z?>????3Ky^f?*M}?n?q???u9????D???f*^={??A???J^U????nZ?f?J?.?????d ?c?W??B????????U]?z?}?????>bg/ < 8e?M??'?;H?6???u?O???^cS??=m%???:?7?????{??????r?y?O?.[?c?o?7P?K???1?> endobj xref +0 74 +0000000000 65535 f +0000000016 00000 n +0000000297 00000 n +0000076452 00000 n +0000000000 00000 f +0000410108 00000 n +0000076509 00000 n +0000077123 00000 n +0000170149 00000 n +0000148671 00000 n +0000170122 00000 n +0000393466 00000 n +0000392678 00000 n +0000391748 00000 n +0000169785 00000 n +0000169898 00000 n +0000170009 00000 n +0000393751 00000 n +0000149899 00000 n +0000150219 00000 n +0000151142 00000 n +0000150734 00000 n +0000393890 00000 n +0000394242 00000 n +0000149171 00000 n +0000148529 00000 n +0000203195 00000 n +0000219291 00000 n +0000291927 00000 n +0000364079 00000 n +0000366411 00000 n +0000077671 00000 n +0000146206 00000 n +0000145916 00000 n +0000111054 00000 n +0000124487 00000 n +0000129064 00000 n +0000146914 00000 
n +0000145965 00000 n +0000146255 00000 n +0000148730 00000 n +0000148904 00000 n +0000149134 00000 n +0000148825 00000 n +0000149284 00000 n +0000149427 00000 n +0000149455 00000 n +0000149550 00000 n +0000167371 00000 n +0000167462 00000 n +0000160401 00000 n +0000160485 00000 n +0000155877 00000 n +0000155961 00000 n +0000151550 00000 n +0000151921 00000 n +0000156329 00000 n +0000160991 00000 n +0000167809 00000 n +0000391797 00000 n +0000392727 00000 n +0000393514 00000 n +0000408642 00000 n +0000408667 00000 n +0000397239 00000 n +0000394414 00000 n +0000394686 00000 n +0000397492 00000 n +0000408974 00000 n +0000409129 00000 n +0000409198 00000 n +0000409471 00000 n +0000409551 00000 n +0000801531 00000 n +trailer +<]>> +startxref +801787 +%%EOF From hpk at codespeak.net Tue Mar 1 17:00:42 2011 From: hpk at codespeak.net (hpk at codespeak.net) Date: Tue, 1 Mar 2011 17:00:42 +0100 (CET) Subject: [pypy-svn] r80407 - pypy/extradoc/pycon-advertisement Message-ID: <20110301160042.DAAE6282BDE@codespeak.net> Author: hpk Date: Tue Mar 1 17:00:40 2011 New Revision: 80407 Modified: pypy/extradoc/pycon-advertisement/pycon2011flyer.pdf (props changed) Log: set mime type From hpk at codespeak.net Tue Mar 1 19:26:48 2011 From: hpk at codespeak.net (hpk at codespeak.net) Date: Tue, 1 Mar 2011 19:26:48 +0100 (CET) Subject: [pypy-svn] r80408 - pypy/extradoc Message-ID: <20110301182648.58211282D00@codespeak.net> Author: hpk Date: Tue Mar 1 19:26:46 2011 New Revision: 80408 Removed: pypy/extradoc/api_html.tar.gz Log: remove this From commits-noreply at bitbucket.org Tue Mar 1 20:09:14 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 20:09:14 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: repr(rffi.VOIDP) == "<* Array of void >" Message-ID: <20110301190914.EFFF1282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42369:0aa0b2e06c7e Date: 2011-03-01 20:00 +0100 http://bitbucket.org/pypy/pypy/changeset/0aa0b2e06c7e/ Log: 
repr(rffi.VOIDP) == "<* Array of void >" diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py --- a/pypy/rpython/lltypesystem/test/test_rffi.py +++ b/pypy/rpython/lltypesystem/test/test_rffi.py @@ -766,6 +766,9 @@ def test_ptradd_interpret(): interpret(test_ptradd, []) +def test_voidptr(): + assert repr(VOIDP) == "<* Array of void >" + class TestCRffi(BaseTestRffi): def compile(self, func, args, **kwds): return compile_c(func, args, **kwds) diff --git a/pypy/rpython/lltypesystem/lltype.py b/pypy/rpython/lltypesystem/lltype.py --- a/pypy/rpython/lltypesystem/lltype.py +++ b/pypy/rpython/lltypesystem/lltype.py @@ -395,6 +395,8 @@ return "{ %s }" % of._str_fields() else: return "%s { %s }" % (of._name, of._str_fields()) + elif self._hints.get('render_as_void'): + return 'void' else: return str(self.OF) _str_fields = saferecursive(_str_fields, '...') From commits-noreply at bitbucket.org Tue Mar 1 20:09:18 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 1 Mar 2011 20:09:18 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: s/VOIDP_real/VOIDP/g Message-ID: <20110301190918.CD0922A207D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42370:32faea844dec Date: 2011-03-01 20:09 +0100 http://bitbucket.org/pypy/pypy/changeset/32faea844dec/ Log: s/VOIDP_real/VOIDP/g diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -454,6 +454,6 @@ #___________________________________________________________ - at cpython_api([rffi.VOIDP_real], lltype.Signed, error=CANNOT_FAIL) + at cpython_api([rffi.VOIDP], lltype.Signed, error=CANNOT_FAIL) def _Py_HashPointer(space, ptr): return rffi.cast(lltype.Signed, ptr) diff --git a/pypy/module/cpyext/typeobjectdefs.py b/pypy/module/cpyext/typeobjectdefs.py --- a/pypy/module/cpyext/typeobjectdefs.py +++ b/pypy/module/cpyext/typeobjectdefs.py @@ -11,7 +11,7 @@ P, 
FT, PyO = Ptr, FuncType, PyObject PyOPtr = Ptr(lltype.Array(PyO, hints={'nolength': True})) -freefunc = P(FT([rffi.VOIDP_real], Void)) +freefunc = P(FT([rffi.VOIDP], Void)) destructor = P(FT([PyO], Void)) printfunc = P(FT([PyO, FILEP, rffi.INT_real], rffi.INT)) getattrfunc = P(FT([PyO, rffi.CCHARP], PyO)) @@ -46,14 +46,14 @@ objobjargproc = P(FT([PyO, PyO, PyO], rffi.INT_real)) objobjproc = P(FT([PyO, PyO], rffi.INT_real)) -visitproc = P(FT([PyO, rffi.VOIDP_real], rffi.INT_real)) -traverseproc = P(FT([PyO, visitproc, rffi.VOIDP_real], rffi.INT_real)) +visitproc = P(FT([PyO, rffi.VOIDP], rffi.INT_real)) +traverseproc = P(FT([PyO, visitproc, rffi.VOIDP], rffi.INT_real)) -getter = P(FT([PyO, rffi.VOIDP_real], PyO)) -setter = P(FT([PyO, PyO, rffi.VOIDP_real], rffi.INT_real)) +getter = P(FT([PyO, rffi.VOIDP], PyO)) +setter = P(FT([PyO, PyO, rffi.VOIDP], rffi.INT_real)) -wrapperfunc = P(FT([PyO, PyO, rffi.VOIDP_real], PyO)) -wrapperfunc_kwds = P(FT([PyO, PyO, rffi.VOIDP_real, PyO], PyO)) +wrapperfunc = P(FT([PyO, PyO, rffi.VOIDP], PyO)) +wrapperfunc_kwds = P(FT([PyO, PyO, rffi.VOIDP, PyO], PyO)) PyGetSetDef = cpython_struct("PyGetSetDef", ( @@ -61,7 +61,7 @@ ("get", getter), ("set", setter), ("doc", rffi.CCHARP), - ("closure", rffi.VOIDP_real), + ("closure", rffi.VOIDP), )) PyNumberMethods = cpython_struct("PyNumberMethods", ( diff --git a/pypy/translator/c/test/test_database.py b/pypy/translator/c/test/test_database.py --- a/pypy/translator/c/test/test_database.py +++ b/pypy/translator/c/test/test_database.py @@ -5,7 +5,7 @@ from pypy.objspace.flow.model import Constant, Variable, SpaceOperation from pypy.objspace.flow.model import Block, Link, FunctionGraph from pypy.rpython.typesystem import getfunctionptr -from pypy.rpython.lltypesystem.rffi import VOIDP_real, INT_real, INT +from pypy.rpython.lltypesystem.rffi import VOIDP, INT_real, INT def dump_on_stdout(database): @@ -223,7 +223,7 @@ dump_on_stdout(db) def test_voidp(): - A = VOIDP_real + A = VOIDP db = 
LowLevelDatabase() assert db.gettype(A) == "void *@" diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -159,7 +159,7 @@ if not struct: continue func = getattr(struct, slot_names[1]) - func_voidp = rffi.cast(rffi.VOIDP_real, func) + func_voidp = rffi.cast(rffi.VOIDP, func) if not func: continue if wrapper_func is None and wrapper_func_kwds is None: diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -20,7 +20,7 @@ return lltype.malloc(rffi.VOIDP.TO, size, flavor='raw', zero=True) - at cpython_api([rffi.VOIDP_real], lltype.Void) + at cpython_api([rffi.VOIDP], lltype.Void) def PyObject_FREE(space, ptr): lltype.free(ptr, flavor='raw') @@ -42,14 +42,14 @@ w_obj = PyObject_InitVar(space, py_objvar, type, itemcount) return py_obj - at cpython_api([rffi.VOIDP_real], lltype.Void) + at cpython_api([rffi.VOIDP], lltype.Void) def PyObject_Del(space, obj): lltype.free(obj, flavor='raw') @cpython_api([PyObject], lltype.Void) def PyObject_dealloc(space, obj): pto = obj.c_ob_type - obj_voidp = rffi.cast(rffi.VOIDP_real, obj) + obj_voidp = rffi.cast(rffi.VOIDP, obj) generic_cpy_call(space, pto.c_tp_free, obj_voidp) if pto.c_tp_flags & Py_TPFLAGS_HEAPTYPE: Py_DecRef(space, rffi.cast(PyObject, pto)) @@ -58,11 +58,11 @@ def _PyObject_GC_New(space, type): return _PyObject_New(space, type) - at cpython_api([rffi.VOIDP_real], lltype.Void) + at cpython_api([rffi.VOIDP], lltype.Void) def PyObject_GC_Del(space, obj): PyObject_Del(space, obj) - at cpython_api([rffi.VOIDP_real], lltype.Void) + at cpython_api([rffi.VOIDP], lltype.Void) def PyObject_GC_Track(space, op): """Adds the object op to the set of container objects tracked by the collector. 
The collector can run at unexpected times so objects must be @@ -71,7 +71,7 @@ end of the constructor.""" pass - at cpython_api([rffi.VOIDP_real], lltype.Void) + at cpython_api([rffi.VOIDP], lltype.Void) def PyObject_GC_UnTrack(space, op): """Remove the object op from the set of container objects tracked by the collector. Note that PyObject_GC_Track() can be called again on diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -1248,7 +1248,7 @@ reload the module if it was already imported.)""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.VOIDP_real], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP, rffi.VOIDP], rffi.INT_real, error=-1) def PyImport_AppendInittab(space, name, initfunc): """Add a single module to the existing table of built-in modules. This is a convenience wrapper around PyImport_ExtendInittab(), returning -1 if diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -559,8 +559,7 @@ r_singlefloat = rarithmetic.r_singlefloat # void * - for now, represented as char * -VOIDP_real = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True})) -VOIDP = VOIDP_real +VOIDP = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True})) NULL = None # void ** diff --git a/pypy/module/cpyext/stubgen.py b/pypy/module/cpyext/stubgen.py --- a/pypy/module/cpyext/stubgen.py +++ b/pypy/module/cpyext/stubgen.py @@ -36,7 +36,7 @@ C_TYPE_TO_PYPY_TYPE_ARGS = C_TYPE_TO_PYPY_TYPE.copy() C_TYPE_TO_PYPY_TYPE_ARGS.update({ - "void": "rffi.VOIDP_real", + "void": "rffi.VOIDP", }) diff --git a/pypy/module/cpyext/test/test_longobject.py b/pypy/module/cpyext/test/test_longobject.py --- a/pypy/module/cpyext/test/test_longobject.py +++ b/pypy/module/cpyext/test/test_longobject.py @@ -94,7 +94,7 @@ def 
test_as_voidptr(self, space, api): w_l = api.PyLong_FromVoidPtr(lltype.nullptr(rffi.VOIDP.TO)) assert space.unwrap(w_l) == 0L - assert api.PyLong_AsVoidPtr(w_l) == lltype.nullptr(rffi.VOIDP_real.TO) + assert api.PyLong_AsVoidPtr(w_l) == lltype.nullptr(rffi.VOIDP.TO) def test_sign_and_bits(self, space, api): if space.is_true(space.lt(space.sys.get('version_info'), diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py --- a/pypy/module/cpyext/longobject.py +++ b/pypy/module/cpyext/longobject.py @@ -151,7 +151,7 @@ pend[0] = rffi.ptradd(str, len(s)) return space.call_function(space.w_long, w_str, w_base) - at cpython_api([rffi.VOIDP_real], PyObject) + at cpython_api([rffi.VOIDP], PyObject) def PyLong_FromVoidPtr(space, p): """Create a Python integer or long integer from the pointer p. The pointer value can be retrieved from the resulting value using PyLong_AsVoidPtr(). @@ -159,14 +159,14 @@ If the integer is larger than LONG_MAX, a positive long integer is returned.""" return space.wrap(rffi.cast(ADDR, p)) - at cpython_api([PyObject], rffi.VOIDP_real, error=lltype.nullptr(rffi.VOIDP_real.TO)) + at cpython_api([PyObject], rffi.VOIDP, error=lltype.nullptr(rffi.VOIDP.TO)) def PyLong_AsVoidPtr(space, w_long): """Convert a Python integer or long integer pylong to a C void pointer. If pylong cannot be converted, an OverflowError will be raised. This is only assured to produce a usable void pointer for values created with PyLong_FromVoidPtr(). 
For values outside 0..LONG_MAX, both signed and unsigned integers are accepted.""" - return rffi.cast(rffi.VOIDP_real, space.uint_w(w_long)) + return rffi.cast(rffi.VOIDP, space.uint_w(w_long)) @cpython_api([PyObject], rffi.SIZE_T, error=-1) def _PyLong_NumBits(space, w_long): From commits-noreply at bitbucket.org Tue Mar 1 21:11:52 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Tue, 01 Mar 2011 20:11:52 -0000 Subject: [pypy-svn] commit/extradoc: hpk42: remove rather outdated diagram drafts Message-ID: <20110301201152.26907.89664@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/b2cb5d401149/ changeset: r3325:b2cb5d401149 branch: extradoc user: hpk42 date: 2011-03-01 21:11:45 summary: remove rather outdated diagram drafts affected #: 1 file (0 bytes) Binary file Diagrams.sxi has changed Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Tue Mar 1 22:17:50 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Tue, 01 Mar 2011 21:17:50 -0000 Subject: [pypy-svn] commit/extradoc: 2 new changesets Message-ID: <20110301211750.18129.42443@bitbucket01.managed.contegix.com> 2 new changesets in extradoc: http://bitbucket.org/pypy/extradoc/changeset/c7e49cfddebc/ changeset: r3326:c7e49cfddebc branch: extradoc user: alex_gaynor date: 2011-03-01 19:28:28 summary: fijal and I need to write the talk (test commit) affected #: 1 file (17 bytes) --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/pycon2011/talk.txt Tue Mar 01 13:28:28 2011 -0500 @@ -0,0 +1,1 @@ +XXX: write talk. http://bitbucket.org/pypy/extradoc/changeset/dc7b7d9ef46d/ changeset: r3327:dc7b7d9ef46d branch: extradoc user: alex_gaynor date: 2011-03-01 22:17:43 summary: Merged upstream. 
affected #: 2 files (0 bytes) Binary file Diagrams.sxi has changed Binary file api_html.tar.gz has changed Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 00:09:56 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 00:09:56 +0100 (CET) Subject: [pypy-svn] pypy real-voidp: Close soon-merged branch Message-ID: <20110301230956.A41BD282BEC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-voidp Changeset: r42372:5b755a9ea3e4 Date: 2011-03-01 23:03 +0100 http://bitbucket.org/pypy/pypy/changeset/5b755a9ea3e4/ Log: Close soon-merged branch From commits-noreply at bitbucket.org Wed Mar 2 00:09:56 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 00:09:56 +0100 (CET) Subject: [pypy-svn] pypy default: merge real-voidp: rffi.VOIDP is now closer to a void* pointer. Message-ID: <20110301230956.DC00C2A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42373:61ef2a11b56a Date: 2011-03-01 23:29 +0100 http://bitbucket.org/pypy/pypy/changeset/61ef2a11b56a/ Log: merge real-voidp: rffi.VOIDP is now closer to a void* pointer. In RPython code, it's still an array of bytes (and can have storage), but it is rendered as void* when translated to C (the storage itself is translated to an array of chars) In external function calls, any type of pointer can be automatically converted to VOIDP, and None is accepted for any type of pointer. 
From commits-noreply at bitbucket.org Wed Mar 2 01:23:00 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 01:23:00 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: a branch to render rffi.INT as a C "int", even on 32bit platforms where it is equivalent to lltype.Signed and rffi.LONG Message-ID: <20110302002300.BFCC2282BEC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42374:40ab700357b9 Date: 2011-03-02 00:45 +0100 http://bitbucket.org/pypy/pypy/changeset/40ab700357b9/ Log: a branch to render rffi.INT as a C "int", even on 32bit platforms where it is equivalent to lltype.Signed and rffi.LONG From commits-noreply at bitbucket.org Wed Mar 2 01:23:04 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 01:23:04 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Start to make all int types different from each other. Message-ID: <20110302002304.7A2102A206F@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42375:7ae5475d94a0 Date: 2011-03-02 01:20 +0100 http://bitbucket.org/pypy/pypy/changeset/7ae5475d94a0/ Log: Start to make all int types different from each other. 
diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py --- a/pypy/rpython/lltypesystem/test/test_rffi.py +++ b/pypy/rpython/lltypesystem/test/test_rffi.py @@ -693,6 +693,10 @@ res = interpret(f, []) assert res == 3 + def test_type_identity(self): + assert INT != lltype.Signed + assert LONG != INT + def test_size_t_sign(self): assert r_size_t(-1) > 0 diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -405,7 +405,7 @@ NUMBER_TYPES = setup() platform.numbertype_to_rclass[lltype.Signed] = int # avoid "r_long" for common cases -r_int_real = rarithmetic.build_int("r_int_real", r_int.SIGN, r_int.BITS, True) +r_int_real = rarithmetic.build_int("r_int_real", r_int.SIGN, r_int.BITS) INT_real = lltype.build_number("INT", r_int_real) platform.numbertype_to_rclass[INT_real] = r_int_real NUMBER_TYPES.append(INT_real) diff --git a/pypy/rlib/rarithmetic.py b/pypy/rlib/rarithmetic.py --- a/pypy/rlib/rarithmetic.py +++ b/pypy/rlib/rarithmetic.py @@ -482,15 +482,8 @@ return super(unsigned_int, klass).__new__(klass, val & klass.MASK) typemap = {} -_inttypes = {} - -def build_int(name, sign, bits, force_creation=False): +def build_int(name, sign, bits): sign = bool(sign) - if not force_creation: - try: - return _inttypes[sign, bits] - except KeyError: - pass if sign: base_int_type = signed_int else: @@ -501,8 +494,6 @@ int_type = type(name, (base_int_type,), {'MASK': mask, 'BITS': bits, 'SIGN': sign}) - if not force_creation: - _inttypes[sign, bits] = int_type class ForValuesEntry(extregistry.ExtRegistryEntry): _type_ = int_type diff --git a/pypy/rpython/tool/rffi_platform.py b/pypy/rpython/tool/rffi_platform.py --- a/pypy/rpython/tool/rffi_platform.py +++ b/pypy/rpython/tool/rffi_platform.py @@ -612,8 +612,8 @@ rffi.SHORT, rffi.USHORT, rffi.INT, rffi.UINT, rffi.LONG, rffi.ULONG, - rffi.LONGLONG, rffi.ULONGLONG] -# XXX 
SIZE_T? + rffi.LONGLONG, rffi.ULONGLONG, + rffi.SIZE_T] float_class = [rffi.DOUBLE] diff --git a/pypy/rpython/tool/rfficache.py b/pypy/rpython/tool/rfficache.py --- a/pypy/rpython/tool/rfficache.py +++ b/pypy/rpython/tool/rfficache.py @@ -54,7 +54,10 @@ class Platform: def __init__(self): self.types = {} - self.numbertype_to_rclass = {} + self.numbertype_to_rclass = { + lltype.Signed: rarithmetic.r_int, + lltype.Unsigned: rarithmetic.r_uint, + } def inttype(self, name, c_name, signed, **kwds): try: From commits-noreply at bitbucket.org Wed Mar 2 01:44:39 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 01:44:39 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix a test Message-ID: <20110302004439.B5873282BEC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42376:43d15322e989 Date: 2011-03-02 01:42 +0100 http://bitbucket.org/pypy/pypy/changeset/43d15322e989/ Log: Fix a test diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py --- a/pypy/rpython/lltypesystem/test/test_rffi.py +++ b/pypy/rpython/lltypesystem/test/test_rffi.py @@ -558,8 +558,9 @@ assert interpret(f, []) == 3 def test_structcopy(self): - X2 = lltype.Struct('X2', ('x', LONG)) - X1 = lltype.Struct('X1', ('a', LONG), ('x2', X2), ('p', lltype.Ptr(X2))) + X2 = lltype.Struct('X2', ('x', lltype.Signed)) + X1 = lltype.Struct('X1', ('a', lltype.Signed), ('x2', X2), + ('p', lltype.Ptr(X2))) def f(): p2 = make(X2, x=123) p1 = make(X1, a=5, p=p2) From commits-noreply at bitbucket.org Wed Mar 2 09:06:37 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 09:06:37 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the _csv module (I should have run the tests...) 
Message-ID: <20110302080637.26CE2282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42377:958bc9287355 Date: 2011-03-02 09:06 +0100 http://bitbucket.org/pypy/pypy/changeset/958bc9287355/ Log: Fix the _csv module (I should have run the tests...) diff --git a/lib_pypy/_csv.py b/lib_pypy/_csv.py --- a/lib_pypy/_csv.py +++ b/lib_pypy/_csv.py @@ -256,25 +256,33 @@ while True: if c in '\n\r': # end of line - return [fields] + if pos2 > pos: + self._parse_add_char(line[pos:pos2]) + pos = pos2 self._parse_save_field() self.state = self.EAT_CRNL elif c == self.dialect.escapechar: # possible escaped character + pos2 -= 1 self.state = self.ESCAPED_CHAR elif c == self.dialect.delimiter: # save field - wait for new field + if pos2 > pos: + self._parse_add_char(line[pos:pos2]) + pos = pos2 self._parse_save_field() self.state = self.START_FIELD else: # normal character - save in field pos2 += 1 - c = line[pos2] - continue + if pos2 < len(line): + c = line[pos2] + continue break if pos2 > pos: self._parse_add_char(line[pos:pos2]) - pos = pos2 - + pos = pos2 - 1 + elif self.state == self.START_RECORD: if c in '\n\r': self.state = self.EAT_CRNL From commits-noreply at bitbucket.org Wed Mar 2 10:04:06 2011 From: commits-noreply at bitbucket.org (tav) Date: Wed, 2 Mar 2011 10:04:06 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed stdout/stderr mixup. Message-ID: <20110302090406.0530B282D00@codespeak.net> Author: tav Branch: Changeset: r42378:5c272f61d591 Date: 2011-03-02 09:03 +0000 http://bitbucket.org/pypy/pypy/changeset/5c272f61d591/ Log: Fixed stdout/stderr mixup. 
diff --git a/pypy/translator/platform/maemo.py b/pypy/translator/platform/maemo.py --- a/pypy/translator/platform/maemo.py +++ b/pypy/translator/platform/maemo.py @@ -61,7 +61,7 @@ log.execute('/scratchbox/login ' + cc + ' ' + ' '.join(args)) args = [cc] + args returncode, stdout, stderr = _run_subprocess('/scratchbox/login', args) - self._handle_error(returncode, stderr, stdout, outname) + self._handle_error(returncode, stdout, stderr, outname) def execute(self, executable, args=[], env=None): if isinstance(args, str): diff --git a/pypy/translator/platform/windows.py b/pypy/translator/platform/windows.py --- a/pypy/translator/platform/windows.py +++ b/pypy/translator/platform/windows.py @@ -193,7 +193,7 @@ return exe_name - def _handle_error(self, returncode, stderr, stdout, outname): + def _handle_error(self, returncode, stdout, stderr, outname): if returncode != 0: # Microsoft compilers write compilation errors to stdout stderr = stdout + stderr @@ -361,7 +361,7 @@ def library_dirs_for_libffi(self): return [] - def _handle_error(self, returncode, stderr, stdout, outname): + def _handle_error(self, returncode, stdout, stderr, outname): # Mingw tools write compilation errors to stdout super(MingwPlatform, self)._handle_error( - returncode, stderr + stdout, '', outname) + returncode, '', stderr + stdout, outname) diff --git a/pypy/translator/platform/__init__.py b/pypy/translator/platform/__init__.py --- a/pypy/translator/platform/__init__.py +++ b/pypy/translator/platform/__init__.py @@ -117,9 +117,9 @@ args = cclist[1:] + args returncode, stdout, stderr = _run_subprocess(cc, args, self.c_environ, cwd) - self._handle_error(returncode, stderr, stdout, outname) + self._handle_error(returncode, stdout, stderr, outname) - def _handle_error(self, returncode, stderr, stdout, outname): + def _handle_error(self, returncode, stdout, stderr, outname): if returncode != 0: errorfile = outname.new(ext='errors') errorfile.write(stderr, 'wb') From commits-noreply at 
bitbucket.org Wed Mar 2 11:00:34 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 11:00:34 +0100 (CET) Subject: [pypy-svn] pypy move-rfloat: Merge default Message-ID: <20110302100034.DD020282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: move-rfloat Changeset: r42379:70eb63c3f970 Date: 2011-03-02 10:41 +0100 http://bitbucket.org/pypy/pypy/changeset/70eb63c3f970/ Log: Merge default diff --git a/pypy/translator/c/node.py b/pypy/translator/c/node.py --- a/pypy/translator/c/node.py +++ b/pypy/translator/c/node.py @@ -337,12 +337,15 @@ self.varlength = varlength self.dependencies = {} contained_type = ARRAY.OF - if ARRAY._hints.get("render_as_void"): - contained_type = Void + # There is no such thing as an array of voids: + # we use a an array of chars instead; only the pointer can be void*. self.itemtypename = db.gettype(contained_type, who_asks=self) self.fulltypename = self.itemtypename.replace('@', '(@)[%d]' % (self.varlength,)) - self.fullptrtypename = self.itemtypename.replace('@', '*@') + if ARRAY._hints.get("render_as_void"): + self.fullptrtypename = 'void *@' + else: + self.fullptrtypename = self.itemtypename.replace('@', '*@') def setup(self): """Array loops are forbidden by ForwardReference.become() because @@ -363,7 +366,10 @@ return self.itemindex_access_expr(baseexpr, index) def itemindex_access_expr(self, baseexpr, indexexpr): - return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr) + if self.ARRAY._hints.get("render_as_void"): + return 'RPyBareItem((char*)%s, %s)' % (baseexpr, indexexpr) + else: + return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr) def definition(self): return [] # no declaration is needed From commits-noreply at bitbucket.org Wed Mar 2 13:20:10 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:20:10 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Start a list of "predefined int types" used by compute_restype(left, right) Message-ID: 
<20110302122010.E7032282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42380:9014bb4256b0 Date: 2011-03-02 11:32 +0100 http://bitbucket.org/pypy/pypy/changeset/9014bb4256b0/ Log: Start a list of "predefined int types" used by compute_restype(left, right) diff --git a/pypy/rlib/rarithmetic.py b/pypy/rlib/rarithmetic.py --- a/pypy/rlib/rarithmetic.py +++ b/pypy/rlib/rarithmetic.py @@ -270,7 +270,7 @@ return self_type if self_type in (bool, int, long): return other_type - return build_int(None, self_type.SIGNED and other_type.SIGNED, max(self_type.BITS, other_type.BITS)) + return get_int(self_type.SIGNED and other_type.SIGNED, max(self_type.BITS, other_type.BITS)) def signedtype(t): if t in (bool, int, long): @@ -283,10 +283,10 @@ if t is int: return int if t.BITS <= r_int.BITS: - return build_int(None, t.SIGNED, r_int.BITS) + return get_int(t.SIGNED, r_int.BITS) else: assert t.BITS <= r_longlong.BITS - return build_int(None, t.SIGNED, r_longlong.BITS) + return get_int(t.SIGNED, r_longlong.BITS) def most_neg_value_of_same_type(x): from pypy.rpython.lltypesystem import lltype @@ -482,6 +482,12 @@ return super(unsigned_int, klass).__new__(klass, val & klass.MASK) typemap = {} +def get_int(sign, bits): + try: + return _predefined_ints[sign, bits] + except KeyError: + raise TypeError('No predefined %sint%d'%(['u', ''][sign], bits)) + def build_int(name, sign, bits): sign = bool(sign) if sign: @@ -489,8 +495,6 @@ else: base_int_type = unsigned_int mask = (2 ** bits) - 1 - if name is None: - raise TypeError('No predefined %sint%d'%(['u', ''][sign], bits)) int_type = type(name, (base_int_type,), {'MASK': mask, 'BITS': bits, 'SIGN': sign}) @@ -541,6 +545,10 @@ else: r_int64 = int +_predefined_ints = { + (True, 64): r_longlong, + (False, 64): r_ulonglong, + } def rstring_to_float(s): if USE_SHORT_FLOAT_REPR: From commits-noreply at bitbucket.org Wed Mar 2 13:20:16 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 
13:20:16 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: in external function calls, automatic cast from SIZE_T to lltype.Unsigned Message-ID: <20110302122016.83B252A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42381:c444392d2e35 Date: 2011-03-02 11:34 +0100 http://bitbucket.org/pypy/pypy/changeset/c444392d2e35/ Log: in external function calls, automatic cast from SIZE_T to lltype.Unsigned diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -222,7 +222,7 @@ if rarithmetic.r_int is not r_int: if result is INT: return cast(lltype.Signed, res) - elif result is UINT: + elif result is UINT or result is SIZE_T: return cast(lltype.Unsigned, res) return res wrapper._annspecialcase_ = 'specialize:ll' From commits-noreply at bitbucket.org Wed Mar 2 13:55:23 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:55:23 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: module/cpyext: s/INT_real/INT/g Message-ID: <20110302125523.E712C282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42382:2a1fa9b997c0 Date: 2011-03-02 13:27 +0100 http://bitbucket.org/pypy/pypy/changeset/2a1fa9b997c0/ Log: module/cpyext: s/INT_real/INT/g diff --git a/pypy/module/cpyext/stubsactive.py b/pypy/module/cpyext/stubsactive.py --- a/pypy/module/cpyext/stubsactive.py +++ b/pypy/module/cpyext/stubsactive.py @@ -58,7 +58,7 @@ compile time.""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def Py_MakePendingCalls(space): return 0 diff --git a/pypy/module/cpyext/sequence.py b/pypy/module/cpyext/sequence.py --- a/pypy/module/cpyext/sequence.py +++ b/pypy/module/cpyext/sequence.py @@ -18,7 +18,7 @@ """ return space.mul(w_obj, space.wrap(count)) - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at 
cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PySequence_Check(space, w_obj): """Return 1 if the object provides sequence protocol, and 0 otherwise. This function always succeeds.""" @@ -81,14 +81,14 @@ failure. This is the equivalent of the Python expression o[i1:i2].""" return space.getslice(w_obj, space.wrap(start), space.wrap(end)) - at cpython_api([PyObject, Py_ssize_t, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, Py_ssize_t, PyObject], rffi.INT, error=-1) def PySequence_SetSlice(space, w_obj, start, end, w_value): """Assign the sequence object v to the slice in sequence object o from i1 to i2. This is the equivalent of the Python statement o[i1:i2] = v.""" space.setslice(w_obj, space.wrap(start), space.wrap(end), w_value) return 0 - at cpython_api([PyObject, Py_ssize_t, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, Py_ssize_t], rffi.INT, error=-1) def PySequence_DelSlice(space, w_obj, start, end): """Delete the slice in sequence object o from i1 to i2. Returns -1 on failure. This is the equivalent of the Python statement del o[i1:i2].""" @@ -121,7 +121,7 @@ This is the equivalent of the Python expression o1 + o2.""" return space.add(w_o1, w_o2) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PySequence_Contains(space, w_obj, w_value): """Determine if o contains value. If an item in o is equal to value, return 1, otherwise return 0. On error, return -1. This is @@ -137,7 +137,7 @@ """ return space.iter(w_seq) - at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT, error=-1) def PySequence_SetItem(space, w_o, i, w_v): """Assign object v to the ith element of o. Returns -1 on failure. This is the equivalent of the Python statement o[i] = v. 
This function does @@ -148,7 +148,7 @@ space.setitem(w_o, space.wrap(i), w_v) return 0 - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PySequence_DelItem(space, w_o, i): """Delete the ith element of object o. Returns -1 on failure. This is the equivalent of the Python statement del o[i].""" diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -466,9 +466,9 @@ w_type = get_w_type(space) return space.is_w(w_obj_type, w_type) - check = cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)( + check = cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL)( func_with_new_name(check, check_name)) - check_exact = cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL)( + check_exact = cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL)( func_with_new_name(check_exact, check_name + "Exact")) return check, check_exact diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -154,7 +154,7 @@ space.wrap(msg)) raise OperationError(w_type, w_error) - at cpython_api([], rffi.INT_real, error=-1) + at cpython_api([], rffi.INT, error=-1) def PyErr_CheckSignals(space): """ This function interacts with Python's signal handling. It checks whether a @@ -168,7 +168,7 @@ # XXX implement me return 0 - at cpython_api([PyObject, PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) def PyErr_GivenExceptionMatches(space, w_given, w_exc): """Return true if the given exception matches the exception in exc. 
If exc is a class object, this also returns true when given is an instance @@ -181,7 +181,7 @@ w_given_type = w_given return space.exception_match(w_given_type, w_exc) - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyErr_ExceptionMatches(space, w_exc): """Equivalent to PyErr_GivenExceptionMatches(PyErr_Occurred(), exc). This should only be called when an exception is actually set; a memory access @@ -190,7 +190,7 @@ return PyErr_GivenExceptionMatches(space, w_type, w_exc) - at cpython_api([PyObject, CONST_STRING, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING, rffi.INT], rffi.INT, error=-1) def PyErr_WarnEx(space, w_category, message_ptr, stacklevel): """Issue a warning message. The category argument is a warning category (see below) or NULL; the message argument is a message string. stacklevel is a @@ -235,7 +235,7 @@ space.call_function(w_warn, w_message, w_category, w_stacklevel) return 0 - at cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING], rffi.INT, error=-1) def PyErr_Warn(space, w_category, message): """Issue a warning message. The category argument is a warning category (see below) or NULL; the message argument is a message string. The warning will @@ -245,7 +245,7 @@ Deprecated; use PyErr_WarnEx() instead.""" return PyErr_WarnEx(space, w_category, message, 1) - at cpython_api([rffi.INT_real], lltype.Void) + at cpython_api([rffi.INT], lltype.Void) def PyErr_PrintEx(space, set_sys_last_vars): """Print a standard traceback to sys.stderr and clear the error indicator. Call this function only when the error indicator is set. 
(Otherwise it will @@ -276,7 +276,7 @@ """Alias for PyErr_PrintEx(1).""" PyErr_PrintEx(space, 1) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyTraceBack_Print(space, w_tb, w_file): space.call_method(w_file, "write", space.wrap( 'Traceback (most recent call last):\n')) diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -465,7 +465,7 @@ pto.c_tp_flags |= Py_TPFLAGS_READY return pto - at cpython_api([PyTypeObjectPtr], rffi.INT_real, error=-1) + at cpython_api([PyTypeObjectPtr], rffi.INT, error=-1) def PyType_Ready(space, pto): if pto.c_tp_flags & Py_TPFLAGS_READY: return 0 @@ -602,7 +602,7 @@ w_dict = space.newdict(from_strdict_shared=w_obj.dict_w) pto.c_tp_dict = make_ref(space, w_dict) - at cpython_api([PyTypeObjectPtr, PyTypeObjectPtr], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyTypeObjectPtr, PyTypeObjectPtr], rffi.INT, error=CANNOT_FAIL) def PyType_IsSubtype(space, a, b): """Return true if a is a subtype of b. 
""" diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -84,11 +84,11 @@ def _PyObject_GetDictPtr(space, op): return lltype.nullptr(PyObjectP.TO) - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyObject_IsTrue(space, w_obj): return space.is_true(w_obj) - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyObject_Not(space, w_obj): return not space.is_true(w_obj) @@ -107,7 +107,7 @@ name = rffi.charp2str(name_ptr) return space.getattr(w_obj, space.wrap(name)) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) def PyObject_HasAttr(space, w_obj, w_name): try: w_res = operation.hasattr(space, w_obj, w_name) @@ -115,7 +115,7 @@ except OperationError: return 0 - at cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], rffi.INT, error=CANNOT_FAIL) def PyObject_HasAttrString(space, w_obj, name_ptr): try: name = rffi.charp2str(name_ptr) @@ -124,25 +124,25 @@ except OperationError: return 0 - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1) def PyObject_SetAttr(space, w_obj, w_name, w_value): operation.setattr(space, w_obj, w_name, w_value) return 0 - at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT, error=-1) def PyObject_SetAttrString(space, w_obj, name_ptr, w_value): w_name = space.wrap(rffi.charp2str(name_ptr)) operation.setattr(space, w_obj, w_name, w_value) return 0 - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyObject_DelAttr(space, w_obj, w_name): """Delete 
attribute named attr_name, for object o. Returns -1 on failure. This is the equivalent of the Python statement del o.attr_name.""" space.delattr(w_obj, w_name) return 0 - at cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING], rffi.INT, error=-1) def PyObject_DelAttrString(space, w_obj, name_ptr): """Delete attribute named attr_name, for object o. Returns -1 on failure. This is the equivalent of the Python statement del o.attr_name.""" @@ -158,7 +158,7 @@ def PyObject_Size(space, w_obj): return space.len_w(w_obj) - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyCallable_Check(space, w_obj): """Determine if the object o is callable. Return 1 if the object is callable and 0 otherwise. This function always succeeds.""" @@ -170,14 +170,14 @@ This is the equivalent of the Python expression o[key].""" return space.getitem(w_obj, w_key) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1) def PyObject_SetItem(space, w_obj, w_key, w_value): """Map the object key to the value v. Returns -1 on failure. This is the equivalent of the Python statement o[key] = v.""" space.setitem(w_obj, w_key, w_value) return 0 - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyObject_DelItem(space, w_obj, w_key): """Delete the mapping for key from o. Returns -1 on failure. 
This is the equivalent of the Python statement del o[key].""" @@ -246,7 +246,7 @@ function.""" return space.call_function(space.w_unicode, w_obj) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyObject_Compare(space, w_o1, w_o2): """ Compare the values of o1 and o2 using a routine provided by o1, if one @@ -256,7 +256,7 @@ expression cmp(o1, o2).""" return space.int_w(space.cmp(w_o1, w_o2)) - at cpython_api([PyObject, PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, PyObject, rffi.INT], PyObject) def PyObject_RichCompare(space, w_o1, w_o2, opid_int): """Compare the values of o1 and o2 using the operation specified by opid, which must be one of Py_LT, Py_LE, Py_EQ, @@ -273,7 +273,7 @@ if opid == Py_GE: return space.ge(w_o1, w_o2) PyErr_BadInternalCall(space) - at cpython_api([PyObject, PyObject, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, rffi.INT], rffi.INT, error=-1) def PyObject_RichCompareBool(space, ref1, ref2, opid): """Compare the values of o1 and o2 using the operation specified by opid, which must be one of Py_LT, Py_LE, Py_EQ, @@ -303,7 +303,7 @@ w_descr = object_getattribute(space) return space.get_and_call_function(w_descr, w_obj, w_name) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1) def PyObject_GenericSetAttr(space, w_obj, w_name, w_value): """Generic attribute setter function that is meant to be put into a type object's tp_setattro slot. It looks for a data descriptor in the @@ -320,7 +320,7 @@ space.get_and_call_function(w_descr, w_obj, w_name) return 0 - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyObject_IsInstance(space, w_inst, w_cls): """Returns 1 if inst is an instance of the class cls or a subclass of cls, or 0 if not. 
On error, returns -1 and sets an exception. If @@ -335,7 +335,7 @@ from pypy.module.__builtin__.abstractinst import abstract_isinstance_w return abstract_isinstance_w(space, w_inst, w_cls) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyObject_IsSubclass(space, w_derived, w_cls): """Returns 1 if the class derived is identical to or derived from the class cls, otherwise returns 0. In case of an error, returns -1. If cls @@ -346,7 +346,7 @@ from pypy.module.__builtin__.abstractinst import abstract_issubclass_w return abstract_issubclass_w(space, w_derived, w_cls) - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyObject_AsFileDescriptor(space, w_obj): """Derives a file descriptor from a Python object. If the object is an integer or long integer, its value is returned. If not, the object's @@ -371,7 +371,7 @@ space.w_ValueError, space.wrap( "file descriptor cannot be a negative integer")) - return rffi.cast(rffi.INT_real, fd) + return rffi.cast(rffi.INT, fd) @cpython_api([PyObject], lltype.Signed, error=-1) @@ -381,7 +381,7 @@ This is the equivalent of the Python expression hash(o).""" return space.int_w(space.hash(w_obj)) - at cpython_api([PyObject, rffi.CCHARPP, Py_ssize_tP], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARPP, Py_ssize_tP], rffi.INT, error=-1) def PyObject_AsCharBuffer(space, obj, bufferp, sizep): """Returns a pointer to a read-only memory location usable as character-based input. The obj argument must support the single-segment @@ -409,7 +409,7 @@ # Also in include/object.h Py_PRINT_RAW = 1 # No string quotes etc. - at cpython_api([PyObject, FILEP, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, FILEP, rffi.INT], rffi.INT, error=-1) def PyObject_Print(space, w_obj, fp, flags): """Print an object o, on file fp. Returns -1 on error. 
The flags argument is used to enable certain printing options. The only option currently @@ -431,7 +431,7 @@ PyFile_Check, PyFile_CheckExact = build_type_checkers("File", W_File) - at cpython_api([PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, rffi.INT], PyObject) def PyFile_GetLine(space, w_obj, n): """ Equivalent to p.readline([n]), this function reads one line from the diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py --- a/pypy/module/cpyext/intobject.py +++ b/pypy/module/cpyext/intobject.py @@ -74,7 +74,7 @@ """ return space.wrap(ival) # XXX this is wrong on win64 - at cpython_api([CONST_STRING, rffi.CCHARPP, rffi.INT_real], PyObject) + at cpython_api([CONST_STRING, rffi.CCHARPP, rffi.INT], PyObject) def PyInt_FromString(space, str, pend, base): """Return a new PyIntObject or PyLongObject based on the string value in str, which is interpreted according to the radix in base. If diff --git a/pypy/module/cpyext/dictobject.py b/pypy/module/cpyext/dictobject.py --- a/pypy/module/cpyext/dictobject.py +++ b/pypy/module/cpyext/dictobject.py @@ -21,7 +21,7 @@ return None return borrow_from(w_dict, w_res) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1) def PyDict_SetItem(space, w_dict, w_key, w_obj): if PyDict_Check(space, w_dict): space.setitem(w_dict, w_key, w_obj) @@ -29,7 +29,7 @@ else: PyErr_BadInternalCall(space) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyDict_DelItem(space, w_dict, w_key): if PyDict_Check(space, w_dict): space.delitem(w_dict, w_key) @@ -37,7 +37,7 @@ else: PyErr_BadInternalCall(space) - at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT, error=-1) def PyDict_SetItemString(space, w_dict, key_ptr, w_obj): if PyDict_Check(space, w_dict): key 
= rffi.charp2str(key_ptr) @@ -60,7 +60,7 @@ return None return borrow_from(w_dict, w_res) - at cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARP], rffi.INT, error=-1) def PyDict_DelItemString(space, w_dict, key_ptr): """Remove the entry in dictionary p which has a key specified by the string key. Return 0 on success or -1 on failure.""" @@ -80,7 +80,7 @@ len(p) on a dictionary.""" return space.len_w(w_obj) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyDict_Contains(space, w_obj, w_value): """Determine if dictionary p contains key. If an item in p is matches key, return 1, otherwise return 0. On error, return -1. @@ -100,7 +100,7 @@ """ return space.call_method(w_obj, "copy") - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyDict_Update(space, w_obj, w_other): """This is the same as PyDict_Merge(a, b, 1) in C, or a.update(b) in Python. Return 0 on success or -1 if an exception was raised. @@ -126,7 +126,7 @@ dictionary, as in the dictionary method dict.items().""" return space.call_method(w_obj, "items") - at cpython_api([PyObject, Py_ssize_tP, PyObjectP, PyObjectP], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, Py_ssize_tP, PyObjectP, PyObjectP], rffi.INT, error=CANNOT_FAIL) def PyDict_Next(space, w_dict, ppos, pkey, pvalue): """Iterate over all key-value pairs in the dictionary p. 
The Py_ssize_t referred to by ppos must be initialized to 0 diff --git a/pypy/module/cpyext/methodobject.py b/pypy/module/cpyext/methodobject.py --- a/pypy/module/cpyext/methodobject.py +++ b/pypy/module/cpyext/methodobject.py @@ -24,7 +24,7 @@ 'PyMethodDef', [('ml_name', rffi.CCHARP), ('ml_meth', PyCFunction_typedef), - ('ml_flags', rffi.INT_real), + ('ml_flags', rffi.INT), ('ml_doc', rffi.CCHARP), ]) diff --git a/pypy/module/cpyext/typeobjectdefs.py b/pypy/module/cpyext/typeobjectdefs.py --- a/pypy/module/cpyext/typeobjectdefs.py +++ b/pypy/module/cpyext/typeobjectdefs.py @@ -13,44 +13,44 @@ freefunc = P(FT([rffi.VOIDP], Void)) destructor = P(FT([PyO], Void)) -printfunc = P(FT([PyO, FILEP, rffi.INT_real], rffi.INT)) +printfunc = P(FT([PyO, FILEP, rffi.INT], rffi.INT)) getattrfunc = P(FT([PyO, rffi.CCHARP], PyO)) getattrofunc = P(FT([PyO, PyO], PyO)) -setattrfunc = P(FT([PyO, rffi.CCHARP, PyO], rffi.INT_real)) -setattrofunc = P(FT([PyO, PyO, PyO], rffi.INT_real)) -cmpfunc = P(FT([PyO, PyO], rffi.INT_real)) +setattrfunc = P(FT([PyO, rffi.CCHARP, PyO], rffi.INT)) +setattrofunc = P(FT([PyO, PyO, PyO], rffi.INT)) +cmpfunc = P(FT([PyO, PyO], rffi.INT)) reprfunc = P(FT([PyO], PyO)) hashfunc = P(FT([PyO], lltype.Signed)) -richcmpfunc = P(FT([PyO, PyO, rffi.INT_real], PyO)) +richcmpfunc = P(FT([PyO, PyO, rffi.INT], PyO)) getiterfunc = P(FT([PyO], PyO)) iternextfunc = P(FT([PyO], PyO)) descrgetfunc = P(FT([PyO, PyO, PyO], PyO)) -descrsetfunc = P(FT([PyO, PyO, PyO], rffi.INT_real)) -initproc = P(FT([PyO, PyO, PyO], rffi.INT_real)) +descrsetfunc = P(FT([PyO, PyO, PyO], rffi.INT)) +initproc = P(FT([PyO, PyO, PyO], rffi.INT)) newfunc = P(FT([PyTypeObjectPtr, PyO, PyO], PyO)) allocfunc = P(FT([PyTypeObjectPtr, Py_ssize_t], PyO)) unaryfunc = P(FT([PyO], PyO)) binaryfunc = P(FT([PyO, PyO], PyO)) ternaryfunc = P(FT([PyO, PyO, PyO], PyO)) -inquiry = P(FT([PyO], rffi.INT_real)) +inquiry = P(FT([PyO], rffi.INT)) lenfunc = P(FT([PyO], Py_ssize_t)) -coercion = P(FT([PyOPtr, PyOPtr], 
rffi.INT_real)) -intargfunc = P(FT([PyO, rffi.INT_real], PyO)) -intintargfunc = P(FT([PyO, rffi.INT_real, rffi.INT], PyO)) +coercion = P(FT([PyOPtr, PyOPtr], rffi.INT)) +intargfunc = P(FT([PyO, rffi.INT], PyO)) +intintargfunc = P(FT([PyO, rffi.INT, rffi.INT], PyO)) ssizeargfunc = P(FT([PyO, Py_ssize_t], PyO)) ssizessizeargfunc = P(FT([PyO, Py_ssize_t, Py_ssize_t], PyO)) -intobjargproc = P(FT([PyO, rffi.INT_real, PyO], rffi.INT)) -intintobjargproc = P(FT([PyO, rffi.INT_real, rffi.INT, PyO], rffi.INT)) -ssizeobjargproc = P(FT([PyO, Py_ssize_t, PyO], rffi.INT_real)) -ssizessizeobjargproc = P(FT([PyO, Py_ssize_t, Py_ssize_t, PyO], rffi.INT_real)) -objobjargproc = P(FT([PyO, PyO, PyO], rffi.INT_real)) +intobjargproc = P(FT([PyO, rffi.INT, PyO], rffi.INT)) +intintobjargproc = P(FT([PyO, rffi.INT, rffi.INT, PyO], rffi.INT)) +ssizeobjargproc = P(FT([PyO, Py_ssize_t, PyO], rffi.INT)) +ssizessizeobjargproc = P(FT([PyO, Py_ssize_t, Py_ssize_t, PyO], rffi.INT)) +objobjargproc = P(FT([PyO, PyO, PyO], rffi.INT)) -objobjproc = P(FT([PyO, PyO], rffi.INT_real)) -visitproc = P(FT([PyO, rffi.VOIDP], rffi.INT_real)) -traverseproc = P(FT([PyO, visitproc, rffi.VOIDP], rffi.INT_real)) +objobjproc = P(FT([PyO, PyO], rffi.INT)) +visitproc = P(FT([PyO, rffi.VOIDP], rffi.INT)) +traverseproc = P(FT([PyO, visitproc, rffi.VOIDP], rffi.INT)) getter = P(FT([PyO, rffi.VOIDP], PyO)) -setter = P(FT([PyO, PyO, rffi.VOIDP], rffi.INT_real)) +setter = P(FT([PyO, PyO, rffi.VOIDP], rffi.INT)) wrapperfunc = P(FT([PyO, PyO, rffi.VOIDP], PyO)) wrapperfunc_kwds = P(FT([PyO, PyO, rffi.VOIDP, PyO], PyO)) @@ -140,9 +140,9 @@ PyMemberDef = cpython_struct("PyMemberDef", ( ("name", rffi.CCHARP), - ("type", rffi.INT_real), + ("type", rffi.INT), ("offset", Py_ssize_t), - ("flags", rffi.INT_real), + ("flags", rffi.INT), ("doc", rffi.CCHARP), )) diff --git a/pypy/module/cpyext/tupleobject.py b/pypy/module/cpyext/tupleobject.py --- a/pypy/module/cpyext/tupleobject.py +++ b/pypy/module/cpyext/tupleobject.py @@ -14,7 
+14,7 @@ def PyTuple_New(space, size): return space.newtuple([space.w_None] * size) - at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT, error=-1) def PyTuple_SetItem(space, w_t, pos, w_obj): if not PyTuple_Check(space, w_t): # XXX this should also steal a reference, test it!!! @@ -48,7 +48,7 @@ return PyTuple_GET_SIZE(space, ref) - at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObjectP, Py_ssize_t], rffi.INT, error=-1) def _PyTuple_Resize(space, ref, newsize): """Can be used to resize a tuple. newsize will be the new length of the tuple. Because tuples are supposed to be immutable, this should only be used if there diff --git a/pypy/module/cpyext/test/test_longobject.py b/pypy/module/cpyext/test/test_longobject.py --- a/pypy/module/cpyext/test/test_longobject.py +++ b/pypy/module/cpyext/test/test_longobject.py @@ -68,7 +68,7 @@ space.wrap(1<<64)) == 0 def test_as_long_and_overflow(self, space, api): - overflow = lltype.malloc(rffi.CArrayPtr(rffi.INT_real).TO, 1, flavor='raw') + overflow = lltype.malloc(rffi.CArrayPtr(rffi.INT).TO, 1, flavor='raw') assert api.PyLong_AsLongAndOverflow( space.wrap(sys.maxint), overflow) == sys.maxint assert api.PyLong_AsLongAndOverflow( @@ -78,7 +78,7 @@ lltype.free(overflow, flavor='raw') def test_as_longlong_and_overflow(self, space, api): - overflow = lltype.malloc(rffi.CArrayPtr(rffi.INT_real).TO, 1, flavor='raw') + overflow = lltype.malloc(rffi.CArrayPtr(rffi.INT).TO, 1, flavor='raw') assert api.PyLong_AsLongLongAndOverflow( space.wrap(1<<62), overflow) == 1<<62 assert api.PyLong_AsLongLongAndOverflow( diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py --- a/pypy/module/cpyext/longobject.py +++ b/pypy/module/cpyext/longobject.py @@ -79,7 +79,7 @@ num = space.bigint_w(w_long) return num.ulonglongmask() - at cpython_api([PyObject, rffi.CArrayPtr(rffi.INT_real)], lltype.Signed, + at 
cpython_api([PyObject, rffi.CArrayPtr(rffi.INT)], lltype.Signed, error=-1) def PyLong_AsLongAndOverflow(space, w_long, overflow_ptr): """ @@ -88,19 +88,19 @@ respectively, and return -1; otherwise, set *overflow to 0. If any other exception occurs (for example a TypeError or MemoryError), then -1 will be returned and *overflow will be 0.""" - overflow_ptr[0] = rffi.cast(rffi.INT_real, 0) + overflow_ptr[0] = rffi.cast(rffi.INT, 0) try: return space.int_w(w_long) except OperationError, e: if not e.match(space, space.w_OverflowError): raise if space.is_true(space.gt(w_long, space.wrap(0))): - overflow_ptr[0] = rffi.cast(rffi.INT_real, 1) + overflow_ptr[0] = rffi.cast(rffi.INT, 1) else: - overflow_ptr[0] = rffi.cast(rffi.INT_real, -1) + overflow_ptr[0] = rffi.cast(rffi.INT, -1) return -1 - at cpython_api([PyObject, rffi.CArrayPtr(rffi.INT_real)], rffi.LONGLONG, + at cpython_api([PyObject, rffi.CArrayPtr(rffi.INT)], rffi.LONGLONG, error=-1) def PyLong_AsLongLongAndOverflow(space, w_long, overflow_ptr): """ @@ -109,16 +109,16 @@ -1, respectively, and return -1; otherwise, set *overflow to 0. 
If any other exception occurs (for example a TypeError or MemoryError), then -1 will be returned and *overflow will be 0.""" - overflow_ptr[0] = rffi.cast(rffi.INT_real, 0) + overflow_ptr[0] = rffi.cast(rffi.INT, 0) try: return rffi.cast(rffi.LONGLONG, space.r_longlong_w(w_long)) except OperationError, e: if not e.match(space, space.w_OverflowError): raise if space.is_true(space.gt(w_long, space.wrap(0))): - overflow_ptr[0] = rffi.cast(rffi.INT_real, 1) + overflow_ptr[0] = rffi.cast(rffi.INT, 1) else: - overflow_ptr[0] = rffi.cast(rffi.INT_real, -1) + overflow_ptr[0] = rffi.cast(rffi.INT, -1) return -1 @cpython_api([lltype.Float], PyObject) @@ -133,7 +133,7 @@ OverflowError exception is raised and -1.0 will be returned.""" return space.float_w(space.float(w_long)) - at cpython_api([CONST_STRING, rffi.CCHARPP, rffi.INT_real], PyObject) + at cpython_api([CONST_STRING, rffi.CCHARPP, rffi.INT], PyObject) def PyLong_FromString(space, str, pend, base): """Return a new PyLongObject based on the string value in str, which is interpreted according to the radix in base. If pend is non-NULL, @@ -172,7 +172,7 @@ def _PyLong_NumBits(space, w_long): return space.uint_w(space.call_method(w_long, "bit_length")) - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def _PyLong_Sign(space, w_long): assert isinstance(w_long, W_LongObject) return w_long.num.sign diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -34,7 +34,7 @@ # This is actually the Py_InitModule4 function, # renamed to refuse modules built against CPython headers. 
@cpython_api([CONST_STRING, lltype.Ptr(PyMethodDef), CONST_STRING, - PyObject, rffi.INT_real], PyObject) + PyObject, rffi.INT], PyObject) def _Py_InitPyPyModule(space, name, methods, doc, w_self, apiver): """ Create a new module object based on a name and table of functions, returning @@ -102,7 +102,7 @@ dict_w[methodname] = w_obj - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyModule_Check(space, w_obj): w_type = space.gettypeobject(Module.typedef) w_obj_type = space.type(w_obj) diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -84,7 +84,7 @@ w_code = compiling.compile(space, w_source, filename, mode) return compiling.eval(space, w_code, w_globals, w_locals) - at cpython_api([CONST_STRING, rffi.INT_real,PyObject, PyObject], PyObject) + at cpython_api([CONST_STRING, rffi.INT,PyObject, PyObject], PyObject) def PyRun_String(space, source, start, w_globals, w_locals): """This is a simplified interface to PyRun_StringFlags() below, leaving flags set to NULL.""" @@ -92,7 +92,7 @@ filename = "" return run_string(space, source, filename, start, w_globals, w_locals) - at cpython_api([FILEP, CONST_STRING, rffi.INT_real, PyObject, PyObject], PyObject) + at cpython_api([FILEP, CONST_STRING, rffi.INT, PyObject, PyObject], PyObject) def PyRun_File(space, fp, filename, start, w_globals, w_locals): """This is a simplified interface to PyRun_FileExFlags() below, leaving closeit set to 0 and flags set to NULL.""" @@ -113,7 +113,7 @@ return run_string(space, source, filename, start, w_globals, w_locals) # Undocumented function! - at cpython_api([PyObject, Py_ssize_tP], rffi.INT_real, error=0) + at cpython_api([PyObject, Py_ssize_tP], rffi.INT, error=0) def _PyEval_SliceIndex(space, w_obj, pi): """Extract a slice index from a PyInt or PyLong or an object with the nb_index slot defined, and store in *pi. 
diff --git a/pypy/module/cpyext/cdatetime.py b/pypy/module/cpyext/cdatetime.py --- a/pypy/module/cpyext/cdatetime.py +++ b/pypy/module/cpyext/cdatetime.py @@ -54,7 +54,7 @@ # Check functions def make_check_function(func_name, type_name): - @cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + @cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) @func_renamer(func_name) def check(space, w_obj): try: @@ -66,7 +66,7 @@ except OperationError: return 0 - @cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + @cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) @func_renamer(func_name + "Exact") def check_exact(space, w_obj): try: @@ -85,7 +85,7 @@ # Constructors - at cpython_api([rffi.INT_real, rffi.INT_real, rffi.INT_real], PyObject) + at cpython_api([rffi.INT, rffi.INT, rffi.INT], PyObject) def PyDate_FromDate(space, year, month, day): """Return a datetime.date object with the specified year, month and day. """ @@ -97,7 +97,7 @@ w_datetime, "date", space.wrap(year), space.wrap(month), space.wrap(day)) - at cpython_api([rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real], PyObject) + at cpython_api([rffi.INT, rffi.INT, rffi.INT, rffi.INT], PyObject) def PyTime_FromTime(space, hour, minute, second, usecond): """Return a ``datetime.time`` object with the specified hour, minute, second and microsecond.""" @@ -111,7 +111,7 @@ space.wrap(hour), space.wrap(minute), space.wrap(second), space.wrap(usecond)) - at cpython_api([rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real], PyObject) + at cpython_api([rffi.INT, rffi.INT, rffi.INT, rffi.INT, rffi.INT, rffi.INT, rffi.INT], PyObject) def PyDateTime_FromDateAndTime(space, year, month, day, hour, minute, second, usecond): """Return a datetime.datetime object with the specified year, month, day, hour, minute, second and microsecond. 
@@ -150,7 +150,7 @@ w_method = space.getattr(w_type, space.wrap("fromtimestamp")) return space.call(w_method, w_args) - at cpython_api([rffi.INT_real, rffi.INT_real, rffi.INT_real], PyObject) + at cpython_api([rffi.INT, rffi.INT, rffi.INT], PyObject) def PyDelta_FromDSU(space, days, seconds, useconds): """Return a datetime.timedelta object representing the given number of days, seconds and microseconds. Normalization is performed so that the resulting @@ -167,67 +167,67 @@ # Accessors - at cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Date], rffi.INT, error=CANNOT_FAIL) def PyDateTime_GET_YEAR(space, w_obj): """Return the year, as a positive int. """ return space.int_w(space.getattr(w_obj, space.wrap("year"))) - at cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Date], rffi.INT, error=CANNOT_FAIL) def PyDateTime_GET_MONTH(space, w_obj): """Return the month, as an int from 1 through 12. """ return space.int_w(space.getattr(w_obj, space.wrap("month"))) - at cpython_api([PyDateTime_Date], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Date], rffi.INT, error=CANNOT_FAIL) def PyDateTime_GET_DAY(space, w_obj): """Return the day, as an int from 1 through 31. """ return space.int_w(space.getattr(w_obj, space.wrap("day"))) - at cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_DateTime], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DATE_GET_HOUR(space, w_obj): """Return the hour, as an int from 0 through 23. """ return space.int_w(space.getattr(w_obj, space.wrap("hour"))) - at cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_DateTime], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DATE_GET_MINUTE(space, w_obj): """Return the minute, as an int from 0 through 59. 
""" return space.int_w(space.getattr(w_obj, space.wrap("minute"))) - at cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_DateTime], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DATE_GET_SECOND(space, w_obj): """Return the second, as an int from 0 through 59. """ return space.int_w(space.getattr(w_obj, space.wrap("second"))) - at cpython_api([PyDateTime_DateTime], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_DateTime], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DATE_GET_MICROSECOND(space, w_obj): """Return the microsecond, as an int from 0 through 999999. """ return space.int_w(space.getattr(w_obj, space.wrap("microsecond"))) - at cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Time], rffi.INT, error=CANNOT_FAIL) def PyDateTime_TIME_GET_HOUR(space, w_obj): """Return the hour, as an int from 0 through 23. """ return space.int_w(space.getattr(w_obj, space.wrap("hour"))) - at cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Time], rffi.INT, error=CANNOT_FAIL) def PyDateTime_TIME_GET_MINUTE(space, w_obj): """Return the minute, as an int from 0 through 59. """ return space.int_w(space.getattr(w_obj, space.wrap("minute"))) - at cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Time], rffi.INT, error=CANNOT_FAIL) def PyDateTime_TIME_GET_SECOND(space, w_obj): """Return the second, as an int from 0 through 59. """ return space.int_w(space.getattr(w_obj, space.wrap("second"))) - at cpython_api([PyDateTime_Time], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Time], rffi.INT, error=CANNOT_FAIL) def PyDateTime_TIME_GET_MICROSECOND(space, w_obj): """Return the microsecond, as an int from 0 through 999999. """ @@ -237,14 +237,14 @@ # But it does not seem possible to expose a different structure # for types defined in a python module like lib/datetime.py. 
- at cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Delta], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DELTA_GET_DAYS(space, w_obj): return space.int_w(space.getattr(w_obj, space.wrap("days"))) - at cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Delta], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DELTA_GET_SECONDS(space, w_obj): return space.int_w(space.getattr(w_obj, space.wrap("seconds"))) - at cpython_api([PyDateTime_Delta], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyDateTime_Delta], rffi.INT, error=CANNOT_FAIL) def PyDateTime_DELTA_GET_MICROSECONDS(space, w_obj): return space.int_w(space.getattr(w_obj, space.wrap("microseconds"))) diff --git a/pypy/module/cpyext/listobject.py b/pypy/module/cpyext/listobject.py --- a/pypy/module/cpyext/listobject.py +++ b/pypy/module/cpyext/listobject.py @@ -21,7 +21,7 @@ """ return space.newlist([None] * len) - at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT, error=-1) def PyList_SetItem(space, w_list, index, w_item): """Set the item at index index in list to item. Return 0 on success or -1 on failure. @@ -54,14 +54,14 @@ return borrow_from(w_list, wrappeditems[index]) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyList_Append(space, w_list, w_item): if not isinstance(w_list, W_ListObject): PyErr_BadInternalCall(space) w_list.append(w_item) return 0 - at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, PyObject], rffi.INT, error=-1) def PyList_Insert(space, w_list, index, w_item): """Insert the item item into list list in front of index index. Return 0 if successful; return -1 and set an exception if unsuccessful. 
@@ -93,7 +93,7 @@ tuple(list).""" return space.call_function(space.w_tuple, w_list) - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyList_Sort(space, w_list): """Sort the items of list in place. Return 0 on success, -1 on failure. This is equivalent to list.sort().""" @@ -102,7 +102,7 @@ space.call_method(w_list, "sort") return 0 - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyList_Reverse(space, w_list): """Reverse the items of list in place. Return 0 on success, -1 on failure. This is the equivalent of list.reverse().""" @@ -111,7 +111,7 @@ space.call_method(w_list, "reverse") return 0 - at cpython_api([PyObject, Py_ssize_t, Py_ssize_t, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t, Py_ssize_t, PyObject], rffi.INT, error=-1) def PyList_SetSlice(space, w_list, low, high, w_sequence): """Set the slice of list between low and high to the contents of itemlist. Analogous to list[low:high] = itemlist. The itemlist may diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -34,12 +34,12 @@ def _PyObject_Del(space, op): raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyObject_CheckBuffer(space, obj): """Return 1 if obj supports the buffer interface otherwise 0.""" raise NotImplementedError - at cpython_api([PyObject, Py_buffer, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_buffer, rffi.INT], rffi.INT, error=-1) def PyObject_GetBuffer(space, obj, view, flags): """Export obj into a Py_buffer, view. These arguments must never be NULL. 
The flags argument is a bit field indicating what @@ -185,21 +185,21 @@ ~Py_buffer.format.""" raise NotImplementedError - at cpython_api([Py_buffer, lltype.Char], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_buffer, lltype.Char], rffi.INT, error=CANNOT_FAIL) def PyBuffer_IsContiguous(space, view, fortran): """Return 1 if the memory defined by the view is C-style (fortran is 'C') or Fortran-style (fortran is 'F') contiguous or either one (fortran is 'A'). Return 0 otherwise.""" raise NotImplementedError - at cpython_api([rffi.INT_real, Py_ssize_t, Py_ssize_t, Py_ssize_t, lltype.Char], lltype.Void) + at cpython_api([rffi.INT, Py_ssize_t, Py_ssize_t, Py_ssize_t, lltype.Char], lltype.Void) def PyBuffer_FillContiguousStrides(space, ndim, shape, strides, itemsize, fortran): """Fill the strides array with byte-strides of a contiguous (C-style if fortran is 'C' or Fortran-style if fortran is 'F' array of the given shape with the given number of bytes per element.""" raise NotImplementedError - at cpython_api([Py_buffer, PyObject, rffi.VOIDP, Py_ssize_t, rffi.INT_real, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([Py_buffer, PyObject, rffi.VOIDP, Py_ssize_t, rffi.INT, rffi.INT], rffi.INT, error=-1) def PyBuffer_FillInfo(space, view, obj, buf, len, readonly, infoflags): """Fill in a buffer-info structure, view, correctly for an exporter that can only share a contiguous chunk of memory of "unsigned bytes" of the given @@ -214,7 +214,7 @@ memoryview object.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real, lltype.Char], PyObject) + at cpython_api([PyObject, rffi.INT, lltype.Char], PyObject) def PyMemoryView_GetContiguous(space, obj, buffertype, order): """Create a memoryview object to a contiguous chunk of memory (in either 'C' or 'F'ortran order) from an object that defines the buffer @@ -223,7 +223,7 @@ new bytes object.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at 
cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyMemoryView_Check(space, obj): """Return true if the object obj is a memoryview object. It is not currently allowed to create subclasses of memoryview.""" @@ -236,13 +236,13 @@ check its type, you must do it yourself or you will risk crashes.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyByteArray_Check(space, o): """Return true if the object o is a bytearray object or an instance of a subtype of the bytearray type.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyByteArray_CheckExact(space, o): """Return true if the object o is a bytearray object, but not an instance of a subtype of the bytearray type.""" @@ -278,7 +278,7 @@ NULL pointer.""" raise NotImplementedError - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PyByteArray_Resize(space, bytearray, len): """Resize the internal buffer of bytearray to len.""" raise NotImplementedError @@ -293,7 +293,7 @@ """Macro version of PyByteArray_Size().""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyCell_Check(space, ob): """Return true if ob is a cell object; ob must not be NULL.""" raise NotImplementedError @@ -316,7 +316,7 @@ borrow_from() raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyCell_Set(space, cell, value): """Set the contents of the cell object cell to value. This releases the reference to any current content of the cell. value may be NULL. 
cell @@ -331,7 +331,7 @@ be a cell object.""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) def PyClass_IsSubclass(space, klass, base): """Return true if klass is a subclass of base. Return false in all other cases.""" raise NotImplementedError @@ -342,17 +342,17 @@ used as the positional and keyword parameters to the object's constructor.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyCode_Check(space, co): """Return true if co is a code object""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyCode_GetNumFree(space, co): """Return the number of free variables in co.""" raise NotImplementedError - at cpython_api([rffi.INT_real, rffi.INT_real, rffi.INT_real, rffi.INT_real, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, rffi.INT_real, PyObject], PyCodeObject) + at cpython_api([rffi.INT, rffi.INT, rffi.INT, rffi.INT, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, PyObject, rffi.INT, PyObject], PyCodeObject) def PyCode_New(space, argcount, nlocals, stacksize, flags, code, consts, names, varnames, freevars, cellvars, filename, name, firstlineno, lnotab): """Return a new code object. If you need a dummy code object to create a frame, use PyCode_NewEmpty() instead. Calling @@ -360,7 +360,7 @@ version since the definition of the bytecode changes often.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PyCodec_Register(space, search_function): """Register a new codec search function. 
@@ -408,7 +408,7 @@ """Get a StreamWriter factory function for the given encoding.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, PyObject], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP, PyObject], rffi.INT, error=-1) def PyCodec_RegisterError(space, name, error): """Register the error handling callback function error under the given name. This callback function will be called by a codec when it encounters @@ -563,7 +563,7 @@ instead.""" raise NotImplementedError - at cpython_api([rffi.DOUBLE, lltype.Char, rffi.INT_real, rffi.INT_real, rffi.INTP], rffi.CCHARP) + at cpython_api([rffi.DOUBLE, lltype.Char, rffi.INT, rffi.INT, rffi.INTP], rffi.CCHARP) def PyOS_double_to_string(space, val, format_code, precision, flags, ptype): """Convert a double val to a string using supplied format_code, precision, and flags. @@ -619,14 +619,14 @@ """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyTZInfo_Check(space, ob): """Return true if ob is of type PyDateTime_TZInfoType or a subtype of PyDateTime_TZInfoType. ob must not be NULL. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyTZInfo_CheckExact(space, ob): """Return true if ob is of type PyDateTime_TZInfoType. ob must not be NULL. @@ -653,7 +653,7 @@ def PyDescr_NewClassMethod(space, type, method): raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyDescr_IsData(space, descr): """Return true if the descriptor objects descr describes a data attribute, or false if it describes a method. 
descr must be a descriptor object; there is @@ -673,7 +673,7 @@ """ raise NotImplementedError - at cpython_api([PyObject, PyObject, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, rffi.INT], rffi.INT, error=-1) def PyDict_Merge(space, a, b, override): """Iterate over mapping object b adding key-value pairs to dictionary a. b may be a dictionary, or any object supporting PyMapping_Keys() @@ -684,7 +684,7 @@ """ raise NotImplementedError - at cpython_api([PyObject, PyObject, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, rffi.INT], rffi.INT, error=-1) def PyDict_MergeFromSeq2(space, a, seq2, override): """Update or merge into dictionary a, from the key-value pairs in seq2. seq2 must be an iterable object producing iterable objects of length 2, @@ -700,7 +700,7 @@ """ raise NotImplementedError - at cpython_api([rffi.INT_real], PyObject) + at cpython_api([rffi.INT], PyObject) def PyErr_SetFromWindowsErr(space, ierr): """This is a convenience function to raise WindowsError. If called with ierr of 0, the error code returned by a call to GetLastError() @@ -713,7 +713,7 @@ Return value: always NULL.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, rffi.INT], PyObject) def PyErr_SetExcFromWindowsErr(space, type, ierr): """Similar to PyErr_SetFromWindowsErr(), with an additional parameter specifying the exception type to be raised. Availability: Windows. 
@@ -721,7 +721,7 @@ Return value: always NULL.""" raise NotImplementedError - at cpython_api([rffi.INT_real, rffi.CCHARP], PyObject) + at cpython_api([rffi.INT, rffi.CCHARP], PyObject) def PyErr_SetFromWindowsErrWithFilename(space, ierr, filename): """Similar to PyErr_SetFromWindowsErr(), with the additional behavior that if filename is not NULL, it is passed to the constructor of @@ -729,7 +729,7 @@ Return value: always NULL.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real, rffi.CCHARP], PyObject) + at cpython_api([PyObject, rffi.INT, rffi.CCHARP], PyObject) def PyErr_SetExcFromWindowsErrWithFilename(space, type, ierr, filename): """Similar to PyErr_SetFromWindowsErrWithFilename(), with an additional parameter specifying the exception type to be raised. Availability: Windows. @@ -737,7 +737,7 @@ Return value: always NULL.""" raise NotImplementedError - at cpython_api([PyObject, rffi.CCHARP, rffi.CCHARP, rffi.INT_real, rffi.CCHARP, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARP, rffi.CCHARP, rffi.INT, rffi.CCHARP, PyObject], rffi.INT, error=-1) def PyErr_WarnExplicit(space, category, message, filename, lineno, module, registry): """Issue a warning message with explicit control over all warning attributes. This is a straightforward wrapper around the Python function @@ -753,7 +753,7 @@ It may be called without holding the interpreter lock.""" raise NotImplementedError - at cpython_api([rffi.INT_real], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([rffi.INT], rffi.INT, error=CANNOT_FAIL) def PySignal_SetWakeupFd(space, fd): """This utility function specifies a file descriptor to which a '\0' byte will be written whenever a signal is received. 
It returns the previous such file @@ -791,27 +791,27 @@ """Return the object attribute of the given exception object.""" raise NotImplementedError - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PyUnicodeDecodeError_GetStart(space, exc, start): """Get the start attribute of the given exception object and place it into *start. start must not be NULL. Return 0 on success, -1 on failure.""" raise NotImplementedError - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PyUnicodeDecodeError_SetStart(space, exc, start): """Set the start attribute of the given exception object to start. Return 0 on success, -1 on failure.""" raise NotImplementedError - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PyUnicodeDecodeError_GetEnd(space, exc, end): """Get the end attribute of the given exception object and place it into *end. end must not be NULL. Return 0 on success, -1 on failure.""" raise NotImplementedError - at cpython_api([PyObject, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObject, Py_ssize_t], rffi.INT, error=-1) def PyUnicodeDecodeError_SetEnd(space, exc, end): """Set the end attribute of the given exception object to end. Return 0 on success, -1 on failure.""" @@ -822,13 +822,13 @@ """Return the reason attribute of the given exception object.""" raise NotImplementedError - at cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARP], rffi.INT, error=-1) def PyUnicodeDecodeError_SetReason(space, exc, reason): """Set the reason attribute of the given exception object to reason. 
Return 0 on success, -1 on failure.""" raise NotImplementedError - at cpython_api([rffi.CCHARP], rffi.INT_real, error=1) + at cpython_api([rffi.CCHARP], rffi.INT, error=1) def Py_EnterRecursiveCall(space, where): """Marks a point where a recursive C-level call is about to be performed. @@ -851,7 +851,7 @@ successful invocation of Py_EnterRecursiveCall().""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.CCHARP, rffi.INT_real], PyObject) + at cpython_api([FILE, rffi.CCHARP, rffi.CCHARP, rffi.INT], PyObject) def PyFile_FromFile(space, fp, name, mode, close): """Create a new PyFileObject from the already-open standard C file pointer, fp. The function close will be called when the file should be @@ -900,27 +900,27 @@ borrow_from() raise NotImplementedError - at cpython_api([PyFileObject, rffi.INT_real], lltype.Void) + at cpython_api([PyFileObject, rffi.INT], lltype.Void) def PyFile_SetBufSize(space, p, n): """Available on systems with setvbuf() only. This should only be called immediately after file object creation.""" raise NotImplementedError - at cpython_api([PyFileObject, rffi.CCHARP], rffi.INT_real, error=0) + at cpython_api([PyFileObject, rffi.CCHARP], rffi.INT, error=0) def PyFile_SetEncoding(space, p, enc): """Set the file's encoding for Unicode output to enc. Return 1 on success and 0 on failure. """ raise NotImplementedError - at cpython_api([PyFileObject, rffi.CCHARP, rffi.CCHARP], rffi.INT_real, error=0) + at cpython_api([PyFileObject, rffi.CCHARP, rffi.CCHARP], rffi.INT, error=0) def PyFile_SetEncodingAndErrors(space, p, enc, errors): """Set the file's encoding for Unicode output to enc, and its error mode to err. Return 1 on success and 0 on failure. """ raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, rffi.INT], rffi.INT, error=CANNOT_FAIL) def PyFile_SoftSpace(space, p, newflag): """ This function exists for internal use by the interpreter. 
Set the @@ -933,7 +933,7 @@ but doing so should not be needed.""" raise NotImplementedError - at cpython_api([PyObject, PyObject, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, rffi.INT], rffi.INT, error=-1) def PyFile_WriteObject(space, obj, p, flags): """ Write object obj to file object p. The only supported flag for flags is @@ -942,7 +942,7 @@ appropriate exception will be set.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, PyObject], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP, PyObject], rffi.INT, error=-1) def PyFile_WriteString(space, s, p): """Write string s to file object p. Return 0 on success or -1 on failure; the appropriate exception will be set.""" @@ -968,7 +968,7 @@ """ raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyFloat_ClearFreeList(space): """Clear the float free list. Return the number of items that could not be freed. @@ -1033,7 +1033,7 @@ borrow_from() raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyFunction_SetDefaults(space, op, defaults): """Set the argument default values for the function object op. defaults must be Py_None or a tuple. @@ -1048,7 +1048,7 @@ borrow_from() raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyFunction_SetClosure(space, op, closure): """Set the closure associated with the function object op. closure must be Py_None or a tuple of cell objects. 
@@ -1086,12 +1086,12 @@ extension modules.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyGen_Check(space, ob): """Return true if ob is a generator object; ob must not be NULL.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyGen_CheckExact(space, ob): """Return true if ob's type is PyGen_Type is a generator object; ob must not be NULL.""" @@ -1122,7 +1122,7 @@ -1 as level, meaning relative import.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, PyObject, PyObject, PyObject, rffi.INT_real], PyObject) + at cpython_api([rffi.CCHARP, PyObject, PyObject, PyObject, rffi.INT], PyObject) def PyImport_ImportModuleLevel(space, name, globals, locals, fromlist, level): """Import a module. This is best described by referring to the built-in Python function __import__(), as the standard __import__() function calls @@ -1239,7 +1239,7 @@ """For internal use only.""" raise NotImplementedError - at cpython_api([rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP], rffi.INT, error=-1) def PyImport_ImportFrozenModule(space, name): """Load a frozen module named name. Return 1 for success, 0 if the module is not found, and -1 with an exception set if the initialization @@ -1248,7 +1248,7 @@ reload the module if it was already imported.)""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.VOIDP], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP, rffi.VOIDP], rffi.INT, error=-1) def PyImport_AppendInittab(space, name, initfunc): """Add a single module to the existing table of built-in modules. 
This is a convenience wrapper around PyImport_ExtendInittab(), returning -1 if @@ -1258,7 +1258,7 @@ Py_Initialize().""" raise NotImplementedError - at cpython_api([_inittab], rffi.INT_real, error=-1) + at cpython_api([_inittab], rffi.INT, error=-1) def PyImport_ExtendInittab(space, newtab): """Add a collection of modules to the table of built-in modules. The newtab array must end with a sentinel entry which contains NULL for the name @@ -1282,7 +1282,7 @@ fails.""" raise NotImplementedError - at cpython_api([rffi.INT_real], lltype.Void) + at cpython_api([rffi.INT], lltype.Void) def Py_InitializeEx(space, initsigs): """This function works like Py_Initialize() if initsigs is 1. If initsigs is 0, it skips initialization registration of signal handlers, which @@ -1462,7 +1462,7 @@ sys.version.""" raise NotImplementedError - at cpython_api([rffi.INT_real, rffi.CCHARPP, rffi.INT_real], lltype.Void) + at cpython_api([rffi.INT, rffi.CCHARPP, rffi.INT], lltype.Void) def PySys_SetArgvEx(space, argc, argv, updatepath): """Set sys.argv based on argc and argv. These parameters are similar to those passed to the program's main() function with the difference that the @@ -1500,7 +1500,7 @@ check w/ Guido.""" raise NotImplementedError - at cpython_api([rffi.INT_real, rffi.CCHARPP], lltype.Void) + at cpython_api([rffi.INT, rffi.CCHARPP], lltype.Void) def PySys_SetArgv(space, argc, argv): """This function works like PySys_SetArgvEx() with updatepath set to 1.""" raise NotImplementedError @@ -1564,7 +1564,7 @@ borrow_from() raise NotImplementedError - at cpython_api([lltype.Signed, PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([lltype.Signed, PyObject], rffi.INT, error=CANNOT_FAIL) def PyThreadState_SetAsyncExc(space, id, exc): """Asynchronously raise an exception in a thread. The id argument is the thread id of the target thread; exc is the exception object to be raised. 
This @@ -1777,20 +1777,20 @@ as defined in the system header files).""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyInt_ClearFreeList(space): """Clear the integer free list. Return the number of items that could not be freed. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PySeqIter_Check(space, op): """Return true if the type of op is PySeqIter_Type. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyCallIter_Check(space, op): """Return true if the type of op is PyCallIter_Type. """ @@ -1821,7 +1821,7 @@ """ raise NotImplementedError - at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT_real], PyObject) + at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT], PyObject) def PyLong_FromUnicode(space, u, length, base): """Convert a sequence of Unicode digits to a Python long integer value. The first parameter, u, points to the first character of the Unicode string, length @@ -1841,19 +1841,19 @@ """ raise NotImplementedError - at cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARP], rffi.INT, error=-1) def PyMapping_DelItemString(space, o, key): """Remove the mapping for object key from the object o. Return -1 on failure. This is equivalent to the Python statement del o[key].""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyMapping_DelItem(space, o, key): """Remove the mapping for object key from the object o. Return -1 on failure. 
This is equivalent to the Python statement del o[key].""" raise NotImplementedError - at cpython_api([lltype.Signed, FILE, rffi.INT_real], lltype.Void) + at cpython_api([lltype.Signed, FILE, rffi.INT], lltype.Void) def PyMarshal_WriteLongToFile(space, value, file, version): """Marshal a long integer, value, to file. This will only write the least-significant 32 bits of value; regardless of the size of the @@ -1862,14 +1862,14 @@ version indicates the file format.""" raise NotImplementedError - at cpython_api([PyObject, FILE, rffi.INT_real], lltype.Void) + at cpython_api([PyObject, FILE, rffi.INT], lltype.Void) def PyMarshal_WriteObjectToFile(space, value, file, version): """Marshal a Python object, value, to file. version indicates the file format.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, rffi.INT], PyObject) def PyMarshal_WriteObjectToString(space, value, version): """Return a string object containing the marshalled representation of value. @@ -1883,7 +1883,7 @@ regardless of the native size of long.""" raise NotImplementedError - at cpython_api([FILE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([FILE], rffi.INT, error=CANNOT_FAIL) def PyMarshal_ReadShortFromFile(space, file): """Return a C short from the data stream in a FILE* opened for reading. Only a 16-bit value can be read in using this function, @@ -1920,13 +1920,13 @@ changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyMethod_ClearFreeList(space): """Clear the free list. Return the total number of freed items. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyModule_CheckExact(space, p): """Return true if p is a module object, but not a subtype of PyModule_Type. 
@@ -1947,7 +1947,7 @@ SystemError and return NULL.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.INT], rffi.INT, error=-1) def PyModule_AddIntMacro(space, module, macro): """Add an int constant to module. The name and the value are taken from macro. For example PyModule_AddConstant(module, AF_INET) adds the int @@ -1956,13 +1956,13 @@ """ raise NotImplementedError - at cpython_api([PyObject, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARP], rffi.INT, error=-1) def PyModule_AddStringMacro(space, module, macro): """Add a string constant to module. """ raise NotImplementedError - at cpython_api([PyObjectP, PyObjectP], rffi.INT_real, error=-1) + at cpython_api([PyObjectP, PyObjectP], rffi.INT, error=-1) def PyNumber_Coerce(space, p1, p2): """This function takes the addresses of two variables of type PyObject*. If the objects pointed to by *p1 and *p2 have the same type, increment their @@ -1974,7 +1974,7 @@ Python statement o1, o2 = coerce(o1, o2).""" raise NotImplementedError - at cpython_api([PyObjectP, PyObjectP], rffi.INT_real, error=-1) + at cpython_api([PyObjectP, PyObjectP], rffi.INT, error=-1) def PyNumber_CoerceEx(space, p1, p2): """This function is similar to PyNumber_Coerce(), except that it returns 1 when the conversion is not possible and when no error is raised. @@ -1988,7 +1988,7 @@ """ raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, rffi.INT], PyObject) def PyNumber_ToBase(space, n, base): """Returns the integer n converted to base as a string with a base marker of '0b', '0o', or '0x' if applicable. 
When @@ -1998,7 +1998,7 @@ """ raise NotImplementedError - at cpython_api([PyObject, PyObject, rffi.INTP], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, rffi.INTP], rffi.INT, error=-1) def PyObject_Cmp(space, o1, o2, result): """Compare the values of o1 and o2 using a routine provided by o1, if one exists, otherwise with a routine provided by o2. The result of the @@ -2037,12 +2037,12 @@ borrow_from() raise NotImplementedError - at cpython_api([PyFrameObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyFrameObject], rffi.INT, error=CANNOT_FAIL) def PyFrame_GetLineNumber(space, frame): """Return the line number that frame is currently executing.""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyEval_GetRestricted(space): """If there is a current frame and it is executing in restricted mode, return true, otherwise false.""" @@ -2121,32 +2121,32 @@ changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PySet_Check(space, p): """Return true if p is a set object or an instance of a subtype. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyFrozenSet_Check(space, p): """Return true if p is a frozenset object or an instance of a subtype. 
""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyAnySet_Check(space, p): """Return true if p is a set object, a frozenset object, or an instance of a subtype.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyAnySet_CheckExact(space, p): """Return true if p is a set object or a frozenset object but not an instance of a subtype.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyFrozenSet_CheckExact(space, p): """Return true if p is a frozenset object but not an instance of a subtype.""" @@ -2188,7 +2188,7 @@ """Macro form of PySet_Size() without error checking.""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PySet_Contains(space, anyset, key): """Return 1 if found, 0 if not found, and -1 if an error is encountered. Unlike the Python __contains__() method, this function does not automatically @@ -2197,7 +2197,7 @@ set, frozenset, or an instance of a subtype.""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PySet_Add(space, set, key): """Add key to a set instance. Does not apply to frozenset instances. Return 0 on success or -1 on failure. Raise a TypeError if @@ -2210,7 +2210,7 @@ values of brand new frozensets before they are exposed to other code.""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PySet_Discard(space, set, key): """Return 1 if found and removed, 0 if not found (no action taken), and -1 if an error is encountered. 
Does not raise KeyError for missing keys. Raise a @@ -2228,7 +2228,7 @@ set or its subtype.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject], rffi.INT, error=-1) def PySet_Clear(space, set): """Empty an existing set of all elements.""" raise NotImplementedError @@ -2287,7 +2287,7 @@ changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([FILE, rffi.CCHARP], rffi.INT, error=CANNOT_FAIL) def Py_FdIsInteractive(space, fp, filename): """Return true (nonzero) if the standard I/O file fp with name filename is deemed interactive. This is the case for files for which isatty(fileno(fp)) @@ -2304,7 +2304,7 @@ to be called.""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyOS_CheckStack(space): """Return true when the interpreter runs out of stack space. This is a reliable check, but is only available when USE_STACKCHECK is defined (currently @@ -2313,7 +2313,7 @@ own code.""" raise NotImplementedError - at cpython_api([rffi.INT_real], PyOS_sighandler_t) + at cpython_api([rffi.INT], PyOS_sighandler_t) def PyOS_getsig(space, i): """Return the current signal handler for signal i. This is a thin wrapper around either sigaction() or signal(). Do not call those functions @@ -2321,7 +2321,7 @@ (*)(int).""" raise NotImplementedError - at cpython_api([rffi.INT_real, PyOS_sighandler_t], PyOS_sighandler_t) + at cpython_api([rffi.INT, PyOS_sighandler_t], PyOS_sighandler_t) def PyOS_setsig(space, i, h): """Set the signal handler for signal i to be h; return the old signal handler. This is a thin wrapper around either sigaction() or signal(). 
Do @@ -2375,13 +2375,13 @@ """As above, but write to sys.stderr or stderr instead.""" raise NotImplementedError - at cpython_api([rffi.INT_real], lltype.Void) + at cpython_api([rffi.INT], lltype.Void) def Py_Exit(space, status): """Exit the current process. This calls Py_Finalize() and then calls the standard C library function exit(status).""" raise NotImplementedError - at cpython_api([rffi.VOIDP], rffi.INT_real, error=-1) + at cpython_api([rffi.VOIDP], rffi.INT, error=-1) def Py_AtExit(space, func): """Register a cleanup function to be called by Py_Finalize(). The cleanup function will be called with no arguments and should return no value. At @@ -2402,7 +2402,7 @@ require changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyTuple_ClearFreeList(space): """Clear the free list. Return the total number of freed items. """ @@ -2422,35 +2422,35 @@ """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyType_IS_GC(space, o): """Return true if the type object includes support for the cycle detector; this tests the type flag Py_TPFLAGS_HAVE_GC. """ raise NotImplementedError - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyUnicode_ClearFreeList(space): """Clear the free list. Return the total number of freed items. 
""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISTITLE(space, ch): """Return 1 or 0 depending on whether ch is a titlecase character.""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISDIGIT(space, ch): """Return 1 or 0 depending on whether ch is a digit character.""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISNUMERIC(space, ch): """Return 1 or 0 depending on whether ch is a numeric character.""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISALPHA(space, ch): """Return 1 or 0 depending on whether ch is an alphabetic character.""" raise NotImplementedError @@ -2460,13 +2460,13 @@ """Return the character ch converted to title case.""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_TODECIMAL(space, ch): """Return the character ch converted to a decimal positive integer. Return -1 if this is not possible. This macro does not raise exceptions.""" raise NotImplementedError - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_TODIGIT(space, ch): """Return the character ch converted to a single digit integer. Return -1 if this is not possible. 
This macro does not raise exceptions.""" @@ -2694,7 +2694,7 @@ """ raise NotImplementedError - at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.CCHARP, rffi.INT_real], PyObject) + at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.CCHARP, rffi.INT], PyObject) def PyUnicode_EncodeUTF32(space, s, size, errors, byteorder): """Return a Python bytes object holding the UTF-32 encoded value of the Unicode data in s. Output is written according to the following byte order: @@ -2734,7 +2734,7 @@ properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.CCHARP, rffi.INT_real], PyObject) + at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.CCHARP, rffi.INT], PyObject) def PyUnicode_EncodeUTF16(space, s, size, errors, byteorder): """Return a Python string object holding the UTF-16 encoded value of the Unicode data in s. Output is written according to the following byte order: @@ -2777,7 +2777,7 @@ bytes that have been decoded will be stored in consumed.""" raise NotImplementedError - at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT_real, rffi.INT_real, rffi.CCHARP], PyObject) + at cpython_api([rffi.CWCHARP, Py_ssize_t, rffi.INT, rffi.INT, rffi.CCHARP], PyObject) def PyUnicode_EncodeUTF7(space, s, size, base64SetO, base64WhiteSpace, errors): """Encode the Py_UNICODE buffer of the given size using UTF-7 and return a Python bytes object. Return NULL if an exception was raised by @@ -2908,7 +2908,7 @@ changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.INT_real, rffi.CCHARP, rffi.INTP], PyObject) + at cpython_api([rffi.CCHARP, rffi.INT, rffi.CCHARP, rffi.INTP], PyObject) def PyUnicode_DecodeMBCSStateful(space, s, size, errors, consumed): """If consumed is NULL, behave like PyUnicode_DecodeMBCS(). 
If consumed is not NULL, PyUnicode_DecodeMBCSStateful() will not decode @@ -2940,7 +2940,7 @@ changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, rffi.INT], PyObject) def PyUnicode_Splitlines(space, s, keepend): """Split a Unicode string at line breaks, returning a list of Unicode strings. CRLF is considered to be one line break. If keepend is 0, the Line break @@ -2969,7 +2969,7 @@ Unicode string.""" raise NotImplementedError - at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT], rffi.INT, error=-1) def PyUnicode_Tailmatch(space, str, substr, start, end, direction): """Return 1 if substr matches str*[*start:end] at the given tail end (direction == -1 means to do a prefix match, direction == 1 a suffix match), @@ -2980,7 +2980,7 @@ systems.""" raise NotImplementedError - at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT_real], Py_ssize_t, error=-2) + at cpython_api([PyObject, PyObject, Py_ssize_t, Py_ssize_t, rffi.INT], Py_ssize_t, error=-2) def PyUnicode_Find(space, str, substr, start, end, direction): """Return the first position of substr in str*[*start:end] using the given direction (direction == 1 means to do a forward search, direction == -1 a @@ -3013,7 +3013,7 @@ require changes in your code for properly supporting 64-bit systems.""" raise NotImplementedError - at cpython_api([PyObject, PyObject, rffi.INT_real], PyObject) + at cpython_api([PyObject, PyObject, rffi.INT], PyObject) def PyUnicode_RichCompare(space, left, right, op): """Rich compare two unicode strings and return one of the following: @@ -3037,7 +3037,7 @@ format % args. 
The args argument must be a tuple.""" raise NotImplementedError - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, PyObject], rffi.INT, error=-1) def PyUnicode_Contains(space, container, element): """Check whether element is contained in container and return true or false accordingly. @@ -3046,7 +3046,7 @@ there was an error.""" raise NotImplementedError - at cpython_api([rffi.INT_real, rffi.CCHARPP], rffi.INT_real, error=2) + at cpython_api([rffi.INT, rffi.CCHARPP], rffi.INT, error=2) def Py_Main(space, argc, argv): """The main program for the standard interpreter. This is made available for programs which embed Python. The argc and argv parameters should be @@ -3062,25 +3062,25 @@ Py_InspectFlag is not set.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP], rffi.INT, error=-1) def PyRun_AnyFile(space, fp, filename): """This is a simplified interface to PyRun_AnyFileExFlags() below, leaving closeit set to 0 and flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT, error=-1) def PyRun_AnyFileFlags(space, fp, filename, flags): """This is a simplified interface to PyRun_AnyFileExFlags() below, leaving the closeit argument set to 0.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, rffi.INT], rffi.INT, error=-1) def PyRun_AnyFileEx(space, fp, filename, closeit): """This is a simplified interface to PyRun_AnyFileExFlags() below, leaving the flags argument set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, PyCompilerFlags], rffi.INT, error=-1) def PyRun_AnyFileExFlags(space, fp, filename, 
closeit, flags): """If fp refers to a file associated with an interactive device (console or terminal input or Unix pseudo-terminal), return the value of @@ -3089,13 +3089,13 @@ "???" as the filename.""" raise NotImplementedError - at cpython_api([rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP], rffi.INT, error=-1) def PyRun_SimpleString(space, command): """This is a simplified interface to PyRun_SimpleStringFlags() below, leaving the PyCompilerFlags* argument set to NULL.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([rffi.CCHARP, PyCompilerFlags], rffi.INT, error=-1) def PyRun_SimpleStringFlags(space, command, flags): """Executes the Python source code from command in the __main__ module according to the flags argument. If __main__ does not already exist, it @@ -3108,25 +3108,25 @@ Py_InspectFlag is not set.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP], rffi.INT, error=-1) def PyRun_SimpleFile(space, fp, filename): """This is a simplified interface to PyRun_SimpleFileExFlags() below, leaving closeit set to 0 and flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT, error=-1) def PyRun_SimpleFileFlags(space, fp, filename, flags): """This is a simplified interface to PyRun_SimpleFileExFlags() below, leaving closeit set to 0.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, rffi.INT], rffi.INT, error=-1) def PyRun_SimpleFileEx(space, fp, filename, closeit): """This is a simplified interface to PyRun_SimpleFileExFlags() below, leaving flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyCompilerFlags], 
rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, PyCompilerFlags], rffi.INT, error=-1) def PyRun_SimpleFileExFlags(space, fp, filename, closeit, flags): """Similar to PyRun_SimpleStringFlags(), but the Python source code is read from fp instead of an in-memory string. filename should be the name of the @@ -3134,13 +3134,13 @@ returns.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP], rffi.INT, error=-1) def PyRun_InteractiveOne(space, fp, filename): """This is a simplified interface to PyRun_InteractiveOneFlags() below, leaving flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT, error=-1) def PyRun_InteractiveOneFlags(space, fp, filename, flags): """Read and execute a single statement from a file associated with an interactive device according to the flags argument. The user will be @@ -3151,34 +3151,34 @@ Python.h, so must be included specifically if needed.)""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP], rffi.INT, error=-1) def PyRun_InteractiveLoop(space, fp, filename): """This is a simplified interface to PyRun_InteractiveLoopFlags() below, leaving flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) + at cpython_api([FILE, rffi.CCHARP, PyCompilerFlags], rffi.INT, error=-1) def PyRun_InteractiveLoopFlags(space, fp, filename, flags): """Read and execute statements from a file associated with an interactive device until EOF is reached. The user will be prompted using sys.ps1 and sys.ps2. 
Returns 0 at EOF.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.INT_real], _node) + at cpython_api([rffi.CCHARP, rffi.INT], _node) def PyParser_SimpleParseString(space, str, start): """This is a simplified interface to PyParser_SimpleParseStringFlagsFilename() below, leaving filename set to NULL and flags set to 0.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.INT_real, rffi.INT_real], _node) + at cpython_api([rffi.CCHARP, rffi.INT, rffi.INT], _node) def PyParser_SimpleParseStringFlags(space, str, start, flags): """This is a simplified interface to PyParser_SimpleParseStringFlagsFilename() below, leaving filename set to NULL.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT_real, rffi.INT_real], _node) + at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT, rffi.INT], _node) def PyParser_SimpleParseStringFlagsFilename(space, str, filename, start, flags): """Parse Python source code from str using the start token start according to the flags argument. 
The result can be used to create a code object which can @@ -3186,19 +3186,19 @@ many times.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real], _node) + at cpython_api([FILE, rffi.CCHARP, rffi.INT], _node) def PyParser_SimpleParseFile(space, fp, filename, start): """This is a simplified interface to PyParser_SimpleParseFileFlags() below, leaving flags set to 0""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, rffi.INT_real], _node) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, rffi.INT], _node) def PyParser_SimpleParseFileFlags(space, fp, filename, start, flags): """Similar to PyParser_SimpleParseStringFlagsFilename(), but the Python source code is read from fp instead of an in-memory string.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.INT_real, PyObject, PyObject, PyCompilerFlags], PyObject) + at cpython_api([rffi.CCHARP, rffi.INT, PyObject, PyObject, PyCompilerFlags], PyObject) def PyRun_StringFlags(space, str, start, globals, locals, flags): """Execute Python source code from str in the context specified by the dictionaries globals and locals with the compiler flags specified by @@ -3209,19 +3209,19 @@ exception was raised.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyObject, PyObject, rffi.INT_real], PyObject) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, PyObject, PyObject, rffi.INT], PyObject) def PyRun_FileEx(space, fp, filename, start, globals, locals, closeit): """This is a simplified interface to PyRun_FileExFlags() below, leaving flags set to NULL.""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyObject, PyObject, PyCompilerFlags], PyObject) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, PyObject, PyObject, PyCompilerFlags], PyObject) def PyRun_FileFlags(space, fp, filename, start, globals, locals, flags): """This is a simplified interface to PyRun_FileExFlags() below, leaving closeit set to 0.""" 
raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.INT_real, PyObject, PyObject, rffi.INT_real, PyCompilerFlags], PyObject) + at cpython_api([FILE, rffi.CCHARP, rffi.INT, PyObject, PyObject, rffi.INT, PyCompilerFlags], PyObject) def PyRun_FileExFlags(space, fp, filename, start, globals, locals, closeit, flags): """Similar to PyRun_StringFlags(), but the Python source code is read from fp instead of an in-memory string. filename should be the name of the file. @@ -3229,13 +3229,13 @@ returns.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT_real], PyObject) + at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT], PyObject) def Py_CompileString(space, str, filename, start): """This is a simplified interface to Py_CompileStringFlags() below, leaving flags set to NULL.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT_real, PyCompilerFlags], PyObject) + at cpython_api([rffi.CCHARP, rffi.CCHARP, rffi.INT, PyCompilerFlags], PyObject) def Py_CompileStringFlags(space, str, filename, start, flags): """Parse and compile the Python source code in str, returning the resulting code object. The start token is given by start; this can be used to constrain the @@ -3253,7 +3253,7 @@ The other arguments are set to NULL.""" raise NotImplementedError - at cpython_api([PyCodeObject, PyObject, PyObject, PyObjectP, rffi.INT_real, PyObjectP, rffi.INT_real, PyObjectP, rffi.INT_real, PyObject], PyObject) + at cpython_api([PyCodeObject, PyObject, PyObject, PyObjectP, rffi.INT, PyObjectP, rffi.INT, PyObjectP, rffi.INT, PyObject], PyObject) def PyEval_EvalCodeEx(space, co, globals, locals, args, argcount, kws, kwcount, defs, defcount, closure): """Evaluate a precompiled code object, given a particular environment for its evaluation. 
This environment consists of dictionaries of global and local @@ -3267,7 +3267,7 @@ PyEval_EvalFrameEx, for backward compatibility.""" raise NotImplementedError - at cpython_api([PyFrameObject, rffi.INT_real], PyObject) + at cpython_api([PyFrameObject, rffi.INT], PyObject) def PyEval_EvalFrameEx(space, f, throwflag): """This is the main, unvarnished function of Python interpretation. It is literally 2000 lines long. The code object associated with the execution @@ -3277,25 +3277,25 @@ throw() methods of generator objects.""" raise NotImplementedError - at cpython_api([PyCompilerFlags], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyCompilerFlags], rffi.INT, error=CANNOT_FAIL) def PyEval_MergeCompilerFlags(space, cf): """This function changes the flags of the current evaluation frame, and returns true on success, false on failure.""" raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyWeakref_Check(space, ob): """Return true if ob is either a reference or proxy object. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyWeakref_CheckRef(space, ob): """Return true if ob is a reference object. """ raise NotImplementedError - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyWeakref_CheckProxy(space, ob): """Return true if ob is a proxy object. 
""" diff --git a/pypy/module/cpyext/pythonrun.py b/pypy/module/cpyext/pythonrun.py --- a/pypy/module/cpyext/pythonrun.py +++ b/pypy/module/cpyext/pythonrun.py @@ -2,7 +2,7 @@ from pypy.module.cpyext.api import cpython_api, CANNOT_FAIL from pypy.module.cpyext.state import State - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def Py_IsInitialized(space): return 1 diff --git a/pypy/module/cpyext/stubgen.py b/pypy/module/cpyext/stubgen.py --- a/pypy/module/cpyext/stubgen.py +++ b/pypy/module/cpyext/stubgen.py @@ -15,7 +15,7 @@ C_TYPE_TO_PYPY_TYPE = { "void": "lltype.Void", - "int": "rffi.INT_real", + "int": "rffi.INT", "PyTypeObject*": "PyTypeObjectPtr", "PyVarObject*": "PyObject", "const char*": "rffi.CCHARP", diff --git a/pypy/module/cpyext/number.py b/pypy/module/cpyext/number.py --- a/pypy/module/cpyext/number.py +++ b/pypy/module/cpyext/number.py @@ -4,7 +4,7 @@ from pypy.rpython.lltypesystem import rffi, lltype from pypy.tool.sourcetools import func_with_new_name - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyIndex_Check(space, w_obj): """Returns True if o is an index integer (has the nb_index slot of the tp_as_number structure filled in). @@ -15,7 +15,7 @@ except OperationError: return 0 - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyNumber_Check(space, w_obj): """Returns 1 if the object o provides numeric protocols, and false otherwise. 
This function always succeeds.""" diff --git a/pypy/module/cpyext/iterator.py b/pypy/module/cpyext/iterator.py --- a/pypy/module/cpyext/iterator.py +++ b/pypy/module/cpyext/iterator.py @@ -36,7 +36,7 @@ raise return None - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyIter_Check(space, w_obj): """Return true if the object o supports the iterator protocol.""" try: diff --git a/pypy/module/cpyext/structmember.py b/pypy/module/cpyext/structmember.py --- a/pypy/module/cpyext/structmember.py +++ b/pypy/module/cpyext/structmember.py @@ -82,7 +82,7 @@ return w_result - at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, lltype.Ptr(PyMemberDef), PyObject], rffi.INT, error=-1) def PyMember_SetOne(space, obj, w_member, w_value): addr = rffi.cast(ADDR, obj) addr += w_member.c_offset diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -52,32 +52,32 @@ from pypy.module.cpyext.object import PyObject_dealloc PyObject_dealloc(space, py_obj) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISSPACE(space, ch): """Return 1 or 0 depending on whether ch is a whitespace character.""" return unicodedb.isspace(ord(ch)) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISALNUM(space, ch): """Return 1 or 0 depending on whether ch is an alphanumeric character.""" return unicodedb.isalnum(ord(ch)) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISLINEBREAK(space, ch): """Return 1 or 0 depending on whether ch is a linebreak character.""" return 
unicodedb.islinebreak(ord(ch)) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISDECIMAL(space, ch): """Return 1 or 0 depending on whether ch is a decimal character.""" return unicodedb.isdecimal(ord(ch)) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISLOWER(space, ch): """Return 1 or 0 depending on whether ch is a lowercase character.""" return unicodedb.islower(ord(ch)) - at cpython_api([Py_UNICODE], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([Py_UNICODE], rffi.INT, error=CANNOT_FAIL) def Py_UNICODE_ISUPPER(space, ch): """Return 1 or 0 depending on whether ch is an uppercase character.""" return unicodedb.isupper(ord(ch)) @@ -179,7 +179,7 @@ i += 1 return default_encoding - at cpython_api([CONST_STRING], rffi.INT_real, error=-1) + at cpython_api([CONST_STRING], rffi.INT, error=-1) def PyUnicode_SetDefaultEncoding(space, encoding): """Sets the currently active default encoding. 
Returns 0 on success, -1 in case of an error.""" @@ -442,7 +442,7 @@ w_errors = space.w_None return space.call_method(w_str, 'decode', w_encoding, w_errors) - at cpython_api([PyObject, PyObject], rffi.INT_real, error=-2) + at cpython_api([PyObject, PyObject], rffi.INT, error=-2) def PyUnicode_Compare(space, w_left, w_right): """Compare two strings and return -1, 0, 1 for less than, equal, and greater than, respectively.""" diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py +++ b/pypy/module/cpyext/stringobject.py @@ -138,7 +138,7 @@ ref_str.c_buffer = rffi.str2charp(s) return ref_str.c_buffer - at cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT_real, error=-1) + at cpython_api([PyObject, rffi.CCHARPP, rffi.CArrayPtr(Py_ssize_t)], rffi.INT, error=-1) def PyString_AsStringAndSize(space, ref, buffer, length): if not PyString_Check(space, ref): raise OperationError(space.w_TypeError, space.wrap( @@ -170,7 +170,7 @@ w_obj = from_ref(space, ref) return space.len_w(w_obj) - at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) + at cpython_api([PyObjectP, Py_ssize_t], rffi.INT, error=-1) def _PyString_Resize(space, ref, newsize): """A way to resize a string object even though it is "immutable". Only use this to build up a brand new string object; don't use this if the string may already be diff --git a/pypy/module/cpyext/mapping.py b/pypy/module/cpyext/mapping.py --- a/pypy/module/cpyext/mapping.py +++ b/pypy/module/cpyext/mapping.py @@ -4,7 +4,7 @@ from pypy.module.cpyext.pyobject import PyObject - at cpython_api([PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject], rffi.INT, error=CANNOT_FAIL) def PyMapping_Check(space, w_obj): """Return 1 if the object provides mapping protocol, and 0 otherwise. 
This function always succeeds.""" @@ -44,7 +44,7 @@ w_key = space.wrap(rffi.charp2str(key)) return space.getitem(w_obj, w_key) - at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT_real, error=-1) + at cpython_api([PyObject, CONST_STRING, PyObject], rffi.INT, error=-1) def PyMapping_SetItemString(space, w_obj, key, w_value): """Map the object key to the value v in object o. Returns -1 on failure. This is the equivalent of the Python statement o[key] = v.""" @@ -52,7 +52,7 @@ space.setitem(w_obj, w_key, w_value) return 0 - at cpython_api([PyObject, PyObject], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, PyObject], rffi.INT, error=CANNOT_FAIL) def PyMapping_HasKey(space, w_obj, w_key): """Return 1 if the mapping object has the key key and 0 otherwise. This is equivalent to o[key], returning True on success and False @@ -63,7 +63,7 @@ except: return 0 - at cpython_api([PyObject, CONST_STRING], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([PyObject, CONST_STRING], rffi.INT, error=CANNOT_FAIL) def PyMapping_HasKeyString(space, w_obj, key): """Return 1 if the mapping object has the key key and 0 otherwise. This is equivalent to o[key], returning True on success and False diff --git a/pypy/module/cpyext/sliceobject.py b/pypy/module/cpyext/sliceobject.py --- a/pypy/module/cpyext/sliceobject.py +++ b/pypy/module/cpyext/sliceobject.py @@ -65,7 +65,7 @@ return W_SliceObject(w_start, w_stop, w_step) @cpython_api([PySliceObject, Py_ssize_t, Py_ssize_tP, Py_ssize_tP, Py_ssize_tP, - Py_ssize_tP], rffi.INT_real, error=-1) + Py_ssize_tP], rffi.INT, error=-1) def PySlice_GetIndicesEx(space, w_slice, length, start_p, stop_p, step_p, slicelength_p): """Usable replacement for PySlice_GetIndices(). 
Retrieve the start, @@ -83,7 +83,7 @@ return 0 @cpython_api([PySliceObject, Py_ssize_t, Py_ssize_tP, Py_ssize_tP, Py_ssize_tP], - rffi.INT_real, error=-1) + rffi.INT, error=-1) def PySlice_GetIndices(space, w_slice, length, start_p, stop_p, step_p): """Retrieve the start, stop and step indices from the slice object slice, assuming a sequence of length length. Treats indices greater than diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -194,7 +194,7 @@ args_w = space.fixedview(w_args) other_w = args_w[0] return generic_cpy_call(space, func_target, - w_self, other_w, rffi.cast(rffi.INT_real, OP_CONST)) + w_self, other_w, rffi.cast(rffi.INT, OP_CONST)) return inner richcmp_eq = get_richcmp_func(Py_EQ) @@ -211,7 +211,7 @@ w_args_new = space.newtuple(args_w) return space.call(w_func, w_args_new, w_kwds) - at cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, error=-1, external=False) + at cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1, external=False) def slot_tp_init(space, w_self, w_args, w_kwds): w_descr = space.lookup(w_self, '__init__') args = Arguments.frompacked(space, w_args, w_kwds) @@ -253,7 +253,7 @@ if setattr_fn is None: return - @cpython_api([PyObject, PyObject, PyObject], rffi.INT_real, + @cpython_api([PyObject, PyObject, PyObject], rffi.INT, error=-1, external=True) # XXX should not be exported @func_renamer("cpyext_tp_setattro_%s" % (typedef.name,)) def slot_tp_setattro(space, w_self, w_name, w_value): diff --git a/pypy/module/cpyext/funcobject.py b/pypy/module/cpyext/funcobject.py --- a/pypy/module/cpyext/funcobject.py +++ b/pypy/module/cpyext/funcobject.py @@ -65,7 +65,7 @@ assert isinstance(w_method, Method) return borrow_from(w_method, w_method.w_class) - at cpython_api([CONST_STRING, CONST_STRING, rffi.INT_real], PyObject) + at cpython_api([CONST_STRING, CONST_STRING, rffi.INT], PyObject) def PyCode_NewEmpty(space, 
filename, funcname, firstlineno): """Creates a new empty code object with the specified source location.""" return space.wrap(PyCode(space, diff --git a/pypy/module/cpyext/sysmodule.py b/pypy/module/cpyext/sysmodule.py --- a/pypy/module/cpyext/sysmodule.py +++ b/pypy/module/cpyext/sysmodule.py @@ -12,7 +12,7 @@ w_obj = space.finditem_str(w_dict, name) return borrow_from(None, w_obj) - at cpython_api([CONST_STRING, PyObject], rffi.INT_real, error=-1) + at cpython_api([CONST_STRING, PyObject], rffi.INT, error=-1) def PySys_SetObject(space, name, w_obj): """Set name in the sys module to v unless v is NULL, in which case name is deleted from the sys module. Returns 0 on success, -1 diff --git a/pypy/module/cpyext/pystate.py b/pypy/module/cpyext/pystate.py --- a/pypy/module/cpyext/pystate.py +++ b/pypy/module/cpyext/pystate.py @@ -30,7 +30,7 @@ def PyEval_InitThreads(space): return - at cpython_api([], rffi.INT_real, error=CANNOT_FAIL) + at cpython_api([], rffi.INT, error=CANNOT_FAIL) def PyEval_ThreadsInitialized(space): return 1 From commits-noreply at bitbucket.org Wed Mar 2 13:55:24 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:55:24 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Translation of rffi.SIZE_T Message-ID: <20110302125524.A3E06282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42383:22f190b92fd5 Date: 2011-03-02 13:29 +0100 http://bitbucket.org/pypy/pypy/changeset/22f190b92fd5/ Log: Translation of rffi.SIZE_T diff --git a/pypy/translator/c/primitive.py b/pypy/translator/c/primitive.py --- a/pypy/translator/c/primitive.py +++ b/pypy/translator/c/primitive.py @@ -214,3 +214,4 @@ define_c_primitive(rffi.ULONG, 'unsigned long', 'UL') define_c_primitive(rffi.LONGLONG, 'long long', 'LL') define_c_primitive(rffi.ULONGLONG, 'unsigned long long', 'ULL') +define_c_primitive(rffi.SIZE_T, 'size_t') From commits-noreply at bitbucket.org Wed Mar 2 13:55:25 2011 From: commits-noreply at 
bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:55:25 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Remove rffi.INT_real Message-ID: <20110302125525.F30F2282D00@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42384:3c9c94fb91ee Date: 2011-03-02 13:31 +0100 http://bitbucket.org/pypy/pypy/changeset/3c9c94fb91ee/ Log: Remove rffi.INT_real diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -405,10 +405,6 @@ NUMBER_TYPES = setup() platform.numbertype_to_rclass[lltype.Signed] = int # avoid "r_long" for common cases -r_int_real = rarithmetic.build_int("r_int_real", r_int.SIGN, r_int.BITS) -INT_real = lltype.build_number("INT", r_int_real) -platform.numbertype_to_rclass[INT_real] = r_int_real -NUMBER_TYPES.append(INT_real) # ^^^ this creates at least the following names: # -------------------------------------------------------------------- diff --git a/pypy/rpython/lltypesystem/ll2ctypes.py b/pypy/rpython/lltypesystem/ll2ctypes.py --- a/pypy/rpython/lltypesystem/ll2ctypes.py +++ b/pypy/rpython/lltypesystem/ll2ctypes.py @@ -100,7 +100,6 @@ rffi.SHORT: ctypes.c_short, rffi.USHORT: ctypes.c_ushort, rffi.INT: ctypes.c_int, - rffi.INT_real: ctypes.c_int, rffi.UINT: ctypes.c_uint, rffi.LONG: ctypes.c_long, rffi.ULONG: ctypes.c_ulong, diff --git a/pypy/translator/c/primitive.py b/pypy/translator/c/primitive.py --- a/pypy/translator/c/primitive.py +++ b/pypy/translator/c/primitive.py @@ -208,7 +208,6 @@ define_c_primitive(rffi.SHORT, 'short') define_c_primitive(rffi.USHORT, 'unsigned short') define_c_primitive(rffi.INT, 'int') -define_c_primitive(rffi.INT_real, 'int') define_c_primitive(rffi.UINT, 'unsigned int') define_c_primitive(rffi.LONG, 'long', 'L') define_c_primitive(rffi.ULONG, 'unsigned long', 'UL') diff --git a/pypy/translator/c/test/test_database.py b/pypy/translator/c/test/test_database.py --- 
a/pypy/translator/c/test/test_database.py +++ b/pypy/translator/c/test/test_database.py @@ -5,7 +5,7 @@ from pypy.objspace.flow.model import Constant, Variable, SpaceOperation from pypy.objspace.flow.model import Block, Link, FunctionGraph from pypy.rpython.typesystem import getfunctionptr -from pypy.rpython.lltypesystem.rffi import VOIDP, INT_real, INT +from pypy.rpython.lltypesystem.rffi import VOIDP, INT def dump_on_stdout(database): @@ -228,11 +228,13 @@ assert db.gettype(A) == "void *@" def test_intlong_unique(): - A = INT_real - B = Signed + A = INT + B = LONG + C = Signed db = LowLevelDatabase() assert db.gettype(A) == "int @" assert db.gettype(B) == "long @" + assert db.gettype(C) == "long @" # except on win64 def test_recursive_struct(): From commits-noreply at bitbucket.org Wed Mar 2 13:55:27 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:55:27 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Conversion between RPython and the hosting CPython API are now based on the C type. Message-ID: <20110302125527.7C812282D5F@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42385:6d24afdecfcc Date: 2011-03-02 13:52 +0100 http://bitbucket.org/pypy/pypy/changeset/6d24afdecfcc/ Log: Conversion between RPython and the hosting CPython API are now based on the C type. 
diff --git a/pypy/rpython/rint.py b/pypy/rpython/rint.py --- a/pypy/rpython/rint.py +++ b/pypy/rpython/rint.py @@ -439,30 +439,35 @@ py_to_ll_conversion_functions = { - UnsignedLongLong: ('RPyLong_AsUnsignedLongLong', lambda pyo: r_ulonglong(pyo._obj.value)), - SignedLongLong: ('RPyLong_AsLongLong', lambda pyo: r_longlong(pyo._obj.value)), - Unsigned: ('RPyLong_AsUnsignedLong', lambda pyo: r_uint(pyo._obj.value)), - Signed: ('PyInt_AsLong', lambda pyo: int(pyo._obj.value)) + 'ULONGLONG': ('RPyLong_AsUnsignedLongLong', lambda pyo: r_ulonglong(pyo._obj.value)), + 'LONGLONG': ('RPyLong_AsLongLong', lambda pyo: r_longlong(pyo._obj.value)), + 'ULONG': ('RPyLong_AsUnsignedLong', lambda pyo: r_uint(pyo._obj.value)), + 'LONG': ('PyInt_AsLong', lambda pyo: int(pyo._obj.value)) } ll_to_py_conversion_functions = { - UnsignedLongLong: ('PyLong_FromUnsignedLongLong', lambda i: pyobjectptr(i)), - SignedLongLong: ('PyLong_FromLongLong', lambda i: pyobjectptr(i)), - Unsigned: ('PyLong_FromUnsignedLong', lambda i: pyobjectptr(i)), - Signed: ('PyInt_FromLong', lambda i: pyobjectptr(i)), + 'ULONGLONG': ('PyLong_FromUnsignedLongLong', lambda i: pyobjectptr(i)), + 'LONGLONG': ('PyLong_FromLongLong', lambda i: pyobjectptr(i)), + 'ULONG': ('PyLong_FromUnsignedLong', lambda i: pyobjectptr(i)), + 'LONG': ('PyInt_FromLong', lambda i: pyobjectptr(i)), } - + +# XXX this does not work on win64 +py_to_ll_conversion_functions['Signed'] = py_to_ll_conversion_functions['LONG'] +ll_to_py_conversion_functions['Signed'] = ll_to_py_conversion_functions['LONG'] +py_to_ll_conversion_functions['SIZE_T'] = py_to_ll_conversion_functions['ULONG'] +ll_to_py_conversion_functions['SIZE_T'] = ll_to_py_conversion_functions['ULONG'] class __extend__(pairtype(PyObjRepr, IntegerRepr)): def convert_from_to((r_from, r_to), v, llops): tolltype = r_to.lowleveltype - fnname, callable = py_to_ll_conversion_functions[tolltype] + fnname, callable = py_to_ll_conversion_functions[tolltype._name] return 
llops.gencapicall(fnname, [v], resulttype=r_to, _callable=callable) class __extend__(pairtype(IntegerRepr, PyObjRepr)): def convert_from_to((r_from, r_to), v, llops): fromlltype = r_from.lowleveltype - fnname, callable = ll_to_py_conversion_functions[fromlltype] + fnname, callable = ll_to_py_conversion_functions[fromlltype._name] return llops.gencapicall(fnname, [v], resulttype=pyobj_repr, _callable=callable) From commits-noreply at bitbucket.org Wed Mar 2 13:55:29 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:55:29 +0100 (CET) Subject: [pypy-svn] pypy move-rfloat: Fix an import and the last failure in move-rfloat branch Message-ID: <20110302125529.31E5B2A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: move-rfloat Changeset: r42386:f366c16081b1 Date: 2011-03-02 13:54 +0100 http://bitbucket.org/pypy/pypy/changeset/f366c16081b1/ Log: Fix an import and the last failure in move-rfloat branch diff --git a/pypy/translator/c/test/test_genc.py b/pypy/translator/c/test/test_genc.py --- a/pypy/translator/c/test/test_genc.py +++ b/pypy/translator/c/test/test_genc.py @@ -270,8 +270,7 @@ assert res == 1.5 def test_nan_and_special_values(): - from pypy.translator.c.primitive import isnan, isinf - from pypy.rlib.rarithmetic import copysign + from pypy.rlib.rfloat import isnan, isinf, copysign inf = 1e300 * 1e300 assert isinf(inf) nan = inf/inf From commits-noreply at bitbucket.org Wed Mar 2 13:58:28 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 13:58:28 +0100 (CET) Subject: [pypy-svn] pypy move-rfloat: Close soon-merged branch Message-ID: <20110302125828.32BEA2A2031@codespeak.net> Author: Amaury Forgeot d'Arc Branch: move-rfloat Changeset: r42387:3b96532df128 Date: 2011-03-02 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/3b96532df128/ Log: Close soon-merged branch From commits-noreply at bitbucket.org Wed Mar 2 13:58:28 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 
13:58:28 +0100 (CET) Subject: [pypy-svn] pypy default: merge move-rfloat branch: float and double routines are moved to rlib/rfloat.py, Message-ID: <20110302125828.712D62A2032@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42388:680642124714 Date: 2011-03-02 13:57 +0100 http://bitbucket.org/pypy/pypy/changeset/680642124714/ Log: merge move-rfloat branch: float and double routines are moved to rlib/rfloat.py, let rarithmetic.py deal with integer types only. From commits-noreply at bitbucket.org Wed Mar 2 17:24:25 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 16:24:25 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Yesterday night's progress. Message-ID: <20110302162425.18127.84007@bitbucket01.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/cdd265f8fdd8/ changeset: r3328:cdd265f8fdd8 branch: extradoc user: arigo date: 2011-03-02 16:36:40 summary: Yesterday night's progress. affected #: 1 file (1001 bytes) --- a/talk/stanford-ee380-2011/talk.txt Tue Mar 01 16:17:43 2011 -0500 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 07:36:40 2011 -0800 @@ -221,6 +221,14 @@ RPython code into C code (mainly) +PyPy's Python interpreter +------------------------- + +* A priori similar to CPython, but written in RPython. + +* See demo (py.py) + + The translation toolchain ------------------------- @@ -231,12 +239,17 @@ * See demo -PyPy's Python interpreter -------------------------- +A bit of history +---------------- -* A priori similar to CPython, but written in RPython. 
+* Squeak and Scheme48 are also interpreters written in themselves -* See demo (py.py) +* Or more precisely, like PyPy, a subset of themselves + +* But in PyPy, the RPython subset is at a higher level + +* General rule: *every aspect that is independent from the high-level + description of the interpreter is left out of RPython* RPython is still mostly Python @@ -268,19 +281,6 @@ * here, the code in ``f()`` is RPython, but the loop around it is not. -A bit of history ----------------- - -* Squeak and Scheme48 are also interpreters written in themselves - -* Or more precisely, like PyPy, a subset of themselves - -* In PyPy, the RPython subset is at a higher level - -* General rule: *every aspect that is independent from the high-level - description of the interpreter is left out of RPython* - - Architecture: the interpreter @@ -418,4 +418,53 @@ Architecture: the translation toolchain --------------------------------------------------------------------- -xxx + +Overview +-------- + +* "Translation toolchain": statically compiles RPython code + +* Produces C code (or JVM or .NET code, experimentally) + +* Every aspect that is independent from the high-level + description of the interpreter is left out of RPython + +* Instead, they are added during translation + + +Various aspects +--------------- + +* The object model, e.g. 
how to turn RPython classes and instances + to C structs + +* Garbage collection + +* Execution model: regular or stackless + +* Just-in-Time compiler + + +Translation overview (1) +------------------------ + +* Start with the live RPython program + +* Build the Control Flow Graphs (CFGs) of the functions + +* Perform global type inference + +* We get a type-annotated version of the CFGs + + +Translation overview (2) +------------------------ + +* "Lower" the level of the CFGs: transform their Python-like operations + into C-like operations + +* Do a number of additional transformations to insert the selected "aspects" + +* Generate C code from the low-level CFGs + + Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 17:28:10 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 16:28:10 -0000 Subject: [pypy-svn] commit/extradoc: arigo: GC section Message-ID: <20110302162810.18130.64104@bitbucket01.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/947ce291477f/ changeset: r3329:947ce291477f branch: extradoc user: arigo date: 2011-03-02 17:27:48 summary: GC section affected #: 1 file (2.4 KB) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 07:36:40 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 08:27:48 2011 -0800 @@ -431,6 +431,33 @@ * Instead, they are added during translation +* PyPy = hybrid "research base" + "production-ready" + + +Translation overview (1) +------------------------ + +* Start with the live RPython program + +* Build the Control Flow Graphs (CFGs) of the functions + +* Perform global type inference + +* We get a type-annotated version of the CFGs + +* Demo + + +Translation overview (2) +------------------------ + +* "Lower" the level of the CFGs: transform 
their Python-like operations + into C-like operations + +* Do a number of additional transformations to insert the selected "aspects" + +* Generate C code from the low-level CFGs + Various aspects --------------- @@ -445,26 +472,112 @@ * Just-in-Time compiler -Translation overview (1) ------------------------- +The object model +---------------- -* Start with the live RPython program +* Called "RTyping" internally -* Build the Control Flow Graphs (CFGs) of the functions +* Can target "lltype" or "ootype" -* Perform global type inference +* "lltype" = low-level types = C-like structs and arrays -* We get a type-annotated version of the CFGs +* "ootype" = object-oriented types, for JVM or .NET -Translation overview (2) ------------------------- +The execution model +------------------- -* "Lower" the level of the CFGs: transform their Python-like operations - into C-like operations +* Optionally do a "stackless transformation" -* Do a number of additional transformations to insert the selected "aspects" +* We get microthread capabilities (soft threads) -* Generate C code from the low-level CFGs +* Even if the source code of the interpreter is just recursive + + +Architecture: Garbage collection +--------------------------------------------------------------------- + + +Purpose +------- + +* RPython assumes automatic memory management, like Python + +* But of course C code does not + +* We can use the Boehm GC, but it is far too slow + +* Instead, we wrote our own GCs, and alloc operations are replaced + by calls to the GC + +* Handles finding and freeing unused memory + + +Overview +-------- + +* The GC is written in RPython, too + +* Analyzed like the rest of the program during translation + +* This approach allows testing at all levels + + +The GCs we have written +----------------------- + +* Currently used: "minimark", a generational GC with one young generation + and using mark-and-sweep for the old generation + +* Previously: a hybrid collector using 
generational semi-space collection + and mark-and-sweep for the oldest generation (too complicated) + +* Pretty standard, non-concurrent, non-thread-safe collectors + + +Old experiments +--------------- + +* Reference counting (like CPython)... Does not work well. + +* Mark-and-sweep, a fully non-moving collector + +* Mark-and-compact, a fully compacting, generationless collector, + similar to Squeak. + +* Lesson learned: using a generational collector is essential for + dynamic languages like Python + + +GC transformer +-------------- + +* Inserting a GC in a program being translated is handled by the "GC + transformer" + +* Easy to customize, no fixed API + + +API example (minimark GC) +------------------------- + +* The GC provides functions like "malloc" + +* Plus a number of others: hash, identity_hash, weakref support, + finalizer support + +* The GC transformer inserts tables describing the structure of + RPython objects: sizes, location of further references, etc. + + +Finding the stack roots +----------------------- + +* The hard part: finding all pointers to GC objects from local variables + in the C stack + +* ANSI C solution: all pointers are copied to and fro some custom stack + +* Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 18:01:05 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 17:01:05 -0000 Subject: [pypy-svn] commit/extradoc: 2 new changesets Message-ID: <20110302170105.18128.20241@bitbucket01.managed.contegix.com> 2 new changesets in extradoc: http://bitbucket.org/pypy/extradoc/changeset/0b935badb6f9/ changeset: r3330:0b935badb6f9 branch: extradoc user: lac date: 2011-03-02 17:59:19 summary: 2 pdf fliers. 
the corners one shows where to cut for 8.5 by 11 in case needed affected #: 2 files (817.9 KB) Diff too large to display. http://bitbucket.org/pypy/extradoc/changeset/eefd0fd72319/ changeset: r3331:eefd0fd72319 branch: extradoc user: lac date: 2011-03-02 18:00:03 summary: merge heads affected #: 0 files (0 bytes) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 17:59:19 2011 +0100 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 18:00:03 2011 +0100 @@ -431,6 +431,33 @@ * Instead, they are added during translation +* PyPy = hybrid "research base" + "production-ready" + + +Translation overview (1) +------------------------ + +* Start with the live RPython program + +* Build the Control Flow Graphs (CFGs) of the functions + +* Perform global type inference + +* We get a type-annotated version of the CFGs + +* Demo + + +Translation overview (2) +------------------------ + +* "Lower" the level of the CFGs: transform their Python-like operations + into C-like operations + +* Do a number of additional transformations to insert the selected "aspects" + +* Generate C code from the low-level CFGs + Various aspects --------------- @@ -445,26 +472,112 @@ * Just-in-Time compiler -Translation overview (1) ------------------------- +The object model +---------------- -* Start with the live RPython program +* Called "RTyping" internally -* Build the Control Flow Graphs (CFGs) of the functions +* Can target "lltype" or "ootype" -* Perform global type inference +* "lltype" = low-level types = C-like structs and arrays -* We get a type-annotated version of the CFGs +* "ootype" = object-oriented types, for JVM or .NET -Translation overview (2) ------------------------- +The execution model +------------------- -* "Lower" the level of the CFGs: transform their Python-like operations - into C-like operations +* Optionally do a "stackless transformation" -* Do a number of additional transformations to insert the selected "aspects" +* We get microthread capabilities (soft threads) -* 
Generate C code from the low-level CFGs +* Even if the source code of the interpreter is just recursive + + +Architecture: Garbage collection +--------------------------------------------------------------------- + + +Purpose +------- + +* RPython assumes automatic memory management, like Python + +* But of course C code does not + +* We can use the Boehm GC, but it is far too slow + +* Instead, we wrote our own GCs, and alloc operations are replaced + by calls to the GC + +* Handles finding and freeing unused memory + + +Overview +-------- + +* The GC is written in RPython, too + +* Analyzed like the rest of the program during translation + +* This approach allows testing at all levels + + +The GCs we have written +----------------------- + +* Currently used: "minimark", a generational GC with one young generation + and using mark-and-sweep for the old generation + +* Previously: a hybrid collector using generational semi-space collection + and mark-and-sweep for the oldest generation (too complicated) + +* Pretty standard, non-concurrent, non-thread-safe collectors + + +Old experiments +--------------- + +* Reference counting (like CPython)... Does not work well. + +* Mark-and-sweep, a fully non-moving collector + +* Mark-and-compact, a fully compacting, generationless collector, + similar to Squeak. + +* Lesson learned: using a generational collector is essential for + dynamic languages like Python + + +GC transformer +-------------- + +* Inserting a GC in a program being translated is handled by the "GC + transformer" + +* Easy to customize, no fixed API + + +API example (minimark GC) +------------------------- + +* The GC provides functions like "malloc" + +* Plus a number of others: hash, identity_hash, weakref support, + finalizer support + +* The GC transformer inserts tables describing the structure of + RPython objects: sizes, location of further references, etc. 
+ + +Finding the stack roots +----------------------- + +* The hard part: finding all pointers to GC objects from local variables + in the C stack + +* ANSI C solution: all pointers are copied to and fro some custom stack + +* Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 19:13:43 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 18:13:43 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Finished rough draft. Message-ID: <20110302181343.12970.28884@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/c47f7b63ab2a/ changeset: r3332:c47f7b63ab2a branch: extradoc user: arigo date: 2011-03-02 19:13:24 summary: Finished rough draft. affected #: 1 file (3.8 KB) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 18:00:03 2011 +0100 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:13:24 2011 -0800 @@ -91,7 +91,7 @@ Python is a mess ---------------- -How ``a.attr`` or ``a.method()`` works: +How ``obj.attr`` or ``obj.method()`` works: * ... @@ -176,20 +176,18 @@ * ...or at least, it is "often" the case -Speed ------ +http://speed.pypy.org/ +---------------------- .. 
image:: speed.png -http://speed.pypy.org/ - And (optionally) extra features ------------------------------- * "Stackless" -* Non-Python +* Non-Python interpreters * and many smaller experiments @@ -496,7 +494,7 @@ -Architecture: Garbage collection +Garbage collection --------------------------------------------------------------------- @@ -509,15 +507,18 @@ * We can use the Boehm GC, but it is far too slow -* Instead, we wrote our own GCs, and alloc operations are replaced - by calls to the GC - -* Handles finding and freeing unused memory +* Remember that our GC needs to support both allocating Python-visible + objects and internal objects of the interpreter (lists, instances...) Overview -------- +* We wrote our own GCs, and each alloc operation in the CFGs is replaced + with a call to the GC + +* Handles finding and freeing unused memory + * The GC is written in RPython, too * Analyzed like the rest of the program during translation @@ -581,3 +582,183 @@ * ANSI C solution: all pointers are copied to and fro some custom stack * Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables + + + + +Just-in-Time Compiler +--------------------------------------------------------------------- + + +Goal +---- + +* Speed up the interpreter written in RPython + +* Independent of the language that is being interpreted + +* Let us call it the P-interpreter (P = Python or other) + + +What is a JIT +------------- + +* A JIT selects pieces of the user program (say Java) that would benefit + from compilation instead of interpretation + +* A "method JIT" selects individual Java functions and compiles them, + possibly doing some inlining to improve performance (HotSpot, Psyco) + +* A "tracing JIT" selects individual code paths from loops and compiles + them, inlining aggressively (TraceMonkey, PyPy) + + +Tracing +------- + +* Run the user program, and do some lightweight profiling of loops + +* When a loop is run often enough, enter "Tracing Mode" + +* Run one more 
iteration of the loop in this mode + +* In addition to actually running the next iteration, it records a "trace" + + +Tracing (2) +----------- + +* The trace is then turned into a machine code loop, and directly executed + +* Runs all the further iterations of the loop + + +Tracing (3) +----------- + +* The machine code contains "guards" checking that all conditions met + during tracing are still valid + +* When a guard fails (latest: at the end of the loop), we fall back to + the regular P-interpreter + + +Meta-Tracing in PyPy +-------------------- + +* The explanation above assumes a tracing JIT for the full Python + language + +* Would need to be maintained whenever we change the Python version we + support + +* Instead, we have a "meta-tracing JIT" + +* We trace the P-interpreter's main loop (running N times) interpreting + a P loop (running once) + + +Demo +---- + + +Architecture of the PyPy JIT +---------------------------- + +* In advance, turn the CFGs of the P-interpreter into some bytecode + representation called "jitcode" + +* Uses some hints provided by the P-interpreter author (but not many) + +* "Links" into the P-interpreter's bytecode dispatch loop + +* In this way we add lightweight profiling code + + +Meta-Tracing +------------ + +* When thresholds are reached, we start tracing + +* Tracing is done by running the "jitcodes" in a custom interpreter, + and recording a trace of all operations performed + +* Tracing is slow (double interpretation) but only runs for one iteration + of the loop + + +Optimization +------------ + +* Advanced optimizations of the trace: escaping analysis, integer bounds, + store sinking, string handling, FFI calls, unrolling, virtualrefs... 
+ + +Machine Code Backend +-------------------- + +* Turns a trace into machine code + +* Simple register allocation (linear code) + +* x86, x86-64, (ARM) + +* Guards compiled as conditional jumps to code that restore the full state + + +Blackhole interpreter +--------------------- + +* When a guard fails, we need to go back to the regular P-interpreter + +* Cannot easily re-enter the P-interpreter from anywhere, because it + is just C code + +* Instead we use one more interpreter, the "blackhole interpreter". + + +Bridges +------- + +* When a guard fails often enough, run again the JIT from there + +* Meta-trace, optimize, generate machine code, run it + +* Such extra traces are called "bridges" instead of "loops" + +* In practice, most loops end up needing some number of bridges + +* We get "trees" of machine code + + +More topics +----------- + +* Loops, bridges and "preamble loops" + +* Virtualizables + +* GC integration + +* Memory management of machine code + +* ... + + + + +Conclusion +--------------------------------------------------------------------- + + +Conclusion +---------- + +* PyPy is a platform for writing efficient interpreters for + dynamic languages + +* http://pypy.org/ + +* http://speed.pypy.org/ + +* irc: ``#pypy at freenode.net`` Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 19:14:48 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 18:14:48 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Also add the HTML version. Message-ID: <20110302181448.18129.63490@bitbucket01.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/3f67fdb28f35/ changeset: r3333:3f67fdb28f35 branch: extradoc user: arigo date: 2011-03-02 19:14:42 summary: Also add the HTML version. 
affected #: 1 file (28.5 KB) Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 19:35:40 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 18:35:40 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Typos and comments by Laura Message-ID: <20110302183540.26905.48442@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/fd9cd0537d70/ changeset: r3334:fd9cd0537d70 branch: extradoc user: arigo date: 2011-03-02 19:35:16 summary: Typos and comments by Laura affected #: 1 file (7 bytes) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:14:42 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:35:16 2011 -0800 @@ -36,8 +36,8 @@ print Foo("hello").double().value -In two words ------------- +In two points +------------- * Strongly, trivially, dynamically typed language @@ -157,6 +157,19 @@ * Now contains about 200 KLoC, and 150 KLoc of tests +A bit of history +---------------- + +* Squeak and Scheme48 are also interpreters written in themselves + +* Or more precisely, like PyPy, a subset of themselves + +* But in PyPy, the subset is at a higher level + +* General rule: *every aspect that is independent from the high-level + description of the interpreter is left out of it* + + What is the point of PyPy? 
-------------------------- @@ -237,19 +250,6 @@ * See demo -A bit of history ----------------- - -* Squeak and Scheme48 are also interpreters written in themselves - -* Or more precisely, like PyPy, a subset of themselves - -* But in PyPy, the RPython subset is at a higher level - -* General rule: *every aspect that is independent from the high-level - description of the interpreter is left out of RPython* - - RPython is still mostly Python ------------------------------ @@ -579,7 +579,7 @@ * The hard part: finding all pointers to GC objects from local variables in the C stack -* ANSI C solution: all pointers are copied to and fro some custom stack +* ANSI C solution: all pointers are copied to and from some custom stack * Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables @@ -603,10 +603,10 @@ What is a JIT ------------- -* A JIT selects pieces of the user program (say Java) that would benefit +* A JIT selects pieces of the user program (in language P) that would benefit from compilation instead of interpretation -* A "method JIT" selects individual Java functions and compiles them, +* A "method JIT" selects individual P functions and compiles them, possibly doing some inlining to improve performance (HotSpot, Psyco) * A "tracing JIT" selects individual code paths from loops and compiles @@ -703,7 +703,7 @@ * x86, x86-64, (ARM) -* Guards compiled as conditional jumps to code that restore the full state +* Guards compiled as conditional jumps to code that restores the full state Blackhole interpreter Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 19:36:24 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 18:36:24 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Regenerate html. 
Message-ID: <20110302183624.18128.87490@bitbucket01.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/75f5403529e3/ changeset: r3335:75f5403529e3 branch: extradoc user: arigo date: 2011-03-02 19:36:09 summary: Regenerate html. affected #: 1 file (7 bytes) --- a/talk/stanford-ee380-2011/talk.html Wed Mar 02 10:35:16 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.html Wed Mar 02 10:36:09 2011 -0800 @@ -364,8 +364,8 @@ print Foo("hello").double().value -
-

In two words

+
+

In two points

  • Strongly, trivially, dynamically typed language
  • Ints, floats, longs, string, unicode, @@ -461,6 +461,16 @@
  • Now contains about 200 KLoC, and 150 KLoc of tests
+
+

A bit of history

+
    +
  • Squeak and Scheme48 are also interpreters written in themselves
  • +
  • Or more precisely, like PyPy, a subset of themselves
  • +
  • But in PyPy, the subset is at a higher level
  • +
  • General rule: every aspect that is independent from the high-level +description of the interpreter is left out of it
  • +
+

What is the point of PyPy?

    @@ -524,16 +534,6 @@
  • See demo
-
-

A bit of history

-
    -
  • Squeak and Scheme48 are also interpreters written in themselves
  • -
  • Or more precisely, like PyPy, a subset of themselves
  • -
  • But in PyPy, the RPython subset is at a higher level
  • -
  • General rule: every aspect that is independent from the high-level -description of the interpreter is left out of RPython
  • -
-

RPython is still mostly Python

    @@ -798,7 +798,7 @@
    • The hard part: finding all pointers to GC objects from local variables in the C stack
    • -
    • ANSI C solution: all pointers are copied to and fro some custom stack
    • +
    • ANSI C solution: all pointers are copied to and from some custom stack
    • Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables
@@ -816,9 +816,9 @@

What is a JIT

    -
  • A JIT selects pieces of the user program (say Java) that would benefit +
  • A JIT selects pieces of the user program (in language P) that would benefit from compilation instead of interpretation
  • -
  • A "method JIT" selects individual Java functions and compiles them, +
  • A "method JIT" selects individual P functions and compiles them, possibly doing some inlining to improve performance (HotSpot, Psyco)
  • A "tracing JIT" selects individual code paths from loops and compiles them, inlining aggressively (TraceMonkey, PyPy)
  • @@ -897,7 +897,7 @@
  • Turns a trace into machine code
  • Simple register allocation (linear code)
  • x86, x86-64, (ARM)
  • -
  • Guards compiled as conditional jumps to code that restore the full state
  • +
  • Guards compiled as conditional jumps to code that restores the full state
Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Wed Mar 2 19:57:31 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 18:57:31 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Updates. Change the logo. Message-ID: <20110302185731.12973.97206@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/e651359b8442/ changeset: r3336:e651359b8442 branch: extradoc user: arigo date: 2011-03-02 19:57:24 summary: Updates. Change the logo. affected #: 3 files (3.6 KB) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:36:09 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:57:24 2011 -0800 @@ -762,3 +762,6 @@ * http://speed.pypy.org/ * irc: ``#pypy at freenode.net`` + +* noisebridge sprint this weekend (from 10am): + https://www.noisebridge.net/wiki/Getting_Here Binary file talk/stanford-ee380-2011/ui/py-web.png has changed --- a/talk/stanford-ee380-2011/ui/py.css Wed Mar 02 10:36:09 2011 -0800 +++ b/talk/stanford-ee380-2011/ui/py.css Wed Mar 02 10:57:24 2011 -0800 @@ -20,7 +20,7 @@ background-image: url("py-web.png"); background-repeat: no-repeat; margin: 3px; - height: 120px; + height: 76px; border-bottom: 1px solid black; } Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. 
From commits-noreply at bitbucket.org Wed Mar 2 20:23:00 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:00 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: hg merge default Message-ID: <20110302192300.AFCC92A2070@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42389:8cc22ca5785c Date: 2011-03-02 13:58 +0100 http://bitbucket.org/pypy/pypy/changeset/8cc22ca5785c/ Log: hg merge default diff --git a/pypy/rlib/rarithmetic.py b/pypy/rlib/rarithmetic.py --- a/pypy/rlib/rarithmetic.py +++ b/pypy/rlib/rarithmetic.py @@ -33,12 +33,10 @@ """ -import sys, math +import sys from pypy.rpython import extregistry from pypy.rlib import objectmodel -USE_SHORT_FLOAT_REPR = True # XXX make it a translation option? - # set up of machine internals _bits = 0 _itest = 1 @@ -60,117 +58,6 @@ LONG_BIT_SHIFT += 1 assert LONG_BIT_SHIFT < 99, "LONG_BIT_SHIFT value not found?" -INFINITY = 1e200 * 1e200 -NAN = INFINITY / INFINITY - -try: - # Try to get math functions added in 2.6. - from math import isinf, isnan, copysign, acosh, asinh, atanh, log1p -except ImportError: - def isinf(x): - "NOT_RPYTHON" - return x == INFINITY or x == -INFINITY - - def isnan(v): - "NOT_RPYTHON" - return v != v - - def copysign(x, y): - """NOT_RPYTHON. Return x with the sign of y""" - if x < 0.: - x = -x - if y > 0. or (y == 0. and math.atan2(y, -1.) > 0.): - return x - else: - return -x - - _2_to_m28 = 3.7252902984619141E-09; # 2**-28 - _2_to_p28 = 268435456.0; # 2**28 - _ln2 = 6.93147180559945286227E-01 - - def acosh(x): - "NOT_RPYTHON" - if isnan(x): - return NAN - if x < 1.: - raise ValueError("math domain error") - if x >= _2_to_p28: - if isinf(x): - return x - else: - return math.log(x) + _ln2 - if x == 1.: - return 0. - if x >= 2.: - t = x * x - return math.log(2. * x - 1. / (x + math.sqrt(t - 1.0))) - t = x - 1.0 - return log1p(t + math.sqrt(2. 
* t + t * t)) - - def asinh(x): - "NOT_RPYTHON" - absx = abs(x) - if isnan(x) or isinf(x): - return x - if absx < _2_to_m28: - return x - if absx > _2_to_p28: - w = math.log(absx) + _ln2 - elif absx > 2.: - w = math.log(2. * absx + 1. / (math.sqrt(x * x + 1.) + absx)) - else: - t = x * x - w = log1p(absx + t / (1. + math.sqrt(1. + t))) - return copysign(w, x) - - def atanh(x): - "NOT_RPYTHON" - if isnan(x): - return x - absx = abs(x) - if absx >= 1.: - raise ValueError("math domain error") - if absx < _2_to_m28: - return x - if absx < .5: - t = absx + absx - t = .5 * log1p(t + t * absx / (1. - absx)) - else: - t = .5 * log1p((absx + absx) / (1. - absx)) - return copysign(t, x) - - def log1p(x): - "NOT_RPYTHON" - from pypy.rlib import rfloat - if abs(x) < rfloat.DBL_EPSILON // 2.: - return x - elif -.5 <= x <= 1.: - y = 1. + x - return math.log(y) - ((y - 1.) - x) / y - else: - return math.log(1. + x) - -try: - from math import expm1 # Added in Python 2.7. -except ImportError: - def expm1(x): - "NOT_RPYTHON" - if abs(x) < .7: - u = math.exp(x) - if u == 1.: - return x - return (u - 1.) * x / math.log(u) - return math.exp(x) - 1. - -def round_away(x): - # round() from libm, which is not available on all platforms! - absx = abs(x) - if absx - math.floor(absx) >= .5: - r = math.ceil(absx) - else: - r = math.floor(absx) - return copysign(r, x) - def intmask(n): if isinstance(n, int): return int(n) # possibly bool->int @@ -243,6 +130,7 @@ # successfully be casted to an int. if sys.maxint == 2147483647: def ovfcheck_float_to_int(x): + from pypy.rlib.rfloat import isnan if isnan(x): raise OverflowError if -2147483649.0 < x < 2147483648.0: @@ -253,6 +141,7 @@ # Note the "<= x <" here, as opposed to "< x <" above. # This is justified by test_typed in translator/c/test. 
def ovfcheck_float_to_int(x): + from pypy.rlib.rfloat import isnan if isnan(x): raise OverflowError if -9223372036854776832.0 <= x < 9223372036854775296.0: @@ -550,259 +439,6 @@ (False, 64): r_ulonglong, } -def rstring_to_float(s): - if USE_SHORT_FLOAT_REPR: - from pypy.rlib.rdtoa import strtod - return strtod(s) - - sign, before_point, after_point, exponent = break_up_float(s) - - if not before_point and not after_point: - raise ValueError - - return parts_to_float(sign, before_point, after_point, exponent) - -# float as string -> sign, beforept, afterpt, exponent -def break_up_float(s): - i = 0 - - sign = '' - before_point = '' - after_point = '' - exponent = '' - - if s[i] in '+-': - sign = s[i] - i += 1 - - while i < len(s) and s[i] in '0123456789': - before_point += s[i] - i += 1 - - if i == len(s): - return sign, before_point, after_point, exponent - - if s[i] == '.': - i += 1 - while i < len(s) and s[i] in '0123456789': - after_point += s[i] - i += 1 - - if i == len(s): - return sign, before_point, after_point, exponent - - if s[i] not in 'eE': - raise ValueError - - i += 1 - if i == len(s): - raise ValueError - - if s[i] in '-+': - exponent += s[i] - i += 1 - - if i == len(s): - raise ValueError - - while i < len(s) and s[i] in '0123456789': - exponent += s[i] - i += 1 - - if i != len(s): - raise ValueError - - return sign, before_point, after_point, exponent - -# string -> float helper - -def parts_to_float(sign, beforept, afterpt, exponent): - "NOT_RPYTHON" - if not exponent: - exponent = '0' - return float("%s%s.%se%s" % (sign, beforept, afterpt, exponent)) - -# float -> string - -DTSF_STR_PRECISION = 12 - -DTSF_SIGN = 0x1 -DTSF_ADD_DOT_0 = 0x2 -DTSF_ALT = 0x4 - -DIST_FINITE = 1 -DIST_NAN = 2 -DIST_INFINITY = 3 - -# Equivalent to CPython's PyOS_double_to_string -def _formatd(x, code, precision, flags): - "NOT_RPYTHON" - if flags & DTSF_ALT: - alt = '#' - else: - alt = '' - - if code == 'r': - fmt = "%r" - else: - fmt = "%%%s.%d%s" % (alt, precision, 
code) - s = fmt % (x,) - - if flags & DTSF_ADD_DOT_0: - # We want float numbers to be recognizable as such, - # i.e., they should contain a decimal point or an exponent. - # However, %g may print the number as an integer; - # in such cases, we append ".0" to the string. - for c in s: - if c in '.eE': - break - else: - s += '.0' - elif code == 'r' and s.endswith('.0'): - s = s[:-2] - - return s - -def formatd(x, code, precision, flags=0): - if USE_SHORT_FLOAT_REPR: - from pypy.rlib.rdtoa import dtoa_formatd - return dtoa_formatd(x, code, precision, flags) - else: - return _formatd(x, code, precision, flags) - -def double_to_string(value, tp, precision, flags): - if isnan(value): - special = DIST_NAN - elif isinf(value): - special = DIST_INFINITY - else: - special = DIST_FINITE - result = formatd(value, tp, precision, flags) - return result, special - -if USE_SHORT_FLOAT_REPR: - def round_double(value, ndigits): - # The basic idea is very simple: convert and round the double to - # a decimal string using _Py_dg_dtoa, then convert that decimal - # string back to a double with _Py_dg_strtod. There's one minor - # difficulty: Python 2.x expects round to do - # round-half-away-from-zero, while _Py_dg_dtoa does - # round-half-to-even. So we need some way to detect and correct - # the halfway cases. - - # a halfway value has the form k * 0.5 * 10**-ndigits for some - # odd integer k. Or in other words, a rational number x is - # exactly halfway between two multiples of 10**-ndigits if its - # 2-valuation is exactly -ndigits-1 and its 5-valuation is at - # least -ndigits. For ndigits >= 0 the latter condition is - # automatically satisfied for a binary float x, since any such - # float has nonnegative 5-valuation. For 0 > ndigits >= -22, x - # needs to be an integral multiple of 5**-ndigits; we can check - # this using fmod. 
For -22 > ndigits, there are no halfway - # cases: 5**23 takes 54 bits to represent exactly, so any odd - # multiple of 0.5 * 10**n for n >= 23 takes at least 54 bits of - # precision to represent exactly. - - sign = copysign(1.0, value) - value = abs(value) - - # find 2-valuation value - m, expo = math.frexp(value) - while m != math.floor(m): - m *= 2.0 - expo -= 1 - - # determine whether this is a halfway case. - halfway_case = 0 - if expo == -ndigits - 1: - if ndigits >= 0: - halfway_case = 1 - elif ndigits >= -22: - # 22 is the largest k such that 5**k is exactly - # representable as a double - five_pow = 1.0 - for i in range(-ndigits): - five_pow *= 5.0 - if math.fmod(value, five_pow) == 0.0: - halfway_case = 1 - - # round to a decimal string; use an extra place for halfway case - strvalue = formatd(value, 'f', ndigits + halfway_case) - - if halfway_case: - buf = [c for c in strvalue] - if ndigits >= 0: - endpos = len(buf) - 1 - else: - endpos = len(buf) + ndigits - # Sanity checks: there should be exactly ndigits+1 places - # following the decimal point, and the last digit in the - # buffer should be a '5' - if not objectmodel.we_are_translated(): - assert buf[endpos] == '5' - if '.' 
in buf: - assert endpos == len(buf) - 1 - assert buf.index('.') == len(buf) - ndigits - 2 - - # increment and shift right at the same time - i = endpos - 1 - carry = 1 - while i >= 0: - digit = ord(buf[i]) - if digit == ord('.'): - buf[i+1] = chr(digit) - i -= 1 - digit = ord(buf[i]) - - carry += digit - ord('0') - buf[i+1] = chr(carry % 10 + ord('0')) - carry /= 10 - i -= 1 - buf[0] = chr(carry + ord('0')) - if ndigits < 0: - buf.append('0') - - strvalue = ''.join(buf) - - return sign * rstring_to_float(strvalue) - -else: - # fallback version, to be used when correctly rounded - # binary<->decimal conversions aren't available - def round_double(value, ndigits): - if ndigits >= 0: - if ndigits > 22: - # pow1 and pow2 are each safe from overflow, but - # pow1*pow2 ~= pow(10.0, ndigits) might overflow - pow1 = math.pow(10.0, ndigits - 22) - pow2 = 1e22 - else: - pow1 = math.pow(10.0, ndigits) - pow2 = 1.0 - - y = (value * pow1) * pow2 - # if y overflows, then rounded value is exactly x - if isinf(y): - return value - - else: - pow1 = math.pow(10.0, -ndigits); - pow2 = 1.0 # unused; for translation - y = value / pow1 - - if y >= 0.0: - z = math.floor(y + 0.5) - else: - z = math.ceil(y - 0.5) - if math.fabs(y-z) == 1.0: # obscure case, see the test - z = y - - if ndigits >= 0: - z = (z / pow2) / pow1 - else: - z *= pow1 - return z - # the 'float' C type class r_singlefloat(object): diff --git a/pypy/translator/c/primitive.py b/pypy/translator/c/primitive.py --- a/pypy/translator/c/primitive.py +++ b/pypy/translator/c/primitive.py @@ -1,7 +1,8 @@ import sys from pypy.rlib.objectmodel import Symbolic, ComputedIntSymbolic from pypy.rlib.objectmodel import CDefinedIntSymbolic -from pypy.rlib.rarithmetic import r_longlong, isinf, isnan +from pypy.rlib.rarithmetic import r_longlong +from pypy.rlib.rfloat import isinf, isnan from pypy.rpython.lltypesystem.lltype import * from pypy.rpython.lltypesystem import rffi, llgroup from pypy.rpython.lltypesystem.llmemory import 
Address, \ From commits-noreply at bitbucket.org Wed Mar 2 20:23:06 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:06 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix rffi callback tests: Message-ID: <20110302192306.912112A2076@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42390:115ed6c4bee8 Date: 2011-03-02 15:08 +0100 http://bitbucket.org/pypy/pypy/changeset/115ed6c4bee8/ Log: Fix rffi callback tests: - arithmetic works only on lltype.Signed type - llptr callbacks must precisely cast their return type diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -243,7 +243,7 @@ from pypy.rpython.lltypesystem import lltype from pypy.rpython.lltypesystem.lloperation import llop if hasattr(callable, '_errorcode_'): - errorcode = callable._errorcode_ + errorcode = cast(TP.TO.RESULT, callable._errorcode_) else: errorcode = TP.TO.RESULT._example() callable_name = getattr(callable, '__name__', '?') diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py --- a/pypy/rpython/lltypesystem/test/test_rffi.py +++ b/pypy/rpython/lltypesystem/test/test_rffi.py @@ -407,7 +407,7 @@ def test_c_callback(self): eating_callback = self.eating_callback() def g(i): - return i + 3 + return cast(lltype.Signed, i) + 3 def f(): return eating_callback(3, g) @@ -423,7 +423,7 @@ return i def two(i): - return i + 2 + return cast(lltype.Signed, i) + 2 def f(i): if i > 3: @@ -441,7 +441,7 @@ eating_callback = self.eating_callback() def raising(i): - if i > 3: + if cast(lltype.Signed, i) > 3: raise ValueError else: return 3 @@ -456,8 +456,8 @@ def test_callback_already_llptr(self): eating_callback = self.eating_callback() def g(i): - return i + 3 - G = lltype.Ptr(lltype.FuncType([lltype.Signed], lltype.Signed)) + return cast(LONG, cast(lltype.Signed, i) + 3) + G = 
lltype.Ptr(lltype.FuncType([LONG], LONG)) def f(): return eating_callback(3, llhelper(G, g)) @@ -467,9 +467,9 @@ def test_pass_opaque_pointer_via_callback(self): eating_callback = self.eating_callback() - TP = lltype.Ptr(lltype.GcStruct('X', ('x', lltype.Signed))) + TP = lltype.Ptr(lltype.GcStruct('X', ('x', LONG))) struct = lltype.malloc(TP.TO) # gc structure - struct.x = 8 + struct.x = cast(LONG, 8) def g(i): return get_keepalive_object(i, TP).x From commits-noreply at bitbucket.org Wed Mar 2 20:23:07 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:07 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Turn some rffi.LONG into lltype.Signed, Message-ID: <20110302192307.888F32A2076@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42391:81d8c9304a58 Date: 2011-03-02 15:44 +0100 http://bitbucket.org/pypy/pypy/changeset/81d8c9304a58/ Log: Turn some rffi.LONG into lltype.Signed, all tests in test_rffi.py pass diff --git a/pypy/rpython/memory/gc/inspector.py b/pypy/rpython/memory/gc/inspector.py --- a/pypy/rpython/memory/gc/inspector.py +++ b/pypy/rpython/memory/gc/inspector.py @@ -109,8 +109,8 @@ self.gc = gc self.gcflag = gc.gcflag_extra self.fd = rffi.cast(rffi.INT, fd) - self.writebuffer = lltype.malloc(rffi.LONGP.TO, self.BUFSIZE, - flavor='raw') + self.writebuffer = lltype.malloc(rffi.CArrayPtr(lltype.Signed).TO, + self.BUFSIZE, flavor='raw') self.buf_count = 0 if self.gcflag == 0: self.seen = AddressDict() diff --git a/pypy/rlib/rdtoa.py b/pypy/rlib/rdtoa.py --- a/pypy/rlib/rdtoa.py +++ b/pypy/rlib/rdtoa.py @@ -41,8 +41,8 @@ try: result = dg_strtod(ll_input, end_ptr) - endpos = (rffi.cast(rffi.LONG, end_ptr[0]) - - rffi.cast(rffi.LONG, ll_input)) + endpos = (rffi.cast(lltype.Signed, end_ptr[0]) - + rffi.cast(lltype.Signed, ll_input)) if endpos == 0 or endpos < len(input): raise ValueError("invalid input at position %d" % (endpos,)) From commits-noreply at bitbucket.org Wed Mar 2 20:23:08 2011 From: 
commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:08 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix types in _winreg module Message-ID: <20110302192308.230422A2072@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42392:850936b3345f Date: 2011-03-02 16:23 +0100 http://bitbucket.org/pypy/pypy/changeset/850936b3345f/ Log: Fix types in _winreg module diff --git a/pypy/module/_winreg/interp_winreg.py b/pypy/module/_winreg/interp_winreg.py --- a/pypy/module/_winreg/interp_winreg.py +++ b/pypy/module/_winreg/interp_winreg.py @@ -270,7 +270,7 @@ if space.is_true(space.isinstance(w_value, space.w_int)): buflen = rffi.sizeof(rwin32.DWORD) buf1 = lltype.malloc(rffi.CArray(rwin32.DWORD), 1, flavor='raw') - buf1[0] = space.uint_w(w_value) + buf1[0] = rffi.cast(rwin32.DWORD, space.uint_w(w_value)) buf = rffi.cast(rffi.CCHARP, buf1) elif typ == rwinreg.REG_SZ or typ == rwinreg.REG_EXPAND_SZ: @@ -471,7 +471,7 @@ raiseWindowsError(space, ret, 'CreateKey') return space.wrap(W_HKEY(rethkey[0])) - at unwrap_spec(subkey=str, res=int, sam=rffi.r_uint) + at unwrap_spec(subkey=str, res=int, sam=r_uint) def CreateKeyEx(space, w_hkey, subkey, res=0, sam=rwinreg.KEY_WRITE): """key = CreateKey(key, sub_key) - Creates or opens the specified key. @@ -521,7 +521,7 @@ if ret != 0: raiseWindowsError(space, ret, 'RegDeleteValue') - at unwrap_spec(subkey=str, res=int, sam=rffi.r_uint) + at unwrap_spec(subkey=str, res=int, sam=r_uint) def OpenKey(space, w_hkey, subkey, res=0, sam=rwinreg.KEY_READ): """key = OpenKey(key, sub_key, res = 0, sam = KEY_READ) - Opens the specified key. @@ -624,7 +624,7 @@ # retrieve such a key name. 
with lltype.scoped_alloc(rffi.CCHARP.TO, 257) as buf: with lltype.scoped_alloc(rwin32.LPDWORD.TO, 1) as retValueSize: - retValueSize[0] = r_uint(257) # includes NULL terminator + retValueSize[0] = rffi.cast(rwin32.DWORD, r_uint(257)) ret = rwinreg.RegEnumKeyEx(hkey, index, buf, retValueSize, null_dword, None, null_dword, lltype.nullptr(rwin32.PFILETIME.TO)) From commits-noreply at bitbucket.org Wed Mar 2 20:23:09 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:09 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Add more lltype.Signed types here as well. Message-ID: <20110302192309.DD3132A207A@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42393:10fb4ec4999d Date: 2011-03-02 16:24 +0100 http://bitbucket.org/pypy/pypy/changeset/10fb4ec4999d/ Log: Add more lltype.Signed types here as well. This may need some rethinking at some point. diff --git a/pypy/rpython/tool/rfficache.py b/pypy/rpython/tool/rfficache.py --- a/pypy/rpython/tool/rfficache.py +++ b/pypy/rpython/tool/rfficache.py @@ -54,9 +54,12 @@ class Platform: def __init__(self): self.types = {} + # XXX similar to rarithmetic._predefined_ints? 
self.numbertype_to_rclass = { lltype.Signed: rarithmetic.r_int, lltype.Unsigned: rarithmetic.r_uint, + lltype.SignedLongLong: rarithmetic.r_longlong, + lltype.UnsignedLongLong: rarithmetic.r_ulonglong, } def inttype(self, name, c_name, signed, **kwds): From commits-noreply at bitbucket.org Wed Mar 2 20:23:10 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:10 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix test_clibffi.py Message-ID: <20110302192310.B0E0F2A207B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42394:bf83c545f4af Date: 2011-03-02 20:05 +0100 http://bitbucket.org/pypy/pypy/changeset/bf83c545f4af/ Log: Fix test_clibffi.py diff --git a/pypy/rlib/test/test_clibffi.py b/pypy/rlib/test/test_clibffi.py --- a/pypy/rlib/test/test_clibffi.py +++ b/pypy/rlib/test/test_clibffi.py @@ -298,9 +298,9 @@ sum_x_y = lib.getrawpointer('sum_x_y', [tpe.ffistruct], slong) buffer = lltype.malloc(rffi.LONGP.TO, 3, flavor='raw') - buffer[0] = 200 - buffer[1] = 220 - buffer[2] = 666 + buffer[0] = rffi.r_long(200) + buffer[1] = rffi.r_long(220) + buffer[2] = rffi.r_long(666) sum_x_y.call([rffi.cast(rffi.VOIDP, buffer)], rffi.cast(rffi.VOIDP, rffi.ptradd(buffer, 2))) assert buffer[2] == 420 @@ -408,8 +408,8 @@ del lib # already delete here buffer = lltype.malloc(rffi.LONGP.TO, 2, flavor='raw') - buffer[0] = 200 - buffer[1] = -1 + buffer[0] = rffi.r_long(200) + buffer[1] = rffi.r_long(-1) fun.call([rffi.cast(rffi.VOIDP, buffer)], rffi.cast(rffi.VOIDP, rffi.ptradd(buffer, 1))) assert buffer[1] == 242 @@ -423,10 +423,10 @@ def setup_class(cls): if sys.platform != 'win32': py.test.skip("Handle to libc library, Win-only test") - BaseFfiTest.setup_class(cls) + BaseFfiTest.setup_class() def test_get_libc_handle(self): - handle = get_libc_handle() + handle = rffi.cast(lltype.Signed, get_libc_handle()) print get_libc_name() print hex(handle) assert handle != 0 From commits-noreply at bitbucket.org Wed Mar 2 20:23:14 
2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:14 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: More fixes, found when attempting to translate. Message-ID: <20110302192314.4388C2A2074@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42395:511f2c681827 Date: 2011-03-02 20:17 +0100 http://bitbucket.org/pypy/pypy/changeset/511f2c681827/ Log: More fixes, found when attempting to translate. diff --git a/pypy/rlib/rarithmetic.py b/pypy/rlib/rarithmetic.py --- a/pypy/rlib/rarithmetic.py +++ b/pypy/rlib/rarithmetic.py @@ -437,7 +437,9 @@ _predefined_ints = { (True, 64): r_longlong, (False, 64): r_ulonglong, - } + (True, 32): build_int('r_int32', True, 32), + (False, 32): build_int('r_uint32', True, 32), +} # the 'float' C type diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -345,7 +345,7 @@ try: if cmd == _c.SIO_RCVALL: option_ptr = rffi.cast(rffi.INTP, value_ptr) - option_ptr[0] = space.int_w(w_option) + option_ptr[0] = rffi.cast(rffi.INT, space.int_w(w_option)) elif cmd == _c.SIO_KEEPALIVE_VALS: w_onoff, w_time, w_interval = space.unpackiterable(w_option) option_ptr = rffi.cast(lltype.Ptr(_c.tcp_keepalive), value_ptr) diff --git a/pypy/module/_multiprocessing/interp_win32.py b/pypy/module/_multiprocessing/interp_win32.py --- a/pypy/module/_multiprocessing/interp_win32.py +++ b/pypy/module/_multiprocessing/interp_win32.py @@ -136,13 +136,13 @@ statep = lltype.malloc(rffi.CArrayPtr(rffi.UINTP).TO, 3, flavor='raw', zero=True) try: if not space.is_w(w_pipemode, space.w_None): - state[0] = space.uint_w(w_pipemode) + state[0] = rffi.cast(rffi.UINT, space.uint_w(w_pipemode)) statep[0] = rffi.ptradd(state, 0) if not space.is_w(w_maxinstances, space.w_None): - state[1] = space.uint_w(w_maxinstances) + state[1] = rffi.cast(rffi.UINT, space.uint_w(w_maxinstances)) statep[1] = 
rffi.ptradd(state, 1) if not space.is_w(w_timeout, space.w_None): - state[2] = space.uint_w(w_timeout) + state[2] = rffi.cast(rffi.UINT, space.uint_w(w_timeout)) statep[2] = rffi.ptradd(state, 2) if not _SetNamedPipeHandleState(handle, statep[0], statep[1], statep[2]): raise wrap_windowserror(space, rwin32.lastWindowsError()) diff --git a/pypy/rlib/clibffi.py b/pypy/rlib/clibffi.py --- a/pypy/rlib/clibffi.py +++ b/pypy/rlib/clibffi.py @@ -134,10 +134,9 @@ FFI_TYPE_STRUCT = rffi_platform.ConstantInteger('FFI_TYPE_STRUCT') - size_t = rffi_platform.SimpleType("size_t", rffi.ULONG) ffi_abi = rffi_platform.SimpleType("ffi_abi", rffi.USHORT) - ffi_type = rffi_platform.Struct('ffi_type', [('size', rffi.ULONG), + ffi_type = rffi_platform.Struct('ffi_type', [('size', rffi.SIZE_T), ('alignment', rffi.USHORT), ('type', rffi.USHORT), ('elements', FFI_TYPE_PP)]) @@ -151,7 +150,7 @@ def configure_simple_type(type_name): l = lltype.malloc(FFI_TYPE_P.TO, flavor='raw', immortal=True) - for tp, name in [(size_t, 'size'), + for tp, name in [(rffi.SIZE_T, 'size'), (rffi.USHORT, 'alignment'), (rffi.USHORT, 'type')]: value = getattr(cConfig, '%s_%s' % (type_name, name)) @@ -178,7 +177,6 @@ setattr(cConfig, k, v) FFI_TYPE_P.TO.become(cConfig.ffi_type) -size_t = cConfig.size_t ffi_abi = cConfig.ffi_abi for name in type_names: From commits-noreply at bitbucket.org Wed Mar 2 20:23:15 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 2 Mar 2011 20:23:15 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Don't do automatic conversions that change the integer size Message-ID: <20110302192315.83EDE2A2074@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42396:855661be5f16 Date: 2011-03-02 20:22 +0100 http://bitbucket.org/pypy/pypy/changeset/855661be5f16/ Log: Don't do automatic conversions that change the integer size diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ 
b/pypy/rpython/lltypesystem/rffi.py @@ -219,11 +219,11 @@ for i, TARGET in unrolling_arg_tps: if to_free[i]: lltype.free(to_free[i], flavor='raw') - if rarithmetic.r_int is not r_int: - if result is INT: - return cast(lltype.Signed, res) - elif result is UINT or result is SIZE_T: - return cast(lltype.Unsigned, res) + # XXX these don't work on win64 + if result is LONG: + return cast(lltype.Signed, res) + elif result is ULONG or result is SIZE_T: + return cast(lltype.Unsigned, res) return res wrapper._annspecialcase_ = 'specialize:ll' wrapper._always_inline_ = True From commits-noreply at bitbucket.org Wed Mar 2 23:53:18 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 22:53:18 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Details Message-ID: <20110302225318.8636.17197@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/05e7a2f3cd4c/ changeset: r3337:05e7a2f3cd4c branch: extradoc user: arigo date: 2011-03-02 23:53:07 summary: Details affected #: 1 file (115 bytes) --- a/talk/stanford-ee380-2011/talk.txt Wed Mar 02 10:57:24 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.txt Wed Mar 02 14:53:07 2011 -0800 @@ -237,6 +237,9 @@ * A priori similar to CPython, but written in RPython. +* RPython is also valid Python: we test extensively by running + it on top of CPython + * See demo (py.py) @@ -255,7 +258,8 @@ * Completely valid Python (can be tested directly) -* Can use lists, dicts, tuples, classes and instances, and so on +* Can use lists, dicts, tuples, classes and instances, and so on, + but it must be type-safe * Contains no garbage collection detail (Py_INCREF/Py_DECREF in CPython) Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. 
From commits-noreply at bitbucket.org Wed Mar 2 23:53:42 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Wed, 02 Mar 2011 22:53:42 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Regenerate html. Message-ID: <20110302225342.8637.43647@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/5fa836e727c0/ changeset: r3338:5fa836e727c0 branch: extradoc user: arigo date: 2011-03-02 23:53:35 summary: Regenerate html. affected #: 1 file (295 bytes) --- a/talk/stanford-ee380-2011/talk.html Wed Mar 02 14:53:07 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.html Wed Mar 02 14:53:35 2011 -0800 @@ -523,6 +523,8 @@

PyPy's Python interpreter

  • A priori similar to CPython, but written in RPython.
  • +
  • RPython is also valid Python: we test extensively by running +it on top of CPython
  • See demo (py.py)
@@ -538,7 +540,8 @@

RPython is still mostly Python

Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 10:23:26 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 09:23:26 -0000 Subject: [pypy-svn] commit/extradoc: fijal: Initial draft of the talk Message-ID: <20110303092326.10793.82439@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/a8719125d81a/ changeset: r3339:a8719125d81a branch: extradoc user: fijal date: 2011-03-03 10:23:06 summary: Initial draft of the talk affected #: 1 file (3.5 KB) Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 10:41:11 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 10:41:11 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed opcode spellings. Message-ID: <20110303094111.959D22A2073@codespeak.net> Author: tav Branch: Changeset: r42397:3a85ccb8bdc8 Date: 2011-03-03 09:40 +0000 http://bitbucket.org/pypy/pypy/changeset/3a85ccb8bdc8/ Log: Fixed opcode spellings. 
diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py --- a/pypy/translator/c/gcc/trackgcroot.py +++ b/pypy/translator/c/gcc/trackgcroot.py @@ -456,7 +456,7 @@ 'inc', 'dec', 'not', 'neg', 'or', 'and', 'sbb', 'adc', 'shl', 'shr', 'sal', 'sar', 'rol', 'ror', 'mul', 'imul', 'div', 'idiv', 'bswap', 'bt', 'rdtsc', - 'punpck', 'pshufd', 'pcmp', 'pand', 'psslw', 'pssld', 'psslq', + 'punpck', 'pshufd', 'pcmp', 'pand', 'psllw', 'pslld', 'psllq', # zero-extending moves should not produce GC pointers 'movz', ]) From commits-noreply at bitbucket.org Thu Mar 3 10:44:13 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 3 Mar 2011 10:44:13 +0100 (CET) Subject: [pypy-svn] pypy out-of-line-guards: Add a new field Message-ID: <20110303094413.BD2BA2A2073@codespeak.net> Author: Maciej Fijalkowski Branch: out-of-line-guards Changeset: r42398:76661debaeba Date: 2011-03-03 11:37 +0200 http://bitbucket.org/pypy/pypy/changeset/76661debaeba/ Log: Add a new field diff --git a/pypy/rpython/lltypesystem/rclass.py b/pypy/rpython/lltypesystem/rclass.py --- a/pypy/rpython/lltypesystem/rclass.py +++ b/pypy/rpython/lltypesystem/rclass.py @@ -86,10 +86,13 @@ # a linked-list of assembler codes to invalidate in case jit_invariant_fields # are modified -# address is a pointer to a loop-level one-element raw array that's -# either 0 or 1 + +# flag - an invalidated flag (0 or 1), used during tracing +# address - backend-dependent number. 
typically an address of assembler to patch ASMCODE = lltype.GcForwardReference() -ASMCODE.become(GcStruct('asmcode', ('address', lltype.Signed), +ASMCODE.become(GcStruct('asmcode', + ('address', lltype.Signed), + ('flag', lltype.Signed), ('next', lltype.Ptr(ASMCODE)))) ASMCODE_APPENDER = lltype.FuncType([llmemory.GCREF, lltype.Signed], From commits-noreply at bitbucket.org Thu Mar 3 10:44:14 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 3 Mar 2011 10:44:14 +0100 (CET) Subject: [pypy-svn] pypy default: typo :-/ Message-ID: <20110303094414.49F072A2073@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42399:7d403820d973 Date: 2011-03-03 11:42 +0200 http://bitbucket.org/pypy/pypy/changeset/7d403820d973/ Log: typo :-/ diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py --- a/pypy/translator/c/gcc/trackgcroot.py +++ b/pypy/translator/c/gcc/trackgcroot.py @@ -456,7 +456,7 @@ 'inc', 'dec', 'not', 'neg', 'or', 'and', 'sbb', 'adc', 'shl', 'shr', 'sal', 'sar', 'rol', 'ror', 'mul', 'imul', 'div', 'idiv', 'bswap', 'bt', 'rdtsc', - 'punpck', 'pshufd', 'pcmp', 'pand', 'psslw', 'pssld', 'psslq', + 'punpck', 'pshufd', 'pcmp', 'pand', 'psllw', 'pslld', 'psllq', # zero-extending moves should not produce GC pointers 'movz', ]) From commits-noreply at bitbucket.org Thu Mar 3 10:44:14 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 3 Mar 2011 10:44:14 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110303094414.80B352A2075@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42400:ed97f0832e66 Date: 2011-03-03 11:43 +0200 http://bitbucket.org/pypy/pypy/changeset/ed97f0832e66/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 3 11:18:47 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 11:18:47 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed failing restructuredtext syntax test. 
Message-ID: <20110303101847.0D1502A2073@codespeak.net> Author: tav Branch: Changeset: r42401:c0796586c843 Date: 2011-03-03 10:18 +0000 http://bitbucket.org/pypy/pypy/changeset/c0796586c843/ Log: Fixed failing restructuredtext syntax test. diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ b/pypy/doc/discussion/thoughts_string_interning.txt @@ -1,5 +1,5 @@ String Interning in PyPy -=========================== +======================== A few thoughts about string interning. CPython gets a remarkable speed-up by interning strings. Interned are all builtin string @@ -9,7 +9,7 @@ saving the need to do a string comparison. Interned Strings in CPython --------------------------- +--------------------------- CPython keeps an internal dictionary named ``interned`` for all of these strings. It contains the string both as key and as value, which means @@ -73,7 +73,7 @@ way to express this (less refcount, exclusion from Boehm, whatever). A prototype brute-force patch --------------------------------- +----------------------------- In order to get some idea how efficient string interning is at the moment, I implemented a quite crude version of interning. I patched space.wrap From commits-noreply at bitbucket.org Thu Mar 3 12:19:39 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 12:19:39 +0100 (CET) Subject: [pypy-svn] pypy default: Added the various *.html files to .gitignore. Message-ID: <20110303111939.73FB12A2076@codespeak.net> Author: tav Branch: Changeset: r42402:0461589dd52f Date: 2011-03-03 11:17 +0000 http://bitbucket.org/pypy/pypy/changeset/0461589dd52f/ Log: Added the various *.html files to .gitignore. 
diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,10 @@ include/*.h lib_pypy/ctypes_config_cache/_[^_]*_*.py pypy/_cache +pypy/doc/*.html +pypy/doc/config/*.html +pypy/doc/discussion/*.html +pypy/translator/c/src/dtoa.o pypy/translator/goal/pypy-c pypy/translator/goal/target*-c release/ \ No newline at end of file From commits-noreply at bitbucket.org Thu Mar 3 12:19:40 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 12:19:40 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed syntax errors in the faq.txt file. Message-ID: <20110303111940.3328E2A2076@codespeak.net> Author: tav Branch: Changeset: r42403:6505daab89bb Date: 2011-03-03 11:17 +0000 http://bitbucket.org/pypy/pypy/changeset/6505daab89bb/ Log: Fixed syntax errors in the faq.txt file. diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt --- a/pypy/doc/faq.txt +++ b/pypy/doc/faq.txt @@ -6,7 +6,7 @@ General -======================================================================== +======= ------------- What is PyPy? @@ -146,9 +146,9 @@ .. _`prolog and javascript`: ---------------------------------------------------------------- +---------------------------------------------------------------- Can PyPy support interpreters for other languages beyond Python? ---------------------------------------------------------------- +---------------------------------------------------------------- The toolsuite that translates the PyPy interpreter is quite general and can be used to create optimized versions of interpreters @@ -167,8 +167,9 @@ .. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 .. _`All of them`: http://codespeak.net/svn/pypy/lang/ + Development -======================================================================== +=========== ----------------------------------------------------------- How do I get into PyPy development? Can I come to sprints? 
@@ -219,8 +220,9 @@ This will disable SELinux's protection and allow PyPy to configure correctly. Be sure to enable it again if you need it! + PyPy translation tool chain -======================================================================== +=========================== ---------------------------------------- Can PyPy compile normal Python programs? @@ -298,9 +300,9 @@ .. _`RPython description`: coding-guide.html#restricted-python -------------------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? -------------------------------------------------------------------------- +--------------------------------------------------------------- +Does RPython have anything to do with Zope's Restricted Python? +--------------------------------------------------------------- No. `Zope's RestrictedPython`_ aims to provide a sandboxed execution environment for CPython. `PyPy's RPython`_ is the implementation @@ -395,9 +397,9 @@ .. _`how do I compile my own interpreters`: --------------------------------------- +------------------------------------- How do I compile my own interpreters? --------------------------------------- +------------------------------------- Start from the example of `pypy/translator/goal/targetnopstandalone.py`_, which you compile by From commits-noreply at bitbucket.org Thu Mar 3 12:19:40 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 12:19:40 +0100 (CET) Subject: [pypy-svn] pypy default: Added workaround for failing tests on systems w/o graphviz. Message-ID: <20110303111940.1C9302A2073@codespeak.net> Author: tav Branch: Changeset: r42404:0f0f9d16dd20 Date: 2011-03-03 11:19 +0000 http://bitbucket.org/pypy/pypy/changeset/0f0f9d16dd20/ Log: Added workaround for failing tests on systems w/o graphviz. 
diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py --- a/py/_plugin/pytest_restdoc.py +++ b/py/_plugin/pytest_restdoc.py @@ -77,8 +77,12 @@ try: self._checkskip(path, self.project.get_htmloutputpath(path)) self.project.process(path) - except KeyboardInterrupt: - raise + except KeyboardInterrupt: + raise + except SystemExit, error: + if error.message == "ERROR: dot not found": + py.test.skip("system doesn't have graphviz installed") + raise except SystemMessage: # we assume docutils printed info on stdout py.test.fail("docutils processing failed, see captured stderr") From commits-noreply at bitbucket.org Thu Mar 3 12:23:02 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 3 Mar 2011 12:23:02 +0100 (CET) Subject: [pypy-svn] pypy default: Document hints Message-ID: <20110303112302.C43982A2073@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42405:ea0d96b9dbf4 Date: 2011-03-03 13:21 +0200 http://bitbucket.org/pypy/pypy/changeset/ea0d96b9dbf4/ Log: Document hints diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py --- a/pypy/rlib/jit.py +++ b/pypy/rlib/jit.py @@ -25,7 +25,14 @@ """ Hint for the JIT possible arguments are: - XXX + + * promote - promote the argument from a variable into a constant + * access_directly - directly access a virtualizable, as a structure + and don't treat it as a virtualizable + * fresh_virtualizable - means that virtualizable was just allocated. + Useful in say Frame.__init__ when we do want + to store things directly on it. 
Has to come with + access_directly=True """ return x From commits-noreply at bitbucket.org Thu Mar 3 12:23:03 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 3 Mar 2011 12:23:03 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110303112303.7CBF52A2075@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42406:8d7202f10076 Date: 2011-03-03 13:22 +0200 http://bitbucket.org/pypy/pypy/changeset/8d7202f10076/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 3 13:21:53 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 13:21:53 +0100 (CET) Subject: [pypy-svn] pypy default: Updated pickling of dictiters to match CPython. Message-ID: <20110303122153.7C4962A2075@codespeak.net> Author: tav Branch: Changeset: r42407:1b3dcfdd2203 Date: 2011-03-03 12:21 +0000 http://bitbucket.org/pypy/pypy/changeset/1b3dcfdd2203/ Log: Updated pickling of dictiters to match CPython. diff --git a/pypy/interpreter/test/test_zzpickle_and_slow.py b/pypy/interpreter/test/test_zzpickle_and_slow.py --- a/pypy/interpreter/test/test_zzpickle_and_slow.py +++ b/pypy/interpreter/test/test_zzpickle_and_slow.py @@ -359,16 +359,14 @@ raises(TypeError, len, liter) assert list(liter) == list(result) - @py.test.mark.xfail + # This test used to be marked xfail and it tried to test for the past + # support of pickling dictiter objects. 
def test_pickle_dictiter(self): import pickle tdict = {'2':2, '3':3, '5':5} diter = iter(tdict) diter.next() - pckl = pickle.dumps(diter) - result = pickle.loads(pckl) - raises(TypeError, len, diter) - assert list(diter) == list(result) + raises(TypeError, pickle.dumps, diter) def test_pickle_reversed(self): import pickle diff --git a/pypy/objspace/std/dicttype.py b/pypy/objspace/std/dicttype.py --- a/pypy/objspace/std/dicttype.py +++ b/pypy/objspace/std/dicttype.py @@ -154,8 +154,8 @@ w_typeobj = space.gettypeobject(dictiter_typedef) raise OperationError( - space.w_RuntimeError, - space.wrap("cannot pickle dictiters with multidicts")) + space.w_TypeError, + space.wrap("can't pickle dictionary-keyiterator objects")) # XXXXXX get that working again # we cannot call __init__ since we don't have the original dict From commits-noreply at bitbucket.org Thu Mar 3 16:13:41 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 16:13:41 +0100 (CET) Subject: [pypy-svn] pypy default: Added support for version info from both hg/git. Message-ID: <20110303151341.829902A2073@codespeak.net> Author: tav Branch: Changeset: r42408:4568a57a31bf Date: 2011-03-03 15:13 +0000 http://bitbucket.org/pypy/pypy/changeset/4568a57a31bf/ Log: Added support for version info from both hg/git. 
diff --git a/pypy/tool/udir.py b/pypy/tool/udir.py --- a/pypy/tool/udir.py +++ b/pypy/tool/udir.py @@ -21,7 +21,7 @@ import os, sys import py -from pypy.tool.version import get_mercurial_info +from pypy.tool.version import get_repo_version_info from py.path import local PYPY_KEEP = int(os.environ.get('PYPY_USESSION_KEEP', '3')) @@ -30,7 +30,7 @@ if dir is not None: dir = local(dir) if basename is None: - info = get_mercurial_info() + info = get_repo_version_info() if info: project, hgtag, hgid = info basename = hgtag diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -63,7 +63,7 @@ 'version' : 'version.get_version(space)', 'pypy_version_info' : 'version.get_pypy_version_info(space)', 'subversion' : 'version.get_subversion_info(space)', - '_mercurial' : 'version.wrap_mercurial_info(space)', + '_mercurial' : 'version.get_repo_info(space)', 'hexversion' : 'version.get_hexversion(space)', 'displayhook' : 'hook.displayhook', diff --git a/pypy/tool/test/test_version.py b/pypy/tool/test/test_version.py --- a/pypy/tool/test/test_version.py +++ b/pypy/tool/test/test_version.py @@ -1,8 +1,8 @@ import os, sys import py -from pypy.tool.version import get_mercurial_info +from pypy.tool.version import get_repo_version_info -def test_get_mercurial_info(): - assert get_mercurial_info(None) - assert get_mercurial_info(os.devnull) == ('PyPy', '?', '?') - assert get_mercurial_info(sys.executable) == ('PyPy', '?', '?') +def test_get_repo_version_info(): + assert get_repo_version_info(None) + assert get_repo_version_info(os.devnull) == ('PyPy', '?', '?') + assert get_repo_version_info(sys.executable) == ('PyPy', '?', '?') diff --git a/pypy/module/sys/version.py b/pypy/module/sys/version.py --- a/pypy/module/sys/version.py +++ b/pypy/module/sys/version.py @@ -28,7 +28,7 @@ import pypy pypydir = os.path.dirname(os.path.abspath(pypy.__file__)) del pypy -from pypy.tool.version import 
get_mercurial_info +from pypy.tool.version import get_repo_version_info import time as t gmtime = t.gmtime() @@ -67,7 +67,7 @@ CPYTHON_VERSION[0], CPYTHON_VERSION[1], CPYTHON_VERSION[2], - hg_universal_id(), + get_repo_version_info()[2], date, time, ver, @@ -83,32 +83,22 @@ def get_pypy_version_info(space): ver = PYPY_VERSION - #ver = ver[:-1] + (svn_revision(),) w_version_info = app.wget(space, "version_info") return space.call_function(w_version_info, space.wrap(ver)) def get_subversion_info(space): return space.wrap(('PyPy', '', '')) - -def wrap_mercurial_info(space): - info = get_mercurial_info() +def get_repo_info(space): + info = get_repo_version_info() if info: - project, hgtag, hgid = info + project, repo_tag, repo_version = info return space.newtuple([space.wrap(project), - space.wrap(hgtag), - space.wrap(hgid)]) + space.wrap(repo_tag), + space.wrap(repo_version)]) else: return space.w_None -def hg_universal_id(): - info = get_mercurial_info() - if info: - return info[2] - else: - return '?' - - def tuple2hex(ver): d = {'alpha': 0xA, 'beta': 0xB, diff --git a/pypy/tool/version.py b/pypy/tool/version.py --- a/pypy/tool/version.py +++ b/pypy/tool/version.py @@ -4,23 +4,51 @@ import pypy pypydir = os.path.dirname(os.path.abspath(pypy.__file__)) -def get_mercurial_info(hgexe=None): - '''Obtain Mercurial version information by invoking the 'hg' command.''' +def get_repo_version_info(hgexe=None): + '''Obtain version information by invoking the 'hg' or 'git' commands.''' # TODO: support extracting from .hg_archival.txt default_retval = 'PyPy', '?', '?' 
pypyroot = os.path.abspath(os.path.join(pypydir, '..')) - if hgexe is None: - hgexe = py.path.local.sysfind('hg') - def maywarn(err): + def maywarn(err, repo_type='Mercurial'): if not err: return from pypy.tool.ansi_print import ansi_log log = py.log.Producer("version") py.log.setconsumer("version", ansi_log) - log.WARNING('Errors getting Mercurial information: %s' % err) + log.WARNING('Errors getting %s information: %s' % (repo_type, err)) + + # Try to see if we can get info from Git if hgexe is not specified. + if not hgexe: + if os.path.isdir(os.path.join(pypyroot, '.git')): + gitexe = py.path.local.sysfind('git') + if gitexe: + try: + p = Popen( + [str(gitexe), 'describe', '--tags', '--always'], + stdout=PIPE, stderr=PIPE + ) + except OSError, e: + maywarn(e, 'Git') + return default_retval + if p.wait() != 0: + maywarn(p.stderr.read(), 'Git') + return default_retval + tag = p.stdout.read().strip() + p = Popen( + [str(gitexe), 'rev-parse', 'HEAD'], + stdout=PIPE, stderr=PIPE + ) + if p.wait() != 0: + maywarn(p.stderr.read(), 'Git') + return 'PyPy', tag, '?' + return 'PyPy', tag, p.stdout.read().strip()[:12] + + # Fallback to trying Mercurial. + if hgexe is None: + hgexe = py.path.local.sysfind('hg') if not os.path.isdir(os.path.join(pypyroot, '.hg')): maywarn('Not running from a Mercurial repository!') From commits-noreply at bitbucket.org Thu Mar 3 16:58:54 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 15:58:54 -0000 Subject: [pypy-svn] commit/extradoc: 4 new changesets Message-ID: <20110303155854.10794.30438@bitbucket03.managed.contegix.com> 4 new changesets in extradoc: http://bitbucket.org/pypy/extradoc/changeset/02e265dc2a8b/ changeset: r3340:02e265dc2a8b branch: extradoc user: lac date: 2011-03-03 15:43:11 summary: arigo? This must be one of Armin's because I did not edit this file. 
But if I have instead gone and clobbered his changes with an earlier version a) I apologise and b) I have no clue how this could happen given that he checked in things later than I. affected #: 1 file (7 bytes) --- a/talk/stanford-ee380-2011/talk.html Wed Mar 02 10:35:16 2011 -0800 +++ b/talk/stanford-ee380-2011/talk.html Thu Mar 03 15:43:11 2011 +0100 @@ -364,8 +364,8 @@ print Foo("hello").double().value -
-

In two words

+
+

In two points

  • Strongly, trivially, dynamically typed language
  • Ints, floats, longs, string, unicode, @@ -461,6 +461,16 @@
  • Now contains about 200 KLoC, and 150 KLoc of tests
+
+

A bit of history

+
    +
  • Squeak and Scheme48 are also interpreters written in themselves
  • +
  • Or more precisely, like PyPy, a subset of themselves
  • +
  • But in PyPy, the subset is at a higher level
  • +
  • General rule: every aspect that is independent from the high-level +description of the interpreter is left out of it
  • +
+

What is the point of PyPy?

    @@ -524,16 +534,6 @@
  • See demo
-
-

A bit of history

-
    -
  • Squeak and Scheme48 are also interpreters written in themselves
  • -
  • Or more precisely, like PyPy, a subset of themselves
  • -
  • But in PyPy, the RPython subset is at a higher level
  • -
  • General rule: every aspect that is independent from the high-level -description of the interpreter is left out of RPython
  • -
-

RPython is still mostly Python

    @@ -798,7 +798,7 @@
    • The hard part: finding all pointers to GC objects from local variables in the C stack
    • -
    • ANSI C solution: all pointers are copied to and fro some custom stack
    • +
    • ANSI C solution: all pointers are copied to and from some custom stack
    • Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables
@@ -816,9 +816,9 @@

What is a JIT

    -
  • A JIT selects pieces of the user program (say Java) that would benefit +
  • A JIT selects pieces of the user program (in language P) that would benefit from compilation instead of interpretation
  • -
  • A "method JIT" selects individual Java functions and compiles them, +
  • A "method JIT" selects individual P functions and compiles them, possibly doing some inlining to improve performance (HotSpot, Psyco)
  • A "tracing JIT" selects individual code paths from loops and compiles them, inlining aggressively (TraceMonkey, PyPy)
  • @@ -897,7 +897,7 @@
  • Turns a trace into machine code
  • Simple register allocation (linear code)
  • x86, x86-64, (ARM)
  • -
  • Guards compiled as conditional jumps to code that restore the full state
  • +
  • Guards compiled as conditional jumps to code that restores the full state
http://bitbucket.org/pypy/extradoc/changeset/f25bd2ac1be9/ changeset: r3341:f25bd2ac1be9 branch: extradoc user: lac date: 2011-03-03 15:45:32 summary: intial revision. notes about the google sf talk affected #: 1 file (4.3 KB) --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/ustour2011/rawnotes/googlesf Thu Mar 03 15:45:32 2011 +0100 @@ -0,0 +1,88 @@ +The building is wonderfully located with views of the Bay. The cafeteria +was most pleasant, and the food was good. Google has 4 floors. 2 of them +are full of engineers - app engine is here among others. One of them is +full of Google staff that aren't engineers. I don't know what they do. +The other floor is full of startups, which have some sort of special +relationship with Google. I never found out how that works, either. + +Several engineers told me that they were really pleased that we chose +to talk to them in SF as well as at the Googleplex. It seems that +most people do not do this, which means they either have to take a +very long train ride to Mt. View to hear neat stuff, or do without. +Just by coming to the SF site made us a lot of fans, it seems. + +Between 25 and 30 engineers showed up. Several (many?) of them were +unknown to either Guido or Wesley Chun, and had to introduce themselves. +Several of them were primarily C++ users. They want seamless integration +with C++ 'natch. 'A way to just import c++ .h files into python' was +mentioned. Some of the people there work primarily on the search engine. +They like prototyping new ideas (in python, I would guess). They spoke of +other people who were not there -- who may or may not work at Google -- +who are designing new experimental languages. Up until this talk they +did not know that PyPy would be good for implementing them. + +Some engineers expressed a desire to come to noisebridge for the sprint. +The mood was entirely upbeat. 
There were no questions of the 'desiged +to make you look like an idiot, designed to show how much I dislike you, +your code, your approach sort'. It sounded like some people were here out +of sheer intellectual curiosity, but some had real life problems they were +interested in using pypy for. + +They seemed completely unaware that PyPy was not just python in python and +could be used to implement any dynamic language. They think this is bad +marketing on our part and that we should call the toolchain something else +and showcase it separately. + +They have lots and lots of SWIG wrapped things at Google. + +Questions indicated that most people had at least some idea as to what +was going on. + +"are they all boxed?" +"how much memory" +"the gc -- you cannot count on dels happening instantaneously, same as +Jython, IronPython" -- does this apply to weakrefs also? +why didn't you use the ref counting gc +why is ref counting slow? +people wanted more indepth on the jit how it works +why are all the blue lines the same size in speed.pypy.org +what is the difference between us and spidermonkey? +do you dispatch? pass information between trqces? are they parameterised by +the incoming trace information as it comes in? +is there still hotspot analysis to determine what to jit? +memory bloat like psyco? +what is the potential for multithreading? +stackless +tail recursion +release infrastructure so people who want to do language experimentation +can do it on top of pypy. you can already? why didn't we know that? +gameboy got a big laugh +how can you tell if your cextension will run on pypy? If you should rewrite +it in RPython for performance? Why are some c extensions faster than others +when linked to pypy? +SWIG. can we support SWIG to ctypes? How hard would it be? +reflex(?) for importing c++ +include c++ header file +cern sprint. 
+ +So the last impression I got before we went to lunch is that there was one guy +in particular, John whose name I forgot, who works in search engines and +writes c++ who is really interested. Guido and Wesley did not know him, +and several others who attended. "People who write C++ all the time in +Google" may be a new target group for us for funding. They seem to have +concrete problems they want to fix. + +at lunch we discussed +sandboxing +google has some thing for sandboxing c. or 2 things. open source. +'nacl' discussing of why pypy was not a good fit there. + +funding +google app engineers can get money out to a santa barbara group that has +an open source app engine clone 'because they have the .edu' -- somebody +else who has a different one and is just another OS developer doesn't +get money. G + W will think about whether HHU could get money from Google. +Google apparantly loves to send money to postdocs, but is that only to +US ones? G + W will also work on a doc about what we could do to approach +other google managers for money. We will meet in atlanta at pycon and +discuss. http://bitbucket.org/pypy/extradoc/changeset/625cd3d0c300/ changeset: r3342:625cd3d0c300 branch: extradoc user: lac date: 2011-03-03 16:54:56 summary: Guido sent me John Plevyak's last name and email. affected #: 1 file (0 bytes) --- a/talk/ustour2011/rawnotes/googlesf Thu Mar 03 15:45:32 2011 +0100 +++ b/talk/ustour2011/rawnotes/googlesf Thu Mar 03 16:54:56 2011 +0100 @@ -66,7 +66,7 @@ cern sprint. So the last impression I got before we went to lunch is that there was one guy -in particular, John whose name I forgot, who works in search engines and +in particular, John Plevyak, login jplev who works in search engines and writes c++ who is really interested. Guido and Wesley did not know him, and several others who attended. "People who write C++ all the time in Google" may be a new target group for us for funding. 
They seem to have http://bitbucket.org/pypy/extradoc/changeset/73c218ea0fe2/ changeset: r3343:73c218ea0fe2 branch: extradoc user: lac date: 2011-03-03 16:58:35 summary: merge heads and I hope these are Armins changes I am getting not me clobbering his. affected #: 0 files (0 bytes) --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/pycon2011/whyslow/talk.rst Thu Mar 03 16:58:35 2011 +0100 @@ -0,0 +1,187 @@ +========================================= +Why is Python slow and how PyPy can help? +========================================= + +What's this talk about? +----------------------- + +* short introduction to JITting + +* how does a tracing JIT work + +* semantics that make Python slow/hard to optimize +XXX cross slow + +Short introduction to JITting +----------------------------- + +* run code with the interpreter + +* observe what it does + +* generate optimized machine code for commonly executed paths + +* using runtime knowledge (types, paths taken) + +Tracing JIT +----------- + +* compiles one loop at a time + +* generates linear code paths, recording what the interpreter did + +* for each possible branch, generate a guard, that exits assembler on triggering + +* if guard fails often enough, start tracing from the failure + +Tracing example +--------------- + +* we have cool tools! 
+ +XXX pic + +Part 2 - python semantics +-------------------------- + +* we're going to concentrate on a few really annoying things + +* frame introspection + +* dynamic dispatch + +* boxing + +* dynamic method lookup + +* attribute access + +Dynamic dispatch +---------------- + +* each operation has to dispatch on the type of the first argument + +* ``a + b`` can call integer addition, string concatenation or custom + ``__add__`` method + +* not much to talk about, tracing JIT deals with this without + extra effort + +* it can get fairly complex (XXX http://hg.python.org/cpython/file/6910af7df354/Objects/abstract.c#l761) + +* all of this logic is constant folded (XXX trace) + +Boxing +------ + +* for dynamic dispatch to work, each object has to be packed in a box + +* ``PyIntObject``, ``PyFloatObject`` etc. + +* it's wasteful, because it requires memory allocations (or pooling) + +* ideally we would not allocate them unless they escape + +* frames get in the way (they escape locals and valuestack) + +XXX more traces + +Frame introspection +------------------- + +* ``sys._getframe()``, ``sys.exc_info()`` has to work + +* require creating python frames on the heap + +* not very convinient for fast access or escape analysis + + +Frame introspection - solution +------------------------------ + +* so called "virtualizables" + +* frames are allocated and unpacked on processor (C) stack + +* in case of frame introspection happening, JIT knows where to find necessary values + +* reconstructed frame looks like it has always been there + +Attribute access +---------------- + +* ``obj.attr`` + +* Look for ``__getattribute__`` on ``obj`` + +* check ``type(obj).__dict__`` for a descriptor + +* check ``obj.__dict__`` for a value + +* 3 dict lookups + +Map dicts +------------- + +* for a common case reduces 3 dict lookups to a list lookup + +* makes objects very compact (same as with ``__slots__``) + +* works even for adding attributes later on, after ``__init__`` + +Map dicts - 
how it works +------------------------- + +* stores a structure remembering common object shapes + +* a dictionary mapping names to numbers in a list + +* a list per object + +* those dictionary lookups are constant-folded away at the + time of JIT compilation + +XXX cool pics + +Dynamic method lookup +--------------------- + +* ``obj.meth()``, what happens? + +* 2 parts: attribute lookup, and method call + +* check ``__dict__`` of ``obj`` and ``type(obj)`` (and the entire MRO) + +* allocate a bound method + +* call the bound method + +Linking it all together +----------------------- + +* array example + +Things we did not talk about +---------------------------- + +* regular expressions + +* generators + +* recursion + +* ``map`` and other looping constructs + +Future directions +----------------- + +* fast ctypes + +* numpy + +Thank you +----------- + +* http://pypy.org + +* http://morepypy.blogspot.com/ --- a/talk/stanford-ee380-2011/talk.html Thu Mar 03 16:54:56 2011 +0100 +++ b/talk/stanford-ee380-2011/talk.html Thu Mar 03 16:58:35 2011 +0100 @@ -523,6 +523,8 @@

PyPy's Python interpreter

  • A priori similar to CPython, but written in RPython.
  • +
  • RPython is also valid Python: we test extensively by running +it on top of CPython
  • See demo (py.py)
@@ -538,7 +540,8 @@

RPython is still mostly Python

  • Completely valid Python (can be tested directly)
  • -
  • Can use lists, dicts, tuples, classes and instances, and so on
  • +
  • Can use lists, dicts, tuples, classes and instances, and so on, +but it must be type-safe
  • Contains no garbage collection detail (Py_INCREF/Py_DECREF in CPython)
  • Really a subset of Python: roughly "how a Java programmer writes his first Python program"
  • @@ -940,6 +943,8 @@
  • http://pypy.org/
  • http://speed.pypy.org/
  • irc: #pypyatfreenode.net
  • +
  • noisebridge sprint this weekend (from 10am): +https://www.noisebridge.net/wiki/Getting_Here
--- a/talk/stanford-ee380-2011/talk.txt Thu Mar 03 16:54:56 2011 +0100 +++ b/talk/stanford-ee380-2011/talk.txt Thu Mar 03 16:58:35 2011 +0100 @@ -237,6 +237,9 @@ * A priori similar to CPython, but written in RPython. +* RPython is also valid Python: we test extensively by running + it on top of CPython + * See demo (py.py) @@ -255,7 +258,8 @@ * Completely valid Python (can be tested directly) -* Can use lists, dicts, tuples, classes and instances, and so on +* Can use lists, dicts, tuples, classes and instances, and so on, + but it must be type-safe * Contains no garbage collection detail (Py_INCREF/Py_DECREF in CPython) @@ -762,3 +766,6 @@ * http://speed.pypy.org/ * irc: ``#pypy at freenode.net`` + +* noisebridge sprint this weekend (from 10am): + https://www.noisebridge.net/wiki/Getting_Here Binary file talk/stanford-ee380-2011/ui/py-web.png has changed --- a/talk/stanford-ee380-2011/ui/py.css Thu Mar 03 16:54:56 2011 +0100 +++ b/talk/stanford-ee380-2011/ui/py.css Thu Mar 03 16:58:35 2011 +0100 @@ -20,7 +20,7 @@ background-image: url("py-web.png"); background-repeat: no-repeat; margin: 3px; - height: 120px; + height: 76px; border-bottom: 1px solid black; } Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 16:59:29 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 16:59:29 +0100 (CET) Subject: [pypy-svn] pypy default: Updated git version info to match hg's tag/branch. Message-ID: <20110303155929.7F7D32A2073@codespeak.net> Author: tav Branch: Changeset: r42409:b9887cd8aab0 Date: 2011-03-03 15:59 +0000 http://bitbucket.org/pypy/pypy/changeset/b9887cd8aab0/ Log: Updated git version info to match hg's tag/branch. 
diff --git a/pypy/tool/version.py b/pypy/tool/version.py --- a/pypy/tool/version.py +++ b/pypy/tool/version.py @@ -27,7 +27,7 @@ if gitexe: try: p = Popen( - [str(gitexe), 'describe', '--tags', '--always'], + [str(gitexe), 'rev-parse', 'HEAD'], stdout=PIPE, stderr=PIPE ) except OSError, e: @@ -36,15 +36,25 @@ if p.wait() != 0: maywarn(p.stderr.read(), 'Git') return default_retval - tag = p.stdout.read().strip() + revision_id = p.stdout.read().strip()[:12] p = Popen( - [str(gitexe), 'rev-parse', 'HEAD'], + [str(gitexe), 'describe', '--tags', '--exact-match'], stdout=PIPE, stderr=PIPE ) if p.wait() != 0: - maywarn(p.stderr.read(), 'Git') - return 'PyPy', tag, '?' - return 'PyPy', tag, p.stdout.read().strip()[:12] + p = Popen([str(gitexe), 'branch'], stdout=PIPE, stderr=PIPE) + if p.wait() != 0: + maywarn(p.stderr.read(), 'Git') + return 'PyPy', '?', revision_id + branch = '?' + for line in p.stdout.read().strip().split('\n'): + if line.startswith('* '): + branch = line[1:].strip() + if branch == '(no branch)': + branch = '?' + break + return 'PyPy', branch, revision_id + return 'PyPy', p.stdout.read().strip(), revision_id # Fallback to trying Mercurial. if hgexe is None: From commits-noreply at bitbucket.org Thu Mar 3 18:20:32 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 3 Mar 2011 18:20:32 +0100 (CET) Subject: [pypy-svn] pypy default: Added support for use outside the PyPy repo. Message-ID: <20110303172032.0F2ED2A2079@codespeak.net> Author: tav Branch: Changeset: r42410:c6d1e8b6dc1f Date: 2011-03-03 17:20 +0000 http://bitbucket.org/pypy/pypy/changeset/c6d1e8b6dc1f/ Log: Added support for use outside the PyPy repo. 
diff --git a/pypy/tool/version.py b/pypy/tool/version.py --- a/pypy/tool/version.py +++ b/pypy/tool/version.py @@ -28,7 +28,7 @@ try: p = Popen( [str(gitexe), 'rev-parse', 'HEAD'], - stdout=PIPE, stderr=PIPE + stdout=PIPE, stderr=PIPE, cwd=pypyroot ) except OSError, e: maywarn(e, 'Git') @@ -39,10 +39,13 @@ revision_id = p.stdout.read().strip()[:12] p = Popen( [str(gitexe), 'describe', '--tags', '--exact-match'], - stdout=PIPE, stderr=PIPE + stdout=PIPE, stderr=PIPE, cwd=pypyroot ) if p.wait() != 0: - p = Popen([str(gitexe), 'branch'], stdout=PIPE, stderr=PIPE) + p = Popen( + [str(gitexe), 'branch'], stdout=PIPE, stderr=PIPE, + cwd=pypyroot + ) if p.wait() != 0: maywarn(p.stderr.read(), 'Git') return 'PyPy', '?', revision_id From commits-noreply at bitbucket.org Thu Mar 3 19:27:25 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 3 Mar 2011 19:27:25 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix a typo: r_uint32 was signed! Message-ID: <20110303182725.F01572A2079@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42411:547ec2881995 Date: 2011-03-03 19:25 +0100 http://bitbucket.org/pypy/pypy/changeset/547ec2881995/ Log: Fix a typo: r_uint32 was signed! It works better this way diff --git a/pypy/rlib/rarithmetic.py b/pypy/rlib/rarithmetic.py --- a/pypy/rlib/rarithmetic.py +++ b/pypy/rlib/rarithmetic.py @@ -437,8 +437,8 @@ _predefined_ints = { (True, 64): r_longlong, (False, 64): r_ulonglong, - (True, 32): build_int('r_int32', True, 32), - (False, 32): build_int('r_uint32', True, 32), + (True, LONG_BIT): r_int, + (False, LONG_BIT): r_uint, } # the 'float' C type From commits-noreply at bitbucket.org Thu Mar 3 19:27:27 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 3 Mar 2011 19:27:27 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Translation fixes. Now it fails in rtyping phase! 
Message-ID: <20110303182727.7B9D12A2079@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42412:7b463bc54073 Date: 2011-03-03 19:26 +0100 http://bitbucket.org/pypy/pypy/changeset/7b463bc54073/ Log: Translation fixes. Now it fails in rtyping phase! diff --git a/pypy/module/_socket/interp_socket.py b/pypy/module/_socket/interp_socket.py --- a/pypy/module/_socket/interp_socket.py +++ b/pypy/module/_socket/interp_socket.py @@ -349,9 +349,12 @@ elif cmd == _c.SIO_KEEPALIVE_VALS: w_onoff, w_time, w_interval = space.unpackiterable(w_option) option_ptr = rffi.cast(lltype.Ptr(_c.tcp_keepalive), value_ptr) - option_ptr.c_onoff = space.uint_w(w_onoff) - option_ptr.c_keepalivetime = space.uint_w(w_time) - option_ptr.c_keepaliveinterval = space.uint_w(w_interval) + rffi.setintfield(option_ptr, 'c_onoff', + space.uint_w(w_onoff)) + rffi.setintfield(option_ptr, 'c_keepalivetime', + space.uint_w(w_time)) + rffi.setintfield(option_ptr, 'c_keepaliveinterval', + space.uint_w(w_interval)) res = _c.WSAIoctl( self.fd, cmd, value_ptr, value_size, diff --git a/pypy/rpython/module/ll_os.py b/pypy/rpython/module/ll_os.py --- a/pypy/rpython/module/ll_os.py +++ b/pypy/rpython/module/ll_os.py @@ -9,7 +9,7 @@ import py from pypy.rpython.module.support import ll_strcpy, OOSupport from pypy.tool.sourcetools import func_with_new_name, func_renamer -from pypy.rlib.rarithmetic import r_longlong +from pypy.rlib.rarithmetic import r_longlong, r_uint from pypy.rpython.extfunc import ( BaseLazyRegistering, lazy_register, register_external) from pypy.rpython.extfunc import registering, registering_if, extdef @@ -1733,7 +1733,7 @@ @registering(rwin32.FormatError) def register_rwin32_FormatError(self): - return extdef([rwin32.DWORD], str, + return extdef([r_uint], str, "rwin32_FormatError", llimpl=rwin32.llimpl_FormatError, ooimpl=rwin32.fake_FormatError) diff --git a/pypy/module/_winreg/interp_winreg.py b/pypy/module/_winreg/interp_winreg.py --- 
a/pypy/module/_winreg/interp_winreg.py +++ b/pypy/module/_winreg/interp_winreg.py @@ -256,7 +256,7 @@ if ret == rwinreg.ERROR_MORE_DATA: # Resize and retry bufSize *= 2 - bufsize_p[0] = bufSize + bufsize_p[0] = rffi.cast(rwin32.LONG, bufSize) continue if ret != 0: diff --git a/pypy/rlib/rmmap.py b/pypy/rlib/rmmap.py --- a/pypy/rlib/rmmap.py +++ b/pypy/rlib/rmmap.py @@ -540,7 +540,7 @@ FILE_BEGIN = 0 high_ref = lltype.malloc(PLONG.TO, 1, flavor='raw') try: - high_ref[0] = newsize_high + high_ref[0] = rffi.cast(LONG, newsize_high) SetFilePointer(self.file_handle, newsize_low, high_ref, FILE_BEGIN) finally: From commits-noreply at bitbucket.org Thu Mar 3 20:11:02 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 19:11:02 -0000 Subject: [pypy-svn] commit/extradoc: fijal: Copy standford talk Message-ID: <20110303191102.10793.85604@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/1bffc0b97b56/ changeset: r3344:1bffc0b97b56 branch: extradoc user: fijal date: 2011-03-03 20:10:16 summary: Copy standford talk affected #: 1 file (15.5 KB) Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 20:15:13 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 19:15:13 -0000 Subject: [pypy-svn] commit/extradoc: fijal: Strike down the whole "translation" part. Message-ID: <20110303191513.10792.98163@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/9aac744389be/ changeset: r3345:9aac744389be branch: extradoc user: fijal date: 2011-03-03 20:15:05 summary: Strike down the whole "translation" part. 
affected #: 1 file (4.0 KB) --- a/talk/yelp/talk.txt Thu Mar 03 21:10:16 2011 +0200 +++ b/talk/yelp/talk.txt Thu Mar 03 21:15:05 2011 +0200 @@ -243,16 +243,6 @@ * See demo (py.py) -The translation toolchain -------------------------- - -* Takes a program written in RPython, a custom subset of Python - -* Outputs the "same" program written in C - -* See demo - - RPython is still mostly Python ------------------------------ @@ -272,6 +262,8 @@ RPython meta-programming ------------------------ +* Python is a meta-programming language for RPython + * RPython is actually only a restriction on the code after being imported, so we can build up everything in (normal) full Python:: @@ -313,16 +305,6 @@ * Then the interpreter is written as methods on this frame object -The object space ----------------- - -* Implements all the built-in types - -* Structure more flexible than CPython's family of C functions - -* Very open to experimentation - - Separation of levels -------------------- @@ -416,180 +398,6 @@ - -Architecture: the translation toolchain ---------------------------------------------------------------------- - - -Overview --------- - -* "Translation toolchain": statically compiles RPython code - -* Produces C code (or JVM or .NET code, experimentally) - -* Every aspect that is independent from the high-level - description of the interpreter is left out of RPython - -* Instead, they are added during translation - -* PyPy = hybrid "research base" + "production-ready" - - -Translation overview (1) ------------------------- - -* Start with the live RPython program - -* Build the Control Flow Graphs (CFGs) of the functions - -* Perform global type inference - -* We get a type-annotated version of the CFGs - -* Demo - - -Translation overview (2) ------------------------- - -* "Lower" the level of the CFGs: transform their Python-like operations - into C-like operations - -* Do a number of additional transformations to insert the selected "aspects" - -* Generate C code 
from the low-level CFGs - - -Various aspects ---------------- - -* The object model, e.g. how to turn RPython classes and instances - to C structs - -* Garbage collection - -* Execution model: regular or stackless - -* Just-in-Time compiler - - -The object model ----------------- - -* Called "RTyping" internally - -* Can target "lltype" or "ootype" - -* "lltype" = low-level types = C-like structs and arrays - -* "ootype" = object-oriented types, for JVM or .NET - - -The execution model -------------------- - -* Optionally do a "stackless transformation" - -* We get microthread capabilities (soft threads) - -* Even if the source code of the interpreter is just recursive - - - - -Garbage collection ---------------------------------------------------------------------- - - -Purpose -------- - -* RPython assumes automatic memory management, like Python - -* But of course C code does not - -* We can use the Boehm GC, but it is far too slow - -* Remember that our GC needs to support both allocating Python-visible - objects and internal objects of the interpreter (lists, instances...) - - -Overview --------- - -* We wrote our own GCs, and each alloc operation in the CFGs is replaced - with a call to the GC - -* Handles finding and freeing unused memory - -* The GC is written in RPython, too - -* Analyzed like the rest of the program during translation - -* This approach allows testing at all levels - - -The GCs we have written ------------------------ - -* Currently used: "minimark", a generational GC with one young generation - and using mark-and-sweep for the old generation - -* Previously: a hybrid collector using generational semi-space collection - and mark-and-sweep for the oldest generation (too complicated) - -* Pretty standard, non-concurrent, non-thread-safe collectors - - -Old experiments ---------------- - -* Reference counting (like CPython)... Does not work well. 
- -* Mark-and-sweep, a fully non-moving collector - -* Mark-and-compact, a fully compacting, generationless collector, - similar to Squeak. - -* Lesson learned: using a generational collector is essential for - dynamic languages like Python - - -GC transformer --------------- - -* Inserting a GC in a program being translated is handled by the "GC - transformer" - -* Easy to customize, no fixed API - - -API example (minimark GC) -------------------------- - -* The GC provides functions like "malloc" - -* Plus a number of others: hash, identity_hash, weakref support, - finalizer support - -* The GC transformer inserts tables describing the structure of - RPython objects: sizes, location of further references, etc. - - -Finding the stack roots ------------------------ - -* The hard part: finding all pointers to GC objects from local variables - in the C stack - -* ANSI C solution: all pointers are copied to and from some custom stack - -* Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables - - - - Just-in-Time Compiler --------------------------------------------------------------------- Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 21:09:42 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 20:09:42 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Starting to write the first section of the Yelp talk. Message-ID: <20110303200942.8635.37291@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/545ef999d96a/ changeset: r3348:545ef999d96a branch: extradoc user: arigo date: 2011-03-03 21:09:28 summary: Starting to write the first section of the Yelp talk. 
affected #: 4 files (115.2 KB) Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Thu Mar 3 22:11:48 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 3 Mar 2011 22:11:48 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix annotation of the rsre module Message-ID: <20110303211148.D5F182A2079@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42413:942863678466 Date: 2011-03-03 22:09 +0100 http://bitbucket.org/pypy/pypy/changeset/942863678466/ Log: Fix annotation of the rsre module diff --git a/pypy/rlib/rsre/rsre_char.py b/pypy/rlib/rsre/rsre_char.py --- a/pypy/rlib/rsre/rsre_char.py +++ b/pypy/rlib/rsre/rsre_char.py @@ -5,7 +5,7 @@ from pypy.rlib.rlocale import tolower, isalnum from pypy.rlib.unroll import unrolling_iterable from pypy.rlib import jit -from pypy.rlib.rarithmetic import int_between +from pypy.rlib.rarithmetic import int_between, r_int # Note: the unicode parts of this module require you to call # rsre_char.set_unicode_db() first, to select one of the modules @@ -58,7 +58,7 @@ assert unicodedb is not None char_ord = unicodedb.tolower(char_ord) elif flags & SRE_FLAG_LOCALE: - return tolower(char_ord) + return r_int(tolower(char_ord)) else: if int_between(ord('A'), char_ord, ord('Z') + 1): # ASCII lower char_ord += ord('a') - ord('A') @@ -93,7 +93,7 @@ return unicodedb.isalnum(code) or code == underline def is_loc_alnum(code): - return code < 256 and isalnum(code) + return code < 256 and r_int(isalnum(code)) def is_loc_word(code): return code == underline or is_loc_alnum(code) From commits-noreply at bitbucket.org Thu Mar 3 22:11:49 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 3 Mar 2011 22:11:49 +0100 (CET) Subject: [pypy-svn] pypy real-rffi.INT: Fix "import pypy.jit.metainterp.test", not sure 
whether this change is right. Message-ID: <20110303211149.CD77B2A2079@codespeak.net> Author: Amaury Forgeot d'Arc Branch: real-rffi.INT Changeset: r42414:a786782ca95c Date: 2011-03-03 22:11 +0100 http://bitbucket.org/pypy/pypy/changeset/a786782ca95c/ Log: Fix "import pypy.jit.metainterp.test", not sure whether this change is right. diff --git a/pypy/jit/codewriter/longlong.py b/pypy/jit/codewriter/longlong.py --- a/pypy/jit/codewriter/longlong.py +++ b/pypy/jit/codewriter/longlong.py @@ -7,7 +7,7 @@ """ import sys -from pypy.rpython.lltypesystem import lltype +from pypy.rpython.lltypesystem import lltype, rffi if sys.maxint > 2147483647: @@ -34,7 +34,7 @@ supports_longlong = True r_float_storage = rarithmetic.r_longlong - FLOATSTORAGE = lltype.SignedLongLong + FLOATSTORAGE = rffi.LONGLONG getfloatstorage = longlong2float.float2longlong getrealfloat = longlong2float.longlong2float diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -1642,7 +1642,7 @@ if longlong.FLOATSTORAGE is lltype.Float: s_FloatStorage = annmodel.SomeFloat() -elif longlong.FLOATSTORAGE is lltype.SignedLongLong: +elif longlong.FLOATSTORAGE is rffi.LONGLONG: s_FloatStorage = annmodel.SomeInteger(knowntype=longlong.r_float_storage) else: assert 0 From commits-noreply at bitbucket.org Fri Mar 4 11:50:14 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Fri, 04 Mar 2011 10:50:14 -0000 Subject: [pypy-svn] commit/extradoc: alex_gaynor: Quick note, I don't have time to implement it this second. Message-ID: <20110304105014.3602.8564@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/3b9a0ada54e5/ changeset: r3352:3b9a0ada54e5 branch: extradoc user: alex_gaynor date: 2011-03-04 11:50:09 summary: Quick note, I don't have time to implement it this second. 
affected #: 1 file (113 bytes) --- a/planning/jit.txt Thu Mar 03 15:13:29 2011 -0800 +++ b/planning/jit.txt Fri Mar 04 05:50:09 2011 -0500 @@ -76,6 +76,11 @@ maybe we should move promote even higher, before the first use and we could possibly remove more stuff? +- f31 = f17 * f16 + f32 = f16 * f17 + + Should be just a matter of synthesizing reverse operations in rewrite.py + PYTHON EXAMPLES --------------- Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Fri Mar 4 00:13:51 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Thu, 03 Mar 2011 23:13:51 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Finish the yelp talk, mostly by killing the slides that are superfluous. Message-ID: <20110303231351.6404.18354@bitbucket01.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/fc3d3c4a18be/ changeset: r3351:fc3d3c4a18be branch: extradoc user: arigo date: 2011-03-04 00:13:29 summary: Finish the yelp talk, mostly by killing the slides that are superfluous. 
affected #: 1 file (5.5 KB) --- a/talk/ustour2011/yelp-talk.txt Thu Mar 03 23:34:17 2011 +0100 +++ b/talk/ustour2011/yelp-talk.txt Thu Mar 03 15:13:29 2011 -0800 @@ -144,7 +144,7 @@ * On running PyPy's translation toolchain on 32-bits: 1.7GB with PyPy (including the JIT machine code), versus 1.2GB with CPython -* Experimental support for 32-bit "pointers" on 64-bit platforms +* Experimental support for 32-bit "compact pointers" on 64-bit platforms Just-in-Time Compilation @@ -168,6 +168,8 @@ could get much better GCs) --- so __del__ methods are not called immediately and predictively +* Apart from that, it is really 99.99% compatible + Stackless Python ---------------- @@ -218,20 +220,20 @@ Other ways to use C libraries ----------------------------- -* Use ctypes - -* (It is soon going to be fast on top of PyPy, too) - -* Example: pyexpat, sqlite3 +* Use ctypes (it is soon going to be fast on top of PyPy). + Example: pyexpat, sqlite3 * Or write it as an RPython module built into PyPy, but that's more involved +* More ways could be possible, given work (SWIG backend, + Cython backend, C++ Reflex, etc...) - +Architecture +---------------------------------------------------------------------- Architecture @@ -253,8 +255,6 @@ * RPython is also valid Python: we test extensively by running it on top of CPython -* See demo (py.py) - The translation toolchain ------------------------- @@ -263,8 +263,6 @@ * Outputs the "same" program written in C -* See demo - RPython is still mostly Python ------------------------------ @@ -296,146 +294,8 @@ * here, the code in ``f()`` is RPython, but the loop around it is not. 
- - -Architecture: the interpreter --------------------------------------------------------------------------- - - -Overview of the interpreter ---------------------------- - -* A compiler that produces a custom bytecode format - -* An interpreter for this bytecode - -* A large library of object types (the "object space") - -* A collection of extension modules - - -The bytecode interpreter ------------------------- - -* A straightforward, recursive interpreter - -* Stack-based - -* Every call to a Python function makes a frame object - -* Then the interpreter is written as methods on this frame object - - -The object space ----------------- - -* Implements all the built-in types - -* Structure more flexible than CPython's family of C functions - -* Very open to experimentation - - -Separation of levels --------------------- - -* Important: *all* objects that appear in the interpreted program are, - in the interpreter, instances of W_XxxObject. - -* Again, similar to CPython: an object in Python is implemented, - in the interpreter, as a C structure PyXxxObject. - - -Example: smalllong ------------------- - -* Standard Python types: int (32/64-bit) and long - (integer of unlimited size) - -* In CPython, the type is directly linked to its (single) implementation in C. - In PyPy, it is not. 
- -* So we could easily add an implementation W_SmallLongObject for - integers that happen to fit in 64 bits - -* And there is also W_LongObject for the general case - - -Example: smallint ------------------ - -* *Tagged integers,* common in interpreters (but not in CPython) - -* Idea, in C terms: take the integer objects whose value fits in 31/63 - bits, and encode them as odd-valued pseudo-pointers, instead of - pointers to separately-allocated integer objects - -* We did it in PyPy, but it's disabled now because it does not give - the expected performance gain - - -Example: multidict ------------------- - -* Similarly, we have several implementations of dict - -* For the different typical usage patterns of dicts in Python - -* E.g. module dicts (containing all global names of a module), - class dicts, instance dicts, user dicts (typically containing - non-string keys) - - -Example: mapdict ----------------- - -* An instance in Python uses a dictionary to store attributes:: - - >>> x = MyClass() - >>> x.a = 5 - >>> x.__dict__ - {'a': 5} - >>> x.__dict__ = {'b': 6} - >>> x.b - 6 - - -Example: mapdict ----------------- - -* An instance is thus two objects: a dict and a wrapper around it - -* Requires a lot of memory - -* This is different than Java, Smalltalk or C++, where the class - enforces the exact set of attributes of its instances - -* But it is like Self and JavaScript - - -Maps ----------------- - -* We can reuse the technique introduced in Self: "maps" - -* The JavaScript engine V8 also uses them, calling them "hidden classes" - -* Idea: it is likely that a lot of instances of a given class will - have the same set of attributes - -* So we split the attributes into a per-instance part (just an array of - field values) and a shared part (giving the attribute names, and their - indices in the arrays of the individual instances). 
- - - - -Architecture: the translation toolchain ---------------------------------------------------------------------- - - -Overview --------- +Translation toolchain +--------------------- * "Translation toolchain": statically compiles RPython code @@ -446,8 +306,6 @@ * Instead, they are added during translation -* PyPy = hybrid "research base" + "production-ready" - Translation overview (1) ------------------------ @@ -482,125 +340,11 @@ * Garbage collection -* Execution model: regular or stackless +* Execution model: regular (recursive) or stackless * Just-in-Time compiler -The object model ----------------- - -* Called "RTyping" internally - -* Can target "lltype" or "ootype" - -* "lltype" = low-level types = C-like structs and arrays - -* "ootype" = object-oriented types, for JVM or .NET - - -The execution model -------------------- - -* Optionally do a "stackless transformation" - -* We get microthread capabilities (soft threads) - -* Even if the source code of the interpreter is just recursive - - - - -Garbage collection ---------------------------------------------------------------------- - - -Purpose -------- - -* RPython assumes automatic memory management, like Python - -* But of course C code does not - -* We can use the Boehm GC, but it is far too slow - -* Remember that our GC needs to support both allocating Python-visible - objects and internal objects of the interpreter (lists, instances...) 
- - -Overview --------- - -* We wrote our own GCs, and each alloc operation in the CFGs is replaced - with a call to the GC - -* Handles finding and freeing unused memory - -* The GC is written in RPython, too - -* Analyzed like the rest of the program during translation - -* This approach allows testing at all levels - - -The GCs we have written ------------------------ - -* Currently used: "minimark", a generational GC with one young generation - and using mark-and-sweep for the old generation - -* Previously: a hybrid collector using generational semi-space collection - and mark-and-sweep for the oldest generation (too complicated) - -* Pretty standard, non-concurrent, non-thread-safe collectors - - -Old experiments ---------------- - -* Reference counting (like CPython)... Does not work well. - -* Mark-and-sweep, a fully non-moving collector - -* Mark-and-compact, a fully compacting, generationless collector, - similar to Squeak. - -* Lesson learned: using a generational collector is essential for - dynamic languages like Python - - -GC transformer --------------- - -* Inserting a GC in a program being translated is handled by the "GC - transformer" - -* Easy to customize, no fixed API - - -API example (minimark GC) -------------------------- - -* The GC provides functions like "malloc" - -* Plus a number of others: hash, identity_hash, weakref support, - finalizer support - -* The GC transformer inserts tables describing the structure of - RPython objects: sizes, location of further references, etc. - - -Finding the stack roots ------------------------ - -* The hard part: finding all pointers to GC objects from local variables - in the C stack - -* ANSI C solution: all pointers are copied to and from some custom stack - -* Not-ANSI-C-at-all: parse the assembler produced by GCC to build tables - - Just-in-Time Compiler Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. 
You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Sat Mar 5 16:39:37 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 5 Mar 2011 16:39:37 +0100 (CET) Subject: [pypy-svn] pypy default: "hg backout" of 903b44931aec, which breaks some tests left and Message-ID: <20110305153937.4756A282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r42426:182c7c3da62d Date: 2011-03-05 07:38 -0800 http://bitbucket.org/pypy/pypy/changeset/182c7c3da62d/ Log: "hg backout" of 903b44931aec, which breaks some tests left and right. More importantly, you should avoid storing on MetaInterpStaticData a dict that you mutate at run-time, because that's a frozen class. diff --git a/pypy/jit/metainterp/optimizeopt/fficall.py b/pypy/jit/metainterp/optimizeopt/fficall.py --- a/pypy/jit/metainterp/optimizeopt/fficall.py +++ b/pypy/jit/metainterp/optimizeopt/fficall.py @@ -64,8 +64,6 @@ class OptFfiCall(Optimization): - name = 'fficall' - def __init__(self): self.funcinfo = None diff --git a/pypy/jit/metainterp/optimizeopt/string.py b/pypy/jit/metainterp/optimizeopt/string.py --- a/pypy/jit/metainterp/optimizeopt/string.py +++ b/pypy/jit/metainterp/optimizeopt/string.py @@ -367,8 +367,6 @@ "Handling of strings and unicodes." enabled = True - name = 'string' - def reconstruct_for_next_iteration(self, optimizer, valuemap): self.enabled = True return self diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -239,8 +239,6 @@ """Unroll the loop into two iterations. 
The first one will become the preamble or entry bridge (don't think there is a distinction anymore)""" - - name = 'unroll' def __init__(self, metainterp_sd, loop, optimizations): self.optimizer = Optimizer(metainterp_sd, loop, optimizations) @@ -623,9 +621,6 @@ return self.map[loopbox] class OptInlineShortPreamble(Optimization): - - name = 'inlineshortpreamble' - def __init__(self, retraced): self.retraced = retraced self.inliner = None diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py --- a/pypy/jit/metainterp/warmspot.py +++ b/pypy/jit/metainterp/warmspot.py @@ -62,8 +62,7 @@ def jittify_and_run(interp, graph, args, repeat=1, backendopt=False, trace_limit=sys.maxint, - inline=False, loop_longevity=0, retrace_limit=5, - disable_opts='', **kwds): + inline=False, loop_longevity=0, retrace_limit=5, **kwds): from pypy.config.config import ConfigError translator = interp.typer.annotator.translator try: @@ -82,7 +81,6 @@ jd.warmstate.set_param_inlining(inline) jd.warmstate.set_param_loop_longevity(loop_longevity) jd.warmstate.set_param_retrace_limit(retrace_limit) - jd.warmstate.set_param_disable_opts(disable_opts) warmrunnerdesc.finish() res = interp.eval_graph(graph, args) if not kwds.get('translate_support_code', False): @@ -788,26 +786,16 @@ annhelper = self.annhelper) def rewrite_set_param(self): - from pypy.rpython.lltypesystem.rstr import STR - closures = {} graphs = self.translator.graphs _, PTR_SET_PARAM_FUNCTYPE = self.cpu.ts.get_FuncType([lltype.Signed], lltype.Void) - _, PTR_SET_PARAM_STR_FUNCTYPE = self.cpu.ts.get_FuncType( - [lltype.Ptr(STR)], lltype.Void) - def make_closure(jd, fullfuncname, is_string): + def make_closure(jd, fullfuncname): state = jd.warmstate def closure(i): - if is_string: - i = hlstr(i) getattr(state, fullfuncname)(i) - if is_string: - TP = PTR_SET_PARAM_STR_FUNCTYPE - else: - TP = PTR_SET_PARAM_FUNCTYPE - funcptr = self.helper_func(TP, closure) - return Constant(funcptr, TP) + funcptr = 
self.helper_func(PTR_SET_PARAM_FUNCTYPE, closure) + return Constant(funcptr, PTR_SET_PARAM_FUNCTYPE) # for graph, block, i in find_set_param(graphs): op = block.operations[i] @@ -819,8 +807,7 @@ funcname = op.args[2].value key = jd, funcname if key not in closures: - closures[key] = make_closure(jd, 'set_param_' + funcname, - funcname == 'disable_opts') + closures[key] = make_closure(jd, 'set_param_' + funcname) op.opname = 'direct_call' op.args[:3] = [closures[key]] diff --git a/pypy/jit/metainterp/test/test_loop_unroll.py b/pypy/jit/metainterp/test/test_loop_unroll.py --- a/pypy/jit/metainterp/test/test_loop_unroll.py +++ b/pypy/jit/metainterp/test/test_loop_unroll.py @@ -1,5 +1,5 @@ import py -from pypy.rlib.jit import OPTIMIZER_FULL, JitDriver +from pypy.rlib.jit import OPTIMIZER_FULL from pypy.jit.metainterp.test import test_loop from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -13,8 +13,6 @@ class OptHeap(Optimization): """Cache repeated heap accesses""" - - name = 'heap' def __init__(self): # cached fields: {descr: {OptValue_instance: OptValue_fieldvalue}} diff --git a/pypy/jit/metainterp/warmstate.py b/pypy/jit/metainterp/warmstate.py --- a/pypy/jit/metainterp/warmstate.py +++ b/pypy/jit/metainterp/warmstate.py @@ -240,12 +240,6 @@ else: raise ValueError("unknown optimizer") - def set_param_disable_opts(self, value): - d = self.warmrunnerdesc.metainterp_sd.disable_opts - for name in value.split(":"): - if name: - d[name] = None - def set_param_loop_longevity(self, value): # note: it's a global parameter, not a per-jitdriver one if (self.warmrunnerdesc is not None and diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ 
b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -11,27 +11,21 @@ """Optimize loop.operations to remove internal overheadish operations. """ opt_str = OptString() - _optimizations = [OptIntBounds(), - OptRewrite(), - OptVirtualize(), - opt_str, - OptHeap(), - ] - + optimizations = [OptIntBounds(), + OptRewrite(), + OptVirtualize(), + opt_str, + OptHeap(), + ] if inline_short_preamble: - _optimizations = [OptInlineShortPreamble(retraced)] + _optimizations + optimizations = [OptInlineShortPreamble(retraced)] + optimizations if metainterp_sd.jit_ffi: from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall - _optimizations = _optimizations + [ + optimizations = optimizations + [ OptFfiCall(), ] - optimizations = [] - for opt in _optimizations: - if opt.name not in metainterp_sd.disable_opts: - optimizations.append(opt) - if unroll: opt_str.enabled = False # FIXME: Workaround to disable string optimisation # during preamble but to keep it during the loop diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -13,8 +13,6 @@ This includes already executed operations and constants. 
""" - name = 'rewrite' - def reconstruct_for_next_iteration(self, optimizer, valuemap): return self diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -1231,7 +1231,6 @@ def __init__(self, cpu, options, ProfilerClass=EmptyProfiler, warmrunnerdesc=None, jit_ffi=True): - self.disable_opts = {} self.cpu = cpu self.stats = self.cpu.stats self.options = options diff --git a/pypy/jit/metainterp/test/test_loop.py b/pypy/jit/metainterp/test/test_loop.py --- a/pypy/jit/metainterp/test/test_loop.py +++ b/pypy/jit/metainterp/test/test_loop.py @@ -751,6 +751,7 @@ res = self.meta_interp(f, [200]) + class TestOOtype(LoopTest, OOJitMixin): pass diff --git a/pypy/module/pypyjit/interp_jit.py b/pypy/module/pypyjit/interp_jit.py --- a/pypy/module/pypyjit/interp_jit.py +++ b/pypy/module/pypyjit/interp_jit.py @@ -6,7 +6,7 @@ from pypy.tool.pairtype import extendabletype from pypy.rlib.rarithmetic import r_uint, intmask from pypy.rlib.jit import JitDriver, hint, we_are_jitted, dont_look_inside -from pypy.rlib.jit import current_trace_length, unroll_parameters +from pypy.rlib.jit import current_trace_length import pypy.interpreter.pyopcode # for side-effects from pypy.interpreter.error import OperationError, operationerrfmt from pypy.interpreter.pycode import PyCode, CO_GENERATOR @@ -136,17 +136,12 @@ raise OperationError(space.w_ValueError, space.wrap("error in JIT parameters string")) for key, w_value in kwds_w.items(): - if key == 'disable_opts': - pypyjitdriver.set_param('disable_opts', space.str_w(w_value)) - else: - intval = space.int_w(w_value) - for name, _ in unroll_parameters: - if name == key and name != 'disable_opts': - pypyjitdriver.set_param(name, intval) - break - else: - raise operationerrfmt(space.w_TypeError, - "no JIT parameter '%s'", key) + intval = space.int_w(w_value) + try: + pypyjitdriver.set_param(key, intval) + except ValueError: + raise 
operationerrfmt(space.w_TypeError, + "no JIT parameter '%s'", key) @dont_look_inside def residual_call(space, w_callable, __args__): diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -2391,25 +2391,5 @@ self.meta_interp(main, []) - def test_disable_opts(self): - jitdriver = JitDriver(greens = [], reds = ['a']) - - class A(object): - def __init__(self, i): - self.i = i - - def f(): - a = A(0) - - while a.i < 10: - jitdriver.jit_merge_point(a=a) - jitdriver.can_enter_jit(a=a) - a = A(a.i + 1) - - self.meta_interp(f, []) - self.check_loops(new_with_vtable=0) - self.meta_interp(f, [], disable_opts='virtualize') - self.check_loops(new_with_vtable=1) - class TestLLtype(BaseLLtypeTests, LLJitMixin): pass diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -9,8 +9,6 @@ """Keeps track of the bounds placed on integers by guards and remove redundant guards""" - name = 'intbounds' - def setup(self): self.posponedop = None self.nextop = None diff --git a/pypy/jit/metainterp/test/test_ztranslation.py b/pypy/jit/metainterp/test/test_ztranslation.py --- a/pypy/jit/metainterp/test/test_ztranslation.py +++ b/pypy/jit/metainterp/test/test_ztranslation.py @@ -51,7 +51,8 @@ set_jitcell_at=set_jitcell_at, get_printable_location=get_printable_location) def f(i): - for param, defl in unroll_parameters: + for param in unroll_parameters: + defl = PARAMETERS[param] jitdriver.set_param(param, defl) jitdriver.set_param("threshold", 3) jitdriver.set_param("trace_eagerness", 2) diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py --- a/pypy/rlib/jit.py +++ b/pypy/rlib/jit.py @@ -2,7 +2,7 @@ import sys from pypy.rpython.extregistry import ExtRegistryEntry from pypy.rlib.objectmodel import CDefinedIntSymbolic 
-from pypy.rlib.objectmodel import keepalive_until_here, specialize +from pypy.rlib.objectmodel import keepalive_until_here from pypy.rlib.unroll import unrolling_iterable from pypy.rlib.nonconst import NonConstant @@ -270,13 +270,12 @@ PARAMETERS = {'threshold': 1000, 'trace_eagerness': 200, 'trace_limit': 10000, - 'inlining': 0, + 'inlining': False, 'optimizer': OPTIMIZER_FULL, 'loop_longevity': 1000, 'retrace_limit': 5, - 'disable_opts': '', } -unroll_parameters = unrolling_iterable(PARAMETERS.items()) +unroll_parameters = unrolling_iterable(PARAMETERS.keys()) # ____________________________________________________________ @@ -333,14 +332,14 @@ # (internal, must receive a constant 'name') assert name in PARAMETERS - @specialize.arg(0, 1) def set_param(self, name, value): """Set one of the tunable JIT parameter.""" - for name1, _ in unroll_parameters: + for name1 in unroll_parameters: if name1 == name: self._set_param(name1, value) return raise ValueError("no such parameter") + set_param._annspecialcase_ = 'specialize:arg(0)' def set_user_param(self, text): """Set the tunable JIT parameters from a user-supplied string @@ -352,17 +351,12 @@ parts = s.split('=') if len(parts) != 2: raise ValueError + try: + value = int(parts[1]) + except ValueError: + raise # re-raise the ValueError (annotator hint) name = parts[0] - value = parts[1] - if name == 'disable_opts': - self.set_param('disable_opts', value) - else: - for name1, _ in unroll_parameters: - if name1 == name and name1 != 'disable_opts': - try: - self.set_param(name1, int(value)) - except ValueError: - raise + self.set_param(name, value) set_user_param._annspecialcase_ = 'specialize:arg(0)' def _make_extregistryentries(self): @@ -543,10 +537,7 @@ def compute_result_annotation(self, s_name, s_value): from pypy.annotation import model as annmodel assert s_name.is_constant() - if annmodel.SomeInteger().contains(s_value): - pass - else: - assert annmodel.SomeString().contains(s_value) + assert 
annmodel.SomeInteger().contains(s_value) return annmodel.s_None def specialize_call(self, hop): @@ -554,7 +545,7 @@ hop.exception_cannot_occur() driver = self.instance.im_self name = hop.args_s[0].const - v_value = hop.inputarg(hop.args_r[1], arg=1) + v_value = hop.inputarg(lltype.Signed, arg=1) vlist = [hop.inputconst(lltype.Void, "set_param"), hop.inputconst(lltype.Void, driver), hop.inputconst(lltype.Void, name), diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py --- a/pypy/jit/metainterp/optimizeopt/virtualize.py +++ b/pypy/jit/metainterp/optimizeopt/virtualize.py @@ -261,8 +261,6 @@ class OptVirtualize(optimizer.Optimization): "Virtualize objects until they escape." - name = 'virtualize' - def reconstruct_for_next_iteration(self, optimizer, valuemap): return self From commits-noreply at bitbucket.org Sat Mar 5 21:37:59 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 5 Mar 2011 21:37:59 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed test_file2k.py, added support for PYTHONIOENCODING. Message-ID: <20110305203759.AB44A282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42427:327b99e513e5 Date: 2011-03-05 12:37 -0800 http://bitbucket.org/pypy/pypy/changeset/327b99e513e5/ Log: Fixed test_file2k.py, added support for PYTHONIOENCODING. 
diff --git a/pypy/module/_file/interp_file.py b/pypy/module/_file/interp_file.py --- a/pypy/module/_file/interp_file.py +++ b/pypy/module/_file/interp_file.py @@ -28,8 +28,10 @@ stream = None w_name = None mode = "" + binary = False softspace= 0 # Required according to file object docs - encoding = None # This is not used internally by file objects + encoding = None + errors = None fd = -1 newlines = 0 # Updated when the stream is closed @@ -46,6 +48,7 @@ def fdopenstream(self, stream, fd, mode, w_name=None): self.fd = fd self.mode = mode + self.binary = "b" in mode if w_name is not None: self.w_name = w_name self.stream = stream @@ -229,8 +232,11 @@ size = space.r_longlong_w(w_size) stream.truncate(size) - @unwrap_spec(data='bufferstr') - def direct_write(self, data): + def direct_write(self, w_data): + space = self.space + if not self.binary and space.isinstance_w(w_data, space.w_unicode): + w_data = space.call_method(w_data, "encode", space.wrap(self.encoding), space.wrap(self.errors)) + data = space.bufferstr_w(w_data) self.softspace = 0 self.getstream().write(data) @@ -423,7 +429,7 @@ if not e.match(space, space.w_StopIteration): raise break # done - self.file_write(space.str_w(w_line)) + self.file_write(w_line) def file_readinto(self, w_rwbuffer): """readinto() -> Undocumented. 
Don't use this; it may go away.""" @@ -510,6 +516,7 @@ doc = "file mode ('r', 'U', 'w', 'a', " "possibly with 'b' or '+' added)"), encoding = interp_attrproperty('encoding', cls=W_File), + errors = interp_attrproperty('errors', cls=W_File), closed = GetSetProperty(descr_file_closed, cls=W_File, doc="True if the file is closed"), newlines = GetSetProperty(descr_file_newlines, cls=W_File, @@ -538,3 +545,9 @@ def getopenstreams(space): return space.fromcache(FileState).openstreams + + + at unwrap_spec(file=W_File, encoding="str_or_None", errors="str_or_None") +def set_file_encoding(space, file, encoding=None, errors=None): + file.encoding = encoding + file.errors = errors \ No newline at end of file diff --git a/pypy/module/__pypy__/interp_magic.py b/pypy/module/__pypy__/interp_magic.py --- a/pypy/module/__pypy__/interp_magic.py +++ b/pypy/module/__pypy__/interp_magic.py @@ -3,6 +3,8 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.objspace.std.typeobject import MethodCache from pypy.objspace.std.mapdict import IndexCache +from pypy.module._file.interp_file import W_File + def internal_repr(space, w_object): return space.wrap('%r' % (w_object,)) diff --git a/pypy/translator/goal/app_main.py b/pypy/translator/goal/app_main.py --- a/pypy/translator/goal/app_main.py +++ b/pypy/translator/goal/app_main.py @@ -254,6 +254,22 @@ sys.path.append(dir) _seen[dir] = True +def set_io_encoding(io_encoding): + try: + import _file + except ImportError: + import ctypes # HACK: while running on top of CPython + set_file_encoding = ctypes.pythonapi.PyFile_SetEncodingAndErrors + set_file_encoding.argtypes = [ctypes.py_object, ctypes.c_char_p, ctypes.c_char_p] + else: + set_file_encoding = _file.set_file_encoding + if ":" in io_encoding: + encoding, errors = io_encoding.split(":", 1) + else: + encoding, errors = io_encoding, None + for f in [sys.stdin, sys.stdout, sys.stderr]: + set_file_encoding(f, encoding, errors) + # Order is significant! 
sys_flags = ( "debug", @@ -447,7 +463,6 @@ elif not sys.stdout.isatty(): set_fully_buffered_io() - mainmodule = type(sys)('__main__') sys.modules['__main__'] = mainmodule @@ -458,6 +473,10 @@ print >> sys.stderr, "'import site' failed" readenv = not ignore_environment + io_encoding = readenv and os.getenv("PYTHONIOENCODING") + if io_encoding: + set_io_encoding(io_encoding) + pythonwarnings = readenv and os.getenv('PYTHONWARNINGS') if pythonwarnings: warnoptions.extend(pythonwarnings.split(',')) diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py --- a/pypy/translator/goal/test2/test_app_main.py +++ b/pypy/translator/goal/test2/test_app_main.py @@ -3,7 +3,7 @@ """ from __future__ import with_statement import py -import sys, os, re, runpy +import sys, os, re, runpy, subprocess import autopath from pypy.tool.udir import udir from contextlib import contextmanager @@ -535,11 +535,16 @@ class TestNonInteractive: def run(self, cmdline, senddata='', expect_prompt=False, - expect_banner=False, python_flags=''): + expect_banner=False, python_flags='', env=None): cmdline = '%s %s "%s" %s' % (sys.executable, python_flags, app_main, cmdline) print 'POPEN:', cmdline - child_in, child_out_err = os.popen4(cmdline) + process = subprocess.Popen( + cmdline, + stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, + shell=True, env=env + ) + child_in, child_out_err = process.stdin, process.stdout child_in.write(senddata) child_in.close() data = child_out_err.read() @@ -728,6 +733,27 @@ data = self.run(p + os.sep) assert data == '42\n' + def test_pythonioencoding(self): + if sys.version_info < (2, 7): + skip("test requires Python >= 2.7") + for encoding, expected in [ + ("iso-8859-15", "15\xa4"), + ("utf-8", '15\xe2\x82\xac'), + ("utf-16-le", '1\x005\x00\xac\x20'), + ("iso-8859-1:ignore", "15"), + ("iso-8859-1:replace", "15?"), + ("iso-8859-1:backslashreplace", "15\\u20ac"), + ]: + p = getscript_in_dir(""" + import 
sys + sys.stdout.write(u'15\u20ac') + sys.stdout.flush() + """) + env = os.environ.copy() + env["PYTHONIOENCODING"] = encoding + data = self.run(p, env=env) + assert data == expected + class AppTestAppMain: diff --git a/pypy/module/_file/test/test_file.py b/pypy/module/_file/test/test_file.py --- a/pypy/module/_file/test/test_file.py +++ b/pypy/module/_file/test/test_file.py @@ -207,6 +207,32 @@ exc = raises(IOError, self.file, os.curdir, 'w') assert exc.value.filename == os.curdir + def test_encoding_errors(self): + import _file + + with self.file(self.temppath, "w") as f: + _file.set_file_encoding(f, "utf-8") + f.write(u'15\u20ac') + + assert f.encoding == "utf-8" + assert f.errors is None + + with self.file(self.temppath, "r") as f: + data = f.read() + assert data == '15\xe2\x82\xac' + + with self.file(self.temppath, "w") as f: + _file.set_file_encoding(f, "iso-8859-1", "ignore") + f.write(u'15\u20ac') + + assert f.encoding == "iso-8859-1" + assert f.errors == "ignore" + + with self.file(self.temppath, "r") as f: + data = f.read() + assert data == "15" + + class AppTestConcurrency(object): # these tests only really make sense on top of a translated pypy-c, diff --git a/pypy/module/_file/__init__.py b/pypy/module/_file/__init__.py --- a/pypy/module/_file/__init__.py +++ b/pypy/module/_file/__init__.py @@ -9,6 +9,7 @@ interpleveldefs = { "file": "interp_file.W_File", + "set_file_encoding": "interp_file.set_file_encoding", } def __init__(self, space, *args): From commits-noreply at bitbucket.org Sat Mar 5 22:05:20 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 5 Mar 2011 22:05:20 +0100 (CET) Subject: [pypy-svn] pypy default: Expose array.ArrayType as an alias for array.array Message-ID: <20110305210520.B094C282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42428:5569f07faf7c Date: 2011-03-05 13:04 -0800 http://bitbucket.org/pypy/pypy/changeset/5569f07faf7c/ Log: Expose array.ArrayType as an alias for array.array diff --git 
a/pypy/module/array/__init__.py b/pypy/module/array/__init__.py --- a/pypy/module/array/__init__.py +++ b/pypy/module/array/__init__.py @@ -10,6 +10,7 @@ interpleveldefs = { 'array': 'interp_array.W_ArrayBase', + 'ArrayType': 'interp_array.W_ArrayBase', } appleveldefs = { From commits-noreply at bitbucket.org Sat Mar 5 22:32:42 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 5 Mar 2011 22:32:42 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, armin) fixed translation Message-ID: <20110305213242.27DAD282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42429:a30ac9cb208f Date: 2011-03-05 13:32 -0800 http://bitbucket.org/pypy/pypy/changeset/a30ac9cb208f/ Log: (alex, armin) fixed translation diff --git a/pypy/module/_file/interp_file.py b/pypy/module/_file/interp_file.py --- a/pypy/module/_file/interp_file.py +++ b/pypy/module/_file/interp_file.py @@ -237,6 +237,9 @@ if not self.binary and space.isinstance_w(w_data, space.w_unicode): w_data = space.call_method(w_data, "encode", space.wrap(self.encoding), space.wrap(self.errors)) data = space.bufferstr_w(w_data) + self.do_direct_write(data) + + def do_direct_write(self, data): self.softspace = 0 self.getstream().write(data) @@ -324,7 +327,7 @@ '''fileno() -> integer "file descriptor". This is needed for lower-level file interfaces, such os.read().''') - + _decl(locals(), "flush", """flush() -> None. Flush the internal I/O buffer.""") @@ -409,7 +412,7 @@ return '?' 
elif self.space.is_true(self.space.isinstance(w_name, self.space.w_str)): - return "'%s'" % self.space.str_w(w_name) + return "'%s'" % self.space.str_w(w_name) else: return self.space.str_w(self.space.repr(w_name)) diff --git a/pypy/module/marshal/interp_marshal.py b/pypy/module/marshal/interp_marshal.py --- a/pypy/module/marshal/interp_marshal.py +++ b/pypy/module/marshal/interp_marshal.py @@ -123,7 +123,7 @@ class DirectStreamWriter(StreamReaderWriter): def write(self, data): - self.file.direct_write(data) + self.file.do_direct_write(data) class DirectStreamReader(StreamReaderWriter): def read(self, n): @@ -449,7 +449,7 @@ res_w = [None] * lng idx = 0 space = self.space - w_ret = space.w_None # something not + w_ret = space.w_None # something not while idx < lng: tc = self.get1() w_ret = self._dispatch[ord(tc)](space, self, tc) From commits-noreply at bitbucket.org Sun Mar 6 01:38:59 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 6 Mar 2011 01:38:59 +0100 (CET) Subject: [pypy-svn] pypy default: During optimization, we can actually detect and complain if we see Message-ID: <20110306003859.AF734282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r42430:8bf27ae115d3 Date: 2011-03-05 10:48 -0800 http://bitbucket.org/pypy/pypy/changeset/8bf27ae115d3/ Log: During optimization, we can actually detect and complain if we see a getfield_gc_pure() followed by a setfield_gc() on the same field of the same object. This should help tracking down the issue of bogus _immutable_fields_ declarations. diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -163,7 +163,10 @@ def optimize_loop(self, ops, optops, expected_preamble=None): loop = self.parse(ops) - expected = self.parse(optops) + if optops != "crash!": + expected = self.parse(optops) + else: + expected = "crash!" 
if expected_preamble: expected_preamble = self.parse(expected_preamble) # @@ -195,7 +198,8 @@ print loop.inputargs print '\n'.join([str(o) for o in loop.operations]) print - + + assert expected != "crash!", "should have raised an exception" self.assert_equal(loop, expected) if expected_preamble: self.assert_equal(loop.preamble, expected_preamble, @@ -1346,6 +1350,26 @@ self.node.value = 5 self.optimize_loop(ops, expected) + def test_getfield_gc_pure_3(self): + ops = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + escape(p2) + p3 = getfield_gc_pure(p1, descr=nextdescr) + escape(p3) + jump() + """ + expected = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + escape(p2) + escape(p2) + jump() + """ + self.optimize_loop(ops, expected) + def test_getfield_gc_nonpure_2(self): ops = """ [i] @@ -3806,6 +3830,47 @@ self.node.value = 5 self.optimize_loop(ops, expected) + def test_complains_getfieldpure_setfield(self): + from pypy.jit.metainterp.optimizeopt.heap import BogusPureField + ops = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=nextdescr) + jump(p3) + """ + py.test.raises(BogusPureField, self.optimize_loop, ops, "crash!") + + def test_dont_complains_different_field(self): + ops = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=otherdescr) + escape(p2) + jump(p3) + """ + expected = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=otherdescr) + escape(p2) + jump(p3) + """ + self.optimize_loop(ops, expected) + + def test_dont_complains_different_object(self): + ops = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + p3 = escape() + setfield_gc(p3, p1, descr=nextdescr) + jump() + """ + self.optimize_loop(ops, ops) + def test_getfield_guard_const(self): ops = """ [p0] diff --git a/pypy/jit/metainterp/optimizeopt/heap.py 
b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -1,6 +1,8 @@ +import os from pypy.jit.metainterp.optimizeutil import _findall from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.rlib.objectmodel import we_are_translated +from pypy.jit.metainterp.jitexc import JitException from pypy.jit.metainterp.optimizeopt.optimizer import Optimization @@ -10,6 +12,9 @@ self.var_index_item = None self.var_index_indexvalue = None +class BogusPureField(JitException): + pass + class OptHeap(Optimization): """Cache repeated heap accesses""" @@ -298,6 +303,12 @@ d[value] = fieldvalue def optimize_SETFIELD_GC(self, op): + if self.has_pure_result(rop.GETFIELD_GC_PURE, [op.getarg(0)], + op.getdescr()): + os.write(2, '[bogus _immutable_field_ declaration: %s]\n' % + (op.getdescr().repr_of_descr())) + raise BogusPureField + # value = self.getvalue(op.getarg(0)) fieldvalue = self.getvalue(op.getarg(1)) cached_fieldvalue = self.read_cached_field(op.getdescr(), value) diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -216,6 +216,14 @@ op = ResOperation(opnum, args, result) self.optimizer.pure_operations[self.optimizer.make_args_key(op)] = op + def has_pure_result(self, opnum, args, descr): + op = ResOperation(opnum, args, None) + key = self.optimizer.make_args_key(op) + op = self.optimizer.pure_operations.get(key, None) + if op is None: + return False + return op.getdescr() is descr + def setup(self): pass From commits-noreply at bitbucket.org Sun Mar 6 01:39:00 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 6 Mar 2011 01:39:00 +0100 (CET) Subject: [pypy-svn] pypy default: Add a missing gc.collect(). 
While we are at it, rewrite gc.collect() to Message-ID: <20110306003900.657C0282BE9@codespeak.net> Author: Armin Rigo Branch: Changeset: r42431:b786a23e83c2 Date: 2011-03-06 00:34 +0000 http://bitbucket.org/pypy/pypy/changeset/b786a23e83c2/ Log: Add a missing gc.collect(). While we are at it, rewrite gc.collect() to test_support.gc_collect(). diff --git a/lib-python/modified-2.7.0/test/test_weakset.py b/lib-python/modified-2.7.0/test/test_weakset.py --- a/lib-python/modified-2.7.0/test/test_weakset.py +++ b/lib-python/modified-2.7.0/test/test_weakset.py @@ -332,10 +332,11 @@ next(it) # Trigger internal iteration # Destroy an item del items[-1] - gc.collect() # just in case + test_support.gc_collect() # We have removed either the first consumed items, or another one self.assertIn(len(list(it)), [len(items), len(items) - 1]) del it + test_support.gc_collect() # The removal has been committed self.assertEqual(len(s), len(items)) From commits-noreply at bitbucket.org Sun Mar 6 01:39:00 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 6 Mar 2011 01:39:00 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110306003900.C6187282BEA@codespeak.net> Author: Armin Rigo Branch: Changeset: r42432:0f08092e880e Date: 2011-03-06 01:38 +0100 http://bitbucket.org/pypy/pypy/changeset/0f08092e880e/ Log: merge heads From commits-noreply at bitbucket.org Sun Mar 6 01:47:18 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sun, 6 Mar 2011 01:47:18 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed CheckCursorRegistration test Message-ID: <20110306004718.32AE436C20A@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42433:4ccf4839a116 Date: 2011-03-05 16:40 -0800 http://bitbucket.org/pypy/pypy/changeset/4ccf4839a116/ Log: Fixed CheckCursorRegistration test diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -245,6 +245,8 @@ self._isolation_level = isolation_level 
self.detect_types = detect_types + self.cursors = [] + self.Error = Error self.Warning = Warning self.InterfaceError = InterfaceError @@ -307,6 +309,12 @@ "The object was created in thread id %d and this is thread id %d", self.thread_ident, thread_get_ident()) + def _reset_cursors(self): + for cursor_ref in self.cursors: + cursor = cursor_ref() + if cursor: + cursor.reset = True + def cursor(self, factory=None): self._check_thread() self._check_closed() @@ -421,6 +429,7 @@ raise self._get_exception(ret) finally: sqlite.sqlite3_finalize(statement) + self._reset_cursors() def _check_closed(self): if getattr(self, 'closed', True): @@ -450,6 +459,7 @@ self.closed = True ret = sqlite.sqlite3_close(self.db) + self._reset_cursors() if ret != SQLITE_OK: raise self._get_exception(ret) @@ -629,6 +639,7 @@ raise TypeError con._check_thread() con._check_closed() + con.cursors.append(weakref.ref(self)) self.connection = con self._description = None self.arraysize = 1 @@ -636,6 +647,7 @@ self.row_factory = None self.rowcount = -1 self.statement = None + self.reset = False def _check_closed(self): if not getattr(self, 'connection', None): @@ -736,8 +748,17 @@ def __iter__(self): return self.statement + def _check_reset(self): + if self.reset: + raise self.connection.InterfaceError("Cursor needed to be reset because " + "of commit/rollback and can " + "no longer be fetched from.") + + # do all statements def fetchone(self): self._check_closed() + self._check_reset() + if self.statement is None: return None @@ -750,6 +771,7 @@ def fetchmany(self, size=None): self._check_closed() + self._check_reset() if self.statement is None: return [] if size is None: @@ -763,6 +785,7 @@ def fetchall(self): self._check_closed() + self._check_reset() if self.statement is None: return [] return list(self.statement) @@ -782,6 +805,7 @@ if self.statement: self.statement.reset() self.statement = None + self.connection.cursors.remove(weakref.ref(self)) self.connection = None def setinputsizes(self, 
*args): From commits-noreply at bitbucket.org Sun Mar 6 01:47:18 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sun, 6 Mar 2011 01:47:18 +0100 (CET) Subject: [pypy-svn] pypy default: Merging heads. Message-ID: <20110306004718.8E55F36C20B@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42434:6f7173d7683e Date: 2011-03-05 16:46 -0800 http://bitbucket.org/pypy/pypy/changeset/6f7173d7683e/ Log: Merging heads. From commits-noreply at bitbucket.org Sun Mar 6 02:09:32 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 02:09:32 +0100 (CET) Subject: [pypy-svn] pypy default: Fix one of the failures in test_sys, encode unicode correctly when printing SystemExit(errorcode) Message-ID: <20110306010932.23540282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42435:73c2f8332dd9 Date: 2011-03-05 16:40 -0800 http://bitbucket.org/pypy/pypy/changeset/73c2f8332dd9/ Log: Fix one of the failures in test_sys, encode unicode correctly when printing SystemExit(errorcode) diff --git a/pypy/translator/goal/app_main.py b/pypy/translator/goal/app_main.py --- a/pypy/translator/goal/app_main.py +++ b/pypy/translator/goal/app_main.py @@ -37,7 +37,7 @@ except AttributeError: pass # too bad else: - print >> stderr, exitcode + stderr.write(exitcode) exitcode = 1 raise SystemExit(exitcode) diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py --- a/pypy/translator/goal/test2/test_app_main.py +++ b/pypy/translator/goal/test2/test_app_main.py @@ -534,7 +534,7 @@ class TestNonInteractive: - def run(self, cmdline, senddata='', expect_prompt=False, + def run_with_status_code(self, cmdline, senddata='', expect_prompt=False, expect_banner=False, python_flags='', env=None): cmdline = '%s %s "%s" %s' % (sys.executable, python_flags, app_main, cmdline) @@ -546,11 +546,14 @@ ) child_in, child_out_err = process.stdin, process.stdout child_in.write(senddata) - child_in.close() data = child_out_err.read() 
- child_out_err.close() + process.communicate() assert (banner in data) == expect_banner # no banner unless expected assert ('>>> ' in data) == expect_prompt # no prompt unless expected + return data, process.returncode + + def run(self, *args, **kwargs): + data, status = self.run_with_status_code(*args, **kwargs) return data def test_script_on_stdin(self): @@ -754,6 +757,19 @@ data = self.run(p, env=env) assert data == expected + def test_sys_exit_pythonioencoding(self): + if sys.version_info < (2, 7): + skip("test required Python >= 2.7") + p = getscript_in_dir(""" + import sys + sys.exit(u'15\u20ac') + """) + env = os.environ.copy() + env["PYTHONIOENCODING"] = "utf-8" + data, status = self.run_with_status_code(p, env=env) + assert status == 1 + assert data.startswith("15\xe2\x82\xac") + class AppTestAppMain: From commits-noreply at bitbucket.org Sun Mar 6 02:09:32 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 02:09:32 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. Message-ID: <20110306010932.50EFA282BE9@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42436:b0b6e5f4bb31 Date: 2011-03-05 16:47 -0800 http://bitbucket.org/pypy/pypy/changeset/b0b6e5f4bb31/ Log: Merged upstream. 
From commits-noreply at bitbucket.org Sun Mar 6 02:09:34 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 02:09:34 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, armin) handle `print unicode` encoding correctly, fixes a failure in test_sys Message-ID: <20110306010934.3BC44282BEB@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42437:4c541f936faf Date: 2011-03-05 17:06 -0800 http://bitbucket.org/pypy/pypy/changeset/4c541f936faf/ Log: (alex, armin) handle `print unicode` encoding correctly, fixes a failure in test_sys diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -1408,6 +1408,8 @@ def print_item_to(x, stream): if file_softspace(stream, False): stream.write(" ") + if isinstance(x, unicode) and getattr(stream, "encoding", None) is not None: + x = x.encode(stream.encoding, getattr(stream, "errors", None) or "strict") stream.write(str(x)) # add a softspace unless we just printed a string which ends in a '\t' diff --git a/pypy/interpreter/test/test_interpreter.py b/pypy/interpreter/test/test_interpreter.py --- a/pypy/interpreter/test/test_interpreter.py +++ b/pypy/interpreter/test/test_interpreter.py @@ -270,6 +270,25 @@ finally: sys.stdout = save + def test_print_unicode(self): + import sys + + save = sys.stdout + class Out(object): + def __init__(self): + self.data = [] + + def write(self, x): + self.data.append(x) + sys.stdout = out = Out() + try: + raises(UnicodeError, "print unichr(0xa2)") + out.encoding = "cp424" + print unichr(0xa2) + assert out.data == [unichr(0xa2).encode("cp424"), "\n"] + finally: + sys.stdout = save + def test_identity(self): def f(x): return x assert f(666) == 666 From commits-noreply at bitbucket.org Sun Mar 6 02:09:34 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 02:09:34 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. 
Message-ID: <20110306010934.9F612282BF2@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42438:3fa1a9aec379 Date: 2011-03-05 17:08 -0800 http://bitbucket.org/pypy/pypy/changeset/3fa1a9aec379/ Log: Merged upstream. From commits-noreply at bitbucket.org Sun Mar 6 03:36:27 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 03:36:27 +0100 (CET) Subject: [pypy-svn] pypy default: Remove helper function from fcntl which at some point became a space method. Message-ID: <20110306023627.C420B282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42439:496a59afa455 Date: 2011-03-05 18:35 -0800 http://bitbucket.org/pypy/pypy/changeset/496a59afa455/ Log: Remove helper function from fcntl which at some point became a space method. diff --git a/pypy/module/fcntl/app_fcntl.py b/pypy/module/fcntl/app_fcntl.py --- a/pypy/module/fcntl/app_fcntl.py +++ b/pypy/module/fcntl/app_fcntl.py @@ -1,12 +1,3 @@ - -def _conv_descriptor(f): - if hasattr(f, "fileno"): - return f.fileno() - elif isinstance(f, (int, long)): - return f - else: - raise TypeError, "argument must be an int, or have a fileno() method." - __doc__ = """This module performs file control and I/O control on file descriptors. It is an interface to the fcntl() and ioctl() Unix routines. 
File descriptors can be obtained with the fileno() method of diff --git a/pypy/module/fcntl/interp_fcntl.py b/pypy/module/fcntl/interp_fcntl.py --- a/pypy/module/fcntl/interp_fcntl.py +++ b/pypy/module/fcntl/interp_fcntl.py @@ -15,19 +15,19 @@ ('l_pid', rffi.LONG), ('l_type', rffi.SHORT), ('l_whence', rffi.SHORT)]) has_flock = platform.Has('flock') - + # constants, look in fcntl.h and platform docs for the meaning -# some constants are linux only so they will be correctly exposed outside +# some constants are linux only so they will be correctly exposed outside # depending on the OS constants = {} constant_names = ['LOCK_SH', 'LOCK_EX', 'LOCK_NB', 'LOCK_UN', 'F_DUPFD', 'F_GETFD', 'F_SETFD', 'F_GETFL', 'F_SETFL', 'F_UNLCK', 'FD_CLOEXEC', - 'LOCK_MAND', 'LOCK_READ', 'LOCK_WRITE', 'LOCK_RW', 'F_GETSIG', 'F_SETSIG', + 'LOCK_MAND', 'LOCK_READ', 'LOCK_WRITE', 'LOCK_RW', 'F_GETSIG', 'F_SETSIG', 'F_GETLK64', 'F_SETLK64', 'F_SETLKW64', 'F_GETLK', 'F_SETLK', 'F_SETLKW', 'F_GETOWN', 'F_SETOWN', 'F_RDLCK', 'F_WRLCK', 'F_SETLEASE', 'F_GETLEASE', 'F_NOTIFY', 'F_EXLCK', 'F_SHLCK', 'DN_ACCESS', 'DN_MODIFY', 'DN_CREATE', 'DN_DELETE', 'DN_RENAME', 'DN_ATTRIB', 'DN_MULTISHOT', 'I_NREAD', - 'I_PUSH', 'I_POP', 'I_LOOK', 'I_FLUSH', 'I_SRDOPT', 'I_GRDOPT', 'I_STR', + 'I_PUSH', 'I_POP', 'I_LOOK', 'I_FLUSH', 'I_SRDOPT', 'I_GRDOPT', 'I_STR', 'I_SETSIG', 'I_GETSIG', 'I_FIND', 'I_LINK', 'I_UNLINK', 'I_PEEK', 'I_FDINSERT', 'I_SENDFD', 'I_RECVFD', 'I_SWROPT', 'I_LIST', 'I_PLINK', 'I_PUNLINK', 'I_FLUSHBAND', 'I_CKBAND', 'I_GETBAND', 'I_ATMARK', @@ -79,12 +79,6 @@ w_obj = space.getattr(w_module, space.wrap(obj_name)) return w_obj -def _conv_descriptor(space, w_f): - w_conv_descriptor = _get_module_object(space, "_conv_descriptor") - w_fd = space.call_function(w_conv_descriptor, w_f) - fd = space.int_w(w_fd) - return rffi.cast(rffi.INT, fd) # C long => C int - def _check_flock_op(space, op): if op == LOCK_UN: @@ -114,7 +108,7 @@ integer corresponding to the return value of the fcntl call in the C 
code. """ - fd = _conv_descriptor(space, w_fd) + fd = space.c_filedescriptor_w(w_fd) op = rffi.cast(rffi.INT, op) # C long => C int try: @@ -154,7 +148,7 @@ manual flock(3) for details. (On some systems, this function is emulated using fcntl().)""" - fd = _conv_descriptor(space, w_fd) + fd = space.c_filedescriptor_w(w_fd) if has_flock: rv = c_flock(fd, op) @@ -196,7 +190,7 @@ 1 - relative to the current buffer position (SEEK_CUR) 2 - relative to the end of the file (SEEK_END)""" - fd = _conv_descriptor(space, w_fd) + fd = space.c_filedescriptor_w(w_fd) l = _check_flock_op(space, op) if start: @@ -230,8 +224,8 @@ # XXX this function's interface is a mess. # We try to emulate the behavior of Python >= 2.5 w.r.t. mutate_flag - - fd = _conv_descriptor(space, w_fd) + + fd = space.c_filedescriptor_w(w_fd) op = rffi.cast(rffi.INT, op) # C long => C int if mutate_flag != 0: diff --git a/pypy/module/fcntl/test/test_fcntl.py b/pypy/module/fcntl/test/test_fcntl.py --- a/pypy/module/fcntl/test/test_fcntl.py +++ b/pypy/module/fcntl/test/test_fcntl.py @@ -18,32 +18,14 @@ tmpprefix = str(udir.ensure('test_fcntl', dir=1).join('tmp_')) cls.w_tmp = space.wrap(tmpprefix) - def test_conv_descriptor(self): - import fcntl - if not hasattr(fcntl, '_conv_descriptor'): - skip("PyPy only") - - f = open(self.tmp + "a", "w+") - - raises(TypeError, fcntl._conv_descriptor, "foo") - raises(TypeError, fcntl._conv_descriptor, 2.0) - import cStringIO - raises(TypeError, fcntl._conv_descriptor, cStringIO.StringIO()) - res = fcntl._conv_descriptor(10) - res_1 = fcntl._conv_descriptor(f) - assert res == 10 - assert res_1 == f.fileno() - - f.close() - def test_fcntl(self): import fcntl import os import sys import struct - + f = open(self.tmp + "b", "w+") - + fcntl.fcntl(f, 1, 0) fcntl.fcntl(f, 1) raises(TypeError, fcntl.fcntl, "foo") @@ -52,7 +34,7 @@ assert fcntl.fcntl(f, 1, 0) == 0 assert fcntl.fcntl(f, 2, "foo") == "foo" assert fcntl.fcntl(f, 2, buffer("foo")) == "foo" - + try: os.O_LARGEFILE except 
AttributeError: @@ -60,7 +42,7 @@ else: start_len = "qq" - if sys.platform in ('netbsd1', 'netbsd2', 'netbsd3', + if sys.platform in ('netbsd1', 'netbsd2', 'netbsd3', 'Darwin1.2', 'darwin', 'freebsd2', 'freebsd3', 'freebsd4', 'freebsd5', 'freebsd6', 'freebsd7', 'freebsd8', 'freebsd9', @@ -118,15 +100,15 @@ # with "Inappropriate ioctl for device" raises(IOError, fcntl.fcntl, f, fcntl.F_GETOWN) raises(IOError, fcntl.fcntl, f, fcntl.F_SETOWN, 20) - + f.close() def test_flock(self): import fcntl import sys - + f = open(self.tmp + "c", "w+") - + raises(TypeError, fcntl.flock, "foo") raises(TypeError, fcntl.flock, f, "foo") fcntl.flock(f, fcntl.LOCK_SH) @@ -134,22 +116,22 @@ # LOCK_NB flag was selected. raises(IOError, fcntl.flock, f, fcntl.LOCK_NB) fcntl.flock(f, fcntl.LOCK_UN) - + f.close() def test_lockf(self): import fcntl - + f = open(self.tmp + "d", "w+") - + raises(TypeError, fcntl.lockf, f, "foo") raises(TypeError, fcntl.lockf, f, fcntl.LOCK_UN, "foo") raises(ValueError, fcntl.lockf, f, -256) raises(ValueError, fcntl.lockf, f, 256) - + fcntl.lockf(f, fcntl.LOCK_SH) fcntl.lockf(f, fcntl.LOCK_UN) - + f.close() def test_ioctl(self): @@ -163,7 +145,7 @@ TIOCGPGRP = 0x40047477 else: skip("don't know how to test ioctl() on this platform") - + raises(TypeError, fcntl.ioctl, "foo") raises(TypeError, fcntl.ioctl, 0, "foo") #raises(TypeError, fcntl.ioctl, 0, TIOCGPGRP, float(0)) diff --git a/pypy/module/fcntl/__init__.py b/pypy/module/fcntl/__init__.py --- a/pypy/module/fcntl/__init__.py +++ b/pypy/module/fcntl/__init__.py @@ -10,10 +10,9 @@ } appleveldefs = { - '_conv_descriptor': 'app_fcntl._conv_descriptor', '__doc__': 'app_fcntl.__doc__' } - + def buildloaders(cls): from pypy.module.fcntl import interp_fcntl for constant, value in interp_fcntl.constants.iteritems(): From commits-noreply at bitbucket.org Sun Mar 6 03:37:57 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 03:37:57 +0100 (CET) Subject: [pypy-svn] pypy default: Recursively 
remove dead code. Message-ID: <20110306023757.36024282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42440:50df87ba71f6 Date: 2011-03-05 18:37 -0800 http://bitbucket.org/pypy/pypy/changeset/50df87ba71f6/ Log: Recursively remove dead code. diff --git a/pypy/module/fcntl/interp_fcntl.py b/pypy/module/fcntl/interp_fcntl.py --- a/pypy/module/fcntl/interp_fcntl.py +++ b/pypy/module/fcntl/interp_fcntl.py @@ -74,11 +74,6 @@ return wrap_oserror(space, OSError(errno, funcname), exception_name = 'w_IOError') -def _get_module_object(space, obj_name): - w_module = space.getbuiltinmodule('fcntl') - w_obj = space.getattr(w_module, space.wrap(obj_name)) - return w_obj - def _check_flock_op(space, op): if op == LOCK_UN: From commits-noreply at bitbucket.org Sun Mar 6 03:41:28 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sun, 6 Mar 2011 03:41:28 +0100 (CET) Subject: [pypy-svn] pypy default: Clear reset flag on cursor objects after executing a new sql statement. Message-ID: <20110306024128.4FDEE282BE7@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42441:e9fb4752f843 Date: 2011-03-05 18:37 -0800 http://bitbucket.org/pypy/pypy/changeset/e9fb4752f843/ Log: Clear reset flag on cursor objects after executing a new sql statement. 
diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -657,6 +657,7 @@ def execute(self, sql, params=None): self._description = None + self.reset = False if type(sql) is unicode: sql = sql.encode("utf-8") self._check_closed() @@ -691,6 +692,7 @@ def executemany(self, sql, many_params): self._description = None + self.reset = False if type(sql) is unicode: sql = sql.encode("utf-8") self._check_closed() @@ -712,6 +714,7 @@ def executescript(self, sql): self._description = None + self.reset = False if type(sql) is unicode: sql = sql.encode("utf-8") self._check_closed() From commits-noreply at bitbucket.org Sun Mar 6 03:41:28 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sun, 6 Mar 2011 03:41:28 +0100 (CET) Subject: [pypy-svn] pypy default: Merging heads. Message-ID: <20110306024128.93C99282BE9@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42442:ddf6f7bbabcd Date: 2011-03-05 18:39 -0800 http://bitbucket.org/pypy/pypy/changeset/ddf6f7bbabcd/ Log: Merging heads. From commits-noreply at bitbucket.org Sun Mar 6 09:26:29 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sun, 6 Mar 2011 09:26:29 +0100 (CET) Subject: [pypy-svn] pypy pytest2: merge default Message-ID: <20110306082629.03190282BEA@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42443:34ac0158319a Date: 2011-03-06 09:20 +0100 http://bitbucket.org/pypy/pypy/changeset/34ac0158319a/ Log: merge default diff --git a/pypy/module/readline/test/test_c_readline.py b/pypy/module/readline/test/test_c_readline.py deleted file mode 100644 --- a/pypy/module/readline/test/test_c_readline.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -Directly test the basic ctypes wrappers. 
-""" - -import py -from pypy import conftest; conftest.translation_test_so_skip_if_appdirect() -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -def test_basic_import(): - c_readline.c_rl_initialize() diff --git a/pypy/module/__builtin__/app_file_stub.py b/pypy/module/__builtin__/app_file_stub.py deleted file mode 100644 --- a/pypy/module/__builtin__/app_file_stub.py +++ /dev/null @@ -1,20 +0,0 @@ -# NOT_RPYTHON - -class file(object): - """file(name[, mode[, buffering]]) -> file object - -Open a file. The mode can be 'r', 'w' or 'a' for reading (default), -writing or appending. The file will be created if it doesn't exist -when opened for writing or appending; it will be truncated when -opened for writing. Add a 'b' to the mode for binary files. -Add a '+' to the mode to allow simultaneous reading and writing. -If the buffering argument is given, 0 means unbuffered, 1 means line -buffered, and larger numbers specify the buffer size. -Add a 'U' to mode to open the file for input with universal newline -support. Any line ending in the input file will be seen as a '\n' -in Python. Also, a file so opened gains the attribute 'newlines'; -the value for this attribute is one of None (no newline read yet), -'\r', '\n', '\r\n' or a tuple containing all the newline types seen. - -Note: open() is an alias for file(). -""" diff --git a/pypy/module/_rawffi/error.py b/pypy/module/_rawffi/error.py deleted file mode 100644 --- a/pypy/module/_rawffi/error.py +++ /dev/null @@ -1,2 +0,0 @@ -class SegfaultException(Exception): - pass diff --git a/pypy/module/_socket/app_socket.py b/pypy/module/_socket/app_socket.py deleted file mode 100644 --- a/pypy/module/_socket/app_socket.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Implementation module for socket operations. 
- -See the socket module for documentation.""" - -class error(IOError): - pass - -class herror(error): - pass - -class gaierror(error): - pass - -class timeout(error): - pass diff --git a/pypy/module/readline/__init__.py b/pypy/module/readline/__init__.py deleted file mode 100644 --- a/pypy/module/readline/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.mixedmodule import MixedModule - -# XXX raw_input needs to check for space.readline_func and use -# it if its there - -class Module(MixedModule): - """Importing this module enables command line editing using GNU readline.""" - # the above line is the doc string of the translated module - - def setup_after_space_initialization(self): - from pypy.module.readline import c_readline - c_readline.setup_readline(self.space, self) - - interpleveldefs = { - 'readline' : 'interp_readline.readline', - } - - appleveldefs = { - 'parse_and_bind': 'app_stub.stub', - 'get_line_buffer': 'app_stub.stub_str', - 'insert_text': 'app_stub.stub', - 'read_init_file': 'app_stub.stub', - 'read_history_file': 'app_stub.stub', - 'write_history_file': 'app_stub.stub', - 'clear_history': 'app_stub.stub', - 'get_history_length': 'app_stub.stub_int', - 'set_history_length': 'app_stub.stub', - 'get_current_history_length': 'app_stub.stub_int', - 'get_history_item': 'app_stub.stub_str', - 'remove_history_item': 'app_stub.stub', - 'replace_history_item': 'app_stub.stub', - 'redisplay': 'app_stub.stub', - 'set_startup_hook': 'app_stub.stub', - 'set_pre_input_hook': 'app_stub.stub', - 'set_completer': 'app_stub.stub', - 'get_completer': 'app_stub.stub', - 'get_begidx': 'app_stub.stub_int', - 'get_endidx': 'app_stub.stub_int', - 'set_completer_delims': 'app_stub.stub', - 'get_completer_delims': 'app_stub.stub_str', - 'add_history': 'app_stub.stub', - } diff --git 
a/pypy/module/thread/app_thread.py b/pypy/module/thread/app_thread.py deleted file mode 100644 --- a/pypy/module/thread/app_thread.py +++ /dev/null @@ -1,7 +0,0 @@ -class error(Exception): - pass - -def exit(): - """This is synonymous to ``raise SystemExit''. It will cause the current -thread to exit silently unless the exception is caught.""" - raise SystemExit diff --git a/pypy/translator/c/test/test_dtoa.py b/pypy/translator/c/test/test_dtoa.py deleted file mode 100644 --- a/pypy/translator/c/test/test_dtoa.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import with_statement -from pypy.translator.tool.cbuild import ExternalCompilationInfo -from pypy.tool.autopath import pypydir -from pypy.rpython.lltypesystem import lltype, rffi -from pypy.rlib.rstring import StringBuilder -import py - -includes = [] -libraries = [] - -cdir = py.path.local(pypydir) / 'translator' / 'c' -files = [cdir / 'src' / 'dtoa.c'] -include_dirs = [cdir] - -eci = ExternalCompilationInfo( - include_dirs = include_dirs, - libraries = libraries, - separate_module_files = files, - separate_module_sources = [''' - #include - #include - #define WITH_PYMALLOC - #include "src/obmalloc.c" - '''], - export_symbols = ['_Py_dg_strtod', - '_Py_dg_dtoa', - '_Py_dg_freedtoa', - ], -) - -dg_strtod = rffi.llexternal( - '_Py_dg_strtod', [rffi.CCHARP, rffi.CCHARPP], rffi.DOUBLE, - compilation_info=eci) - -dg_dtoa = rffi.llexternal( - '_Py_dg_dtoa', [rffi.DOUBLE, rffi.INT, rffi.INT, - rffi.INTP, rffi.INTP, rffi.CCHARPP], rffi.CCHARP, - compilation_info=eci) - -dg_freedtoa = rffi.llexternal( - '_Py_dg_freedtoa', [rffi.CCHARP], lltype.Void, - compilation_info=eci) - -def strtod(input): - with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as end_ptr: - with rffi.scoped_str2charp(input) as ll_input: - result = dg_strtod(ll_input, end_ptr) - if end_ptr[0] and ord(end_ptr[0][0]): - offset = (rffi.cast(rffi.LONG, end_ptr[0]) - - rffi.cast(rffi.LONG, ll_input)) - raise ValueError("invalid input at position %d" % 
(offset,)) - return result - -def dtoa(value, mode=0, precision=0): - builder = StringBuilder(20) - with lltype.scoped_alloc(rffi.INTP.TO, 1) as decpt_ptr: - with lltype.scoped_alloc(rffi.INTP.TO, 1) as sign_ptr: - with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as end_ptr: - output_ptr = dg_dtoa(value, mode, precision, - decpt_ptr, sign_ptr, end_ptr) - try: - buflen = (rffi.cast(rffi.LONG, end_ptr[0]) - - rffi.cast(rffi.LONG, output_ptr)) - intpart = rffi.cast(lltype.Signed, decpt_ptr[0]) - if intpart <= buflen: - builder.append(rffi.charpsize2str(output_ptr, intpart)) - else: - builder.append(rffi.charpsize2str(output_ptr, buflen)) - while buflen < intpart: - builder.append('0') - intpart -= 1 - builder.append('.') - fracpart = buflen - intpart - if fracpart > 0: - ptr = rffi.ptradd(output_ptr, intpart) - builder.append(rffi.charpsize2str(ptr, fracpart)) - finally: - dg_freedtoa(output_ptr) - return builder.build() - -def test_strtod(): - assert strtod("12345") == 12345.0 - assert strtod("1.1") == 1.1 - assert strtod("3.47") == 3.47 - raises(ValueError, strtod, "123A") - -def test_dtoa(): - assert dtoa(3.47) == "3.47" - assert dtoa(1.1) == "1.1" - assert dtoa(12.3577) == "12.3577" - assert dtoa(10) == "10." - assert dtoa(1e100) == "1" + "0" * 100 + "." diff --git a/lib_pypy/cmath.py b/lib_pypy/cmath.py deleted file mode 100644 --- a/lib_pypy/cmath.py +++ /dev/null @@ -1,288 +0,0 @@ -"""This module is always available. It provides access to mathematical -functions for complex numbers.""" - -# Complex math module - -# much code borrowed from mathmodule.c - -import math -from math import e, pi - -try: from __pypy__ import builtinify -except ImportError: builtinify = lambda f: f - - -# constants -_one = complex(1., 0.) -_half = complex(0.5, 0.) -_i = complex(0., 1.) 
-_halfi = complex(0., 0.5) - - - -# internal functions not available from Python -def _to_complex(x): - if isinstance(x, complex): - return x - if isinstance(x, (str, unicode)): - raise TypeError('float or complex required') - return complex(x) - -def _prodi(x): - x = _to_complex(x) - real = -x.imag - imag = x.real - return complex(real, imag) - - - at builtinify -def phase(x): - x = _to_complex(x) - return math.atan2(x.imag, x.real) - - - at builtinify -def polar(x): - x = _to_complex(x) - phi = math.atan2(x.imag, x.real) - r = abs(x) - return r, phi - - - at builtinify -def rect(r, phi): - return complex(r * math.cos(phi), r * math.sin(phi)) - - - at builtinify -def acos(x): - """acos(x) - - Return the arc cosine of x.""" - - x = _to_complex(x) - return -(_prodi(log((x+(_i*sqrt((_one-(x*x)))))))) - - - at builtinify -def acosh(x): - """acosh(x) - - Return the hyperbolic arccosine of x.""" - - x = _to_complex(x) - z = log(_sqrt_half*(sqrt(x+_one)+sqrt(x-_one))) - return z+z - - - at builtinify -def asin(x): - """asin(x) - - Return the arc sine of x.""" - - x = _to_complex(x) - # -i * log[(sqrt(1-x**2) + i*x] - squared = x*x - sqrt_1_minus_x_sq = sqrt(_one-squared) - return -(_prodi(log((sqrt_1_minus_x_sq+_prodi(x))))) - - - at builtinify -def asinh(x): - """asinh(x) - - Return the hyperbolic arc sine of x.""" - - x = _to_complex(x) - z = log((_sqrt_half * (sqrt(x+_i)+sqrt((x-_i))) )) - return z+z - - - at builtinify -def atan(x): - """atan(x) - - Return the arc tangent of x.""" - - x = _to_complex(x) - return _halfi*log(((_i+x)/(_i-x))) - - - at builtinify -def atanh(x): - """atanh(x) - - Return the hyperbolic arc tangent of x.""" - - x = _to_complex(x) - return _half*log((_one+x)/(_one-x)) - - - at builtinify -def cos(x): - """cos(x) - - Return the cosine of x.""" - - x = _to_complex(x) - real = math.cos(x.real) * math.cosh(x.imag) - imag = -math.sin(x.real) * math.sinh(x.imag) - return complex(real, imag) - - - at builtinify -def cosh(x): - """cosh(x) - - Return 
the hyperbolic cosine of x.""" - - x = _to_complex(x) - real = math.cos(x.imag) * math.cosh(x.real) - imag = math.sin(x.imag) * math.sinh(x.real) - return complex(real, imag) - - - at builtinify -def exp(x): - """exp(x) - - Return the exponential value e**x.""" - - x = _to_complex(x) - l = math.exp(x.real) - real = l * math.cos(x.imag) - imag = l * math.sin(x.imag) - return complex(real, imag) - - - at builtinify -def log(x, base=None): - """log(x) - - Return the natural logarithm of x.""" - - if base is not None: - return log(x) / log(base) - x = _to_complex(x) - l = math.hypot(x.real,x.imag) - imag = math.atan2(x.imag, x.real) - real = math.log(l) - return complex(real, imag) - - - at builtinify -def log10(x): - """log10(x) - - Return the base-10 logarithm of x.""" - - x = _to_complex(x) - l = math.hypot(x.real, x.imag) - imag = math.atan2(x.imag, x.real)/math.log(10.) - real = math.log10(l) - return complex(real, imag) - - - at builtinify -def sin(x): - """sin(x) - - Return the sine of x.""" - - x = _to_complex(x) - real = math.sin(x.real) * math.cosh(x.imag) - imag = math.cos(x.real) * math.sinh(x.imag) - return complex(real, imag) - - - at builtinify -def sinh(x): - """sinh(x) - - Return the hyperbolic sine of x.""" - - x = _to_complex(x) - real = math.cos(x.imag) * math.sinh(x.real) - imag = math.sin(x.imag) * math.cosh(x.real) - return complex(real, imag) - - - at builtinify -def sqrt(x): - """sqrt(x) - - Return the square root of x.""" - - x = _to_complex(x) - if x.real == 0. 
and x.imag == 0.: - real, imag = 0, 0 - else: - s = math.sqrt(0.5*(math.fabs(x.real) + math.hypot(x.real,x.imag))) - d = 0.5*x.imag/s - if x.real > 0.: - real = s - imag = d - elif x.imag >= 0.: - real = d - imag = s - else: - real = -d - imag = -s - return complex(real, imag) - -_sqrt_half = sqrt(_half) - - - at builtinify -def tan(x): - """tan(x) - - Return the tangent of x.""" - - x = _to_complex(x) - sr = math.sin(x.real) - cr = math.cos(x.real) - shi = math.sinh(x.imag) - chi = math.cosh(x.imag) - rs = sr * chi - is_ = cr * shi - rc = cr * chi - ic = -sr * shi - d = rc*rc + ic * ic - real = (rs*rc + is_*ic) / d - imag = (is_*rc - rs*ic) / d - return complex(real, imag) - - - at builtinify -def tanh(x): - """tanh(x) - - Return the hyperbolic tangent of x.""" - - x = _to_complex(x) - si = math.sin(x.imag) - ci = math.cos(x.imag) - shr = math.sinh(x.real) - chr = math.cosh(x.real) - rs = ci * shr - is_ = si * chr - rc = ci * chr - ic = si * shr - d = rc*rc + ic*ic - real = (rs*rc + is_*ic) / d - imag = (is_*rc - rs*ic) / d - return complex(real, imag) - -def isnan(x): - """isnan(z) -> bool - Checks if the real or imaginary part of z not a number (NaN)""" - x = _to_complex(x) - return math.isnan(x.real) or math.isnan(x.imag) - -def isinf(x): - """isnan(z) -> bool - Checks if the real or imaginary part of z is infinite""" - x = _to_complex(x) - return math.isinf(x.real) or math.isinf(x.imag) diff --git a/pypy/module/readline/c_readline.py b/pypy/module/readline/c_readline.py deleted file mode 100644 --- a/pypy/module/readline/c_readline.py +++ /dev/null @@ -1,77 +0,0 @@ -from pypy.rpython.tool import rffi_platform as platform -from pypy.rpython.lltypesystem import lltype, rffi -from pypy.interpreter.error import OperationError -from pypy.interpreter.gateway import ObjSpace, interp2app -from pypy.translator.tool.cbuild import ExternalCompilationInfo - -# On various platforms, linking only with libreadline is not enough; -# we also need to link with some variant of 
curses or libtermcap. -# We follow the logic of CPython below. -def try_with_lib(extralibs, **kwds): - global most_recent_error - # at least on Gentoo Linux, readline.h doesn't compile if stdio.h is not - # included before - eci = ExternalCompilationInfo( - includes = ["stdio.h", "readline/readline.h", "readline/history.h"], - libraries = extralibs + ['readline'], - ) - try: - platform.verify_eci(eci) - return eci - except platform.CompilationError, e: - most_recent_error = e - return None - -eci = (try_with_lib([]) or - try_with_lib(['ncursesw']) or - try_with_lib(['ncurses']) or - try_with_lib(['curses']) or - try_with_lib(['termcap'], library_dirs=['/usr/lib/termcap'])) -if eci is None: - raise most_recent_error - -# ____________________________________________________________ - -def external(name, args, result): - return rffi.llexternal(name, args, result, compilation_info=eci) - -# get a binding to c library functions and define their args and return types -# char *readline(char *) -c_readline = external('readline', [rffi.CCHARP], rffi.CCHARP) - -# void rl_initiliaze(void) -c_rl_initialize = external('rl_initialize', [], lltype.Void) - -# void using_history(void) -c_using_history = external('using_history', [], lltype.Void) - -# void add_history(const char *) -c_add_history = external('add_history', [rffi.CCHARP], lltype.Void) - -#------------------------------------------------------------ -# special initialization of readline - -class ReadlineState(object): - lastline = "" # XXX possibly temporary hack -readlinestate = ReadlineState() - -def setup_readline(space, w_module): - c_using_history() - # XXX CPython initializes more stuff here - c_rl_initialize() - # install sys.__raw_input__, a hook that will be used by raw_input() - space.setitem(space.sys.w_dict, space.wrap('__raw_input__'), - space.wrap(app_readline_func)) - -def readline_func(space, prompt): - ll_res = c_readline(prompt) - if not ll_res: - raise OperationError(space.w_EOFError, space.w_None) - 
res = rffi.charp2str(ll_res) - if res and res != readlinestate.lastline: - readlinestate.lastline = res - c_add_history(res) - return space.wrap(res) - -readline_func.unwrap_spec = [ObjSpace, str] -app_readline_func = interp2app(readline_func) diff --git a/pypy/module/readline/app_stub.py b/pypy/module/readline/app_stub.py deleted file mode 100644 --- a/pypy/module/readline/app_stub.py +++ /dev/null @@ -1,13 +0,0 @@ -# NOT_RPYTHON - -def stub(*args, **kwds): - import warnings - warnings.warn("the 'readline' module is only a stub so far") - -def stub_str(*args, **kwds): - stub() - return '' - -def stub_int(*args, **kwds): - stub() - return 0 diff --git a/pypy/translator/c/src/math.c b/pypy/translator/c/src/math.c deleted file mode 100644 --- a/pypy/translator/c/src/math.c +++ /dev/null @@ -1,256 +0,0 @@ -/* Definitions of some C99 math library functions, for those platforms - that don't implement these functions already. */ - -#include - -/* The following macros are copied from CPython header files */ - -#ifdef _MSC_VER -#include -#define PyPy_IS_NAN _isnan -#define PyPy_IS_INFINITY(X) (!_finite(X) && !_isnan(X)) -#define copysign _copysign -#else -#define PyPy_IS_NAN(X) ((X) != (X)) -#define PyPy_IS_INFINITY(X) ((X) && \ - (Py_FORCE_DOUBLE(X)*0.5 == Py_FORCE_DOUBLE(X))) -#endif - -#undef PyPy_NAN - -int -_pypy_math_isinf(double x) -{ - return PyPy_IS_INFINITY(x); -} - -int -_pypy_math_isnan(double x) -{ - return PyPy_IS_NAN(x); -} - -/* The following copyright notice applies to the original - implementations of acosh, asinh and atanh. */ - -/* - * ==================================================== - * Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved. - * - * Developed at SunPro, a Sun Microsystems, Inc. business. - * Permission to use, copy, modify, and distribute this - * software is freely granted, provided that this notice - * is preserved. 
- * ==================================================== - */ - -double _pypy_math_log1p(double x); - -static const double ln2 = 6.93147180559945286227E-01; -static const double two_pow_m28 = 3.7252902984619141E-09; /* 2**-28 */ -static const double two_pow_p28 = 268435456.0; /* 2**28 */ -static const double zero = 0.0; - -/* acosh(x) - * Method : - * Based on - * acosh(x) = log [ x + sqrt(x*x-1) ] - * we have - * acosh(x) := log(x)+ln2, if x is large; else - * acosh(x) := log(2x-1/(sqrt(x*x-1)+x)) if x>2; else - * acosh(x) := log1p(t+sqrt(2.0*t+t*t)); where t=x-1. - * - * Special cases: - * acosh(x) is NaN with signal if x<1. - * acosh(NaN) is NaN without signal. - */ - -double -_pypy_math_acosh(double x) -{ - if (PyPy_IS_NAN(x)) { - return x+x; - } - if (x < 1.) { /* x < 1; return a signaling NaN */ - errno = EDOM; -#ifdef PyPy_NAN - return PyPy_NAN; -#else - return (x-x)/(x-x); -#endif - } - else if (x >= two_pow_p28) { /* x > 2**28 */ - if (PyPy_IS_INFINITY(x)) { - return x+x; - } else { - return log(x)+ln2; /* acosh(huge)=log(2x) */ - } - } - else if (x == 1.) { - return 0.0; /* acosh(1) = 0 */ - } - else if (x > 2.) 
{ /* 2 < x < 2**28 */ - double t = x*x; - return log(2.0*x - 1.0 / (x + sqrt(t - 1.0))); - } - else { /* 1 < x <= 2 */ - double t = x - 1.0; - return _pypy_math_log1p(t + sqrt(2.0*t + t*t)); - } -} - - -/* asinh(x) - * Method : - * Based on - * asinh(x) = sign(x) * log [ |x| + sqrt(x*x+1) ] - * we have - * asinh(x) := x if 1+x*x=1, - * := sign(x)*(log(x)+ln2)) for large |x|, else - * := sign(x)*log(2|x|+1/(|x|+sqrt(x*x+1))) if|x|>2, else - * := sign(x)*log1p(|x| + x^2/(1 + sqrt(1+x^2))) - */ - -double -_pypy_math_asinh(double x) -{ - double w; - double absx = fabs(x); - - if (PyPy_IS_NAN(x) || PyPy_IS_INFINITY(x)) { - return x+x; - } - if (absx < two_pow_m28) { /* |x| < 2**-28 */ - return x; /* return x inexact except 0 */ - } - if (absx > two_pow_p28) { /* |x| > 2**28 */ - w = log(absx)+ln2; - } - else if (absx > 2.0) { /* 2 < |x| < 2**28 */ - w = log(2.0*absx + 1.0 / (sqrt(x*x + 1.0) + absx)); - } - else { /* 2**-28 <= |x| < 2= */ - double t = x*x; - w = _pypy_math_log1p(absx + t / (1.0 + sqrt(1.0 + t))); - } - return copysign(w, x); - -} - -/* atanh(x) - * Method : - * 1.Reduced x to positive by atanh(-x) = -atanh(x) - * 2.For x>=0.5 - * 1 2x x - * atanh(x) = --- * log(1 + -------) = 0.5 * log1p(2 * --------) - * 2 1 - x 1 - x - * - * For x<0.5 - * atanh(x) = 0.5*log1p(2x+2x*x/(1-x)) - * - * Special cases: - * atanh(x) is NaN if |x| >= 1 with signal; - * atanh(NaN) is that NaN with no signal; - * - */ - -double -_pypy_math_atanh(double x) -{ - double absx; - double t; - - if (PyPy_IS_NAN(x)) { - return x+x; - } - absx = fabs(x); - if (absx >= 1.) 
{ /* |x| >= 1 */ - errno = EDOM; -#ifdef PyPy_NAN - return PyPy_NAN; -#else - return x/zero; -#endif - } - if (absx < two_pow_m28) { /* |x| < 2**-28 */ - return x; - } - if (absx < 0.5) { /* |x| < 0.5 */ - t = absx+absx; - t = 0.5 * _pypy_math_log1p(t + t*absx / (1.0 - absx)); - } - else { /* 0.5 <= |x| <= 1.0 */ - t = 0.5 * _pypy_math_log1p((absx + absx) / (1.0 - absx)); - } - return copysign(t, x); -} - -/* Mathematically, expm1(x) = exp(x) - 1. The expm1 function is designed - to avoid the significant loss of precision that arises from direct - evaluation of the expression exp(x) - 1, for x near 0. */ - -double -_pypy_math_expm1(double x) -{ - /* For abs(x) >= log(2), it's safe to evaluate exp(x) - 1 directly; this - also works fine for infinities and nans. - - For smaller x, we can use a method due to Kahan that achieves close to - full accuracy. - */ - - if (fabs(x) < 0.7) { - double u; - u = exp(x); - if (u == 1.0) - return x; - else - return (u - 1.0) * x / log(u); - } - else - return exp(x) - 1.0; -} - -/* log1p(x) = log(1+x). The log1p function is designed to avoid the - significant loss of precision that arises from direct evaluation when x is - small. */ - -double -_pypy_math_log1p(double x) -{ - /* For x small, we use the following approach. Let y be the nearest float - to 1+x, then - - 1+x = y * (1 - (y-1-x)/y) - - so log(1+x) = log(y) + log(1-(y-1-x)/y). Since (y-1-x)/y is tiny, the - second term is well approximated by (y-1-x)/y. If abs(x) >= - DBL_EPSILON/2 or the rounding-mode is some form of round-to-nearest - then y-1-x will be exactly representable, and is computed exactly by - (y-1)-x. - - If abs(x) < DBL_EPSILON/2 and the rounding mode is not known to be - round-to-nearest then this method is slightly dangerous: 1+x could be - rounded up to 1+DBL_EPSILON instead of down to 1, and in that case - y-1-x will not be exactly representable any more and the result can be - off by many ulps. 
But this is easily fixed: for a floating-point - number |x| < DBL_EPSILON/2., the closest floating-point number to - log(1+x) is exactly x. - */ - - double y; - if (fabs(x) < DBL_EPSILON/2.) { - return x; - } else if (-0.5 <= x && x <= 1.) { - /* WARNING: it's possible than an overeager compiler - will incorrectly optimize the following two lines - to the equivalent of "return log(1.+x)". If this - happens, then results from log1p will be inaccurate - for small x. */ - y = 1.+x; - return log(y)-((y-1.)-x)/y; - } else { - /* NaNs and infinities should end up here */ - return log(1.+x); - } -} diff --git a/pypy/translator/c/test/test_genc.py b/pypy/translator/c/test/test_genc.py --- a/pypy/translator/c/test/test_genc.py +++ b/pypy/translator/c/test/test_genc.py @@ -272,25 +272,39 @@ res = f1(3) assert res == 1.5 -def test_nan(): - from pypy.translator.c.primitive import isnan, isinf +def test_nan_and_special_values(): + from pypy.rlib.rfloat import isnan, isinf, copysign inf = 1e300 * 1e300 assert isinf(inf) nan = inf/inf assert isnan(nan) - l = [nan] - def f(): - return nan - f1 = compile(f, []) - res = f1() - assert isnan(res) + for value, checker in [ + (inf, lambda x: isinf(x) and x > 0.0), + (-inf, lambda x: isinf(x) and x < 0.0), + (nan, isnan), + (0.0, lambda x: not x and copysign(1., x) == 1.), + (-0.0, lambda x: not x and copysign(1., x) == -1.), + ]: + def f(): + return value + f1 = compile(f, []) + res = f1() + assert checker(res) - def g(x): - return l[x] - g2 = compile(g, [int]) - res = g2(0) - assert isnan(res) + l = [value] + def g(x): + return l[x] + g2 = compile(g, [int]) + res = g2(0) + assert checker(res) + + l2 = [(-value, -value), (value, value)] + def h(x): + return l2[x][1] + h3 = compile(h, [int]) + res = h3(1) + assert checker(res) def test_prebuilt_instance_with_dict(): class A: diff --git a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py --- 
a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py +++ b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py @@ -248,6 +248,7 @@ assert res == 3 def test_remote_sys(self): + skip("Fix me some day maybe") import sys protocol = self.test_env({'sys':sys}) diff --git a/pypy/module/readline/test/test_with_pypy.py b/pypy/module/readline/test/test_with_pypy.py deleted file mode 100644 --- a/pypy/module/readline/test/test_with_pypy.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Test the readline library on top of PyPy. The following tests run -in the PyPy interpreter, itself running on top of CPython -""" - -import py -from pypy.conftest import gettestobjspace -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -class AppTestReadline: - - def setup_class(cls): - # enable usage of the readline mixedmodule - space = gettestobjspace(usemodules=('readline',)) - cls.space = space - - def test_basic_import(self): - # this is interpreted by PyPy - import readline - readline.readline - # XXX test more diff --git a/pypy/module/mmap/app_mmap.py b/pypy/module/mmap/app_mmap.py deleted file mode 100644 --- a/pypy/module/mmap/app_mmap.py +++ /dev/null @@ -1,5 +0,0 @@ -ACCESS_READ = 1 -ACCESS_WRITE = 2 -ACCESS_COPY = 3 - -error = EnvironmentError diff --git a/pypy/jit/backend/test/test_random.py b/pypy/jit/backend/test/test_random.py --- a/pypy/jit/backend/test/test_random.py +++ b/pypy/jit/backend/test/test_random.py @@ -9,6 +9,7 @@ from pypy.jit.metainterp.resoperation import ResOperation, rop from pypy.jit.metainterp.executor import execute_nonspec from pypy.jit.metainterp.resoperation import opname +from pypy.jit.codewriter import longlong class PleaseRewriteMe(Exception): pass @@ -100,7 +101,8 @@ ## 'ConstAddr(llmemory.cast_ptr_to_adr(%s_vtable), cpu)' ## % name) elif isinstance(v, ConstFloat): - args.append('ConstFloat(%r)' % v.value) + 
args.append('ConstFloat(longlong.getfloatstorage(%r))' + % v.getfloat()) elif isinstance(v, ConstInt): args.append('ConstInt(%s)' % v.value) else: @@ -182,8 +184,8 @@ if hasattr(self.loop, 'inputargs'): for i, v in enumerate(self.loop.inputargs): if isinstance(v, (BoxFloat, ConstFloat)): - print >>s, ' cpu.set_future_value_float(%d, %r)' % (i, - v.value) + print >>s, (' cpu.set_future_value_float(%d,' + 'longlong.getfloatstorage(%r))' % (i, v.getfloat())) else: print >>s, ' cpu.set_future_value_int(%d, %d)' % (i, v.value) @@ -194,8 +196,8 @@ fail_args = self.should_fail_by.getfailargs() for i, v in enumerate(fail_args): if isinstance(v, (BoxFloat, ConstFloat)): - print >>s, (' assert cpu.get_latest_value_float(%d) == %r' - % (i, v.value)) + print >>s, (' assert longlong.getrealfloat(' + 'cpu.get_latest_value_float(%d)) == %r' % (i, v.value)) else: print >>s, (' assert cpu.get_latest_value_int(%d) == %d' % (i, v.value)) @@ -244,7 +246,7 @@ elif r.random() < 0.75 or not builder.cpu.supports_floats: self.put(builder, [ConstInt(r.random_integer())]) else: - self.put(builder, [ConstFloat(r.random_float())]) + self.put(builder, [ConstFloat(r.random_float_storage())]) class BinaryOperation(AbstractOperation): def __init__(self, opnum, and_mask=-1, or_mask=0, boolres=False): @@ -302,16 +304,16 @@ raise CannotProduceOperation k = r.random() if k < 0.18: - v_first = ConstFloat(r.random_float()) + v_first = ConstFloat(r.random_float_storage()) else: v_first = r.choice(builder.floatvars) if k > 0.82: - v_second = ConstFloat(r.random_float()) + v_second = ConstFloat(r.random_float_storage()) else: v_second = r.choice(builder.floatvars) - if abs(v_first.value) > 1E100 or abs(v_second.value) > 1E100: + if abs(v_first.getfloat()) > 1E100 or abs(v_second.getfloat()) > 1E100: raise CannotProduceOperation # avoid infinities - if abs(v_second.value) < 1E-100: + if abs(v_second.getfloat()) < 1E-100: raise CannotProduceOperation # e.g. 
division by zero error self.put(builder, [v_first, v_second]) @@ -330,7 +332,7 @@ if not builder.floatvars: raise CannotProduceOperation box = r.choice(builder.floatvars) - if not (-sys.maxint-1 <= box.value <= sys.maxint): + if not (-sys.maxint-1 <= box.getfloat() <= sys.maxint): raise CannotProduceOperation # would give an overflow self.put(builder, [box]) @@ -480,9 +482,13 @@ if k < 1.0: x += k return x + def get_random_float_storage(): + x = get_random_float() + return longlong.getfloatstorage(x) r.random_integer = get_random_integer r.random_char = get_random_char r.random_float = get_random_float + r.random_float_storage = get_random_float_storage return r def get_cpu(): @@ -516,7 +522,7 @@ at_least_once = 0 for i in range(pytest.config.option.n_vars): if r.random() < k and i != at_least_once: - startvars.append(BoxFloat(r.random_float())) + startvars.append(BoxFloat(r.random_float_storage())) else: startvars.append(BoxInt(r.random_integer())) assert len(dict.fromkeys(startvars)) == len(startvars) diff --git a/pypy/module/signal/app_signal.py b/pypy/module/signal/app_signal.py deleted file mode 100644 --- a/pypy/module/signal/app_signal.py +++ /dev/null @@ -1,10 +0,0 @@ - - -def default_int_handler(signum, frame): - """ - default_int_handler(...) - - The default handler for SIGINT installed by Python. - It raises KeyboardInterrupt. - """ - raise KeyboardInterrupt() diff --git a/pypy/module/itertools/test/errors.txt b/pypy/module/itertools/test/errors.txt deleted file mode 100644 --- a/pypy/module/itertools/test/errors.txt +++ /dev/null @@ -1,67 +0,0 @@ - - -Here are the remaining errors of CPython 2.5's test_itertools. FWIW I -consider them all as obscure undocumented implementation details. 
- - -====================================================================== -ERROR: test_islice (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "test_itertools.py", line 285, in test_islice - self.assertRaises(ValueError, islice, xrange(10), 'a') - File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 322, in failUnlessRaises - return - File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 320, in failUnlessRaises - callableObj(*args, **kwargs) -TypeError: expected integer, got str object - -====================================================================== -ERROR: test_tee (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 376, in test_tee - c = type(a)('def') -TypeError: default __new__ takes no parameters - -====================================================================== -ERROR: test_repeat (__main__.LengthTransparency) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 690, in test_repeat - from test.test_iterlen import len -ImportError: cannot import name 'len' - -====================================================================== -ERROR: test_keywords_in_subclass (__main__.SubclassWithKwargsTest) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 760, in test_keywords_in_subclass - class Subclass(cls): -TypeError: type 'repeat' is not an acceptable base class - -====================================================================== -FAIL: test_count (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 59, in test_count - self.assertEqual(repr(c), 'count(3)') 
-AssertionError: '' != 'count(3)' - -====================================================================== -FAIL: test_izip (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 199, in test_izip - self.assertEqual(min(ids), max(ids)) -AssertionError: 149283404 != 150789644 - -====================================================================== -FAIL: test_repeat (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 214, in test_repeat - self.assertEqual(repr(r), 'repeat((1+0j))') -AssertionError: '' != 'repeat((1+0j))' - ----------------------------------------------------------------------- diff --git a/pypy/module/math/_genmath.py b/pypy/module/math/_genmath.py deleted file mode 100644 --- a/pypy/module/math/_genmath.py +++ /dev/null @@ -1,62 +0,0 @@ -# ONESHOT SCRIPT (probably can go away soon) -# to generate the mixed module 'math' (see same directory) -import py -import math -import re -import sys -rex_arg = re.compile(".*\((.*)\).*") - -if __name__ == '__main__': - print py.code.Source(""" - import math - from pypy.interpreter.gateway import ObjSpace - - """) - names = [] - for name, func in math.__dict__.items(): - if not callable(func): - continue - sig = func.__doc__.split('\n')[0].strip() - sig = sig.split('->')[0].strip() - m = rex_arg.match(sig) - assert m - args = m.group(1) - args = ", ".join(args.split(',')) - sig = sig.replace('(', '(space,') - sig = ", ".join(sig.split(',')) - argc = len(args.split(',')) - unwrap_spec = ['ObjSpace'] - unwrap_spec += ['float'] * argc - unwrap_spec = ", ".join(unwrap_spec) - doc = func.__doc__.replace('\n', '\n ') - - print py.code.Source(''' - def %(sig)s: - """%(doc)s - """ - return space.wrap(math.%(name)s(%(args)s)) - %(name)s.unwrap_spec = [%(unwrap_spec)s] - ''' % locals()) - names.append(name) - - 
print >>sys.stderr, py.code.Source(""" - # Package initialisation - from pypy.interpreter.mixedmodule import MixedModule - - class Module(MixedModule): - appleveldefs = { - } - interpleveldefs = { - """) - - for name in names: - space = " " * (15-len(name)) - print >>sys.stderr, ( - " %(name)r%(space)s: 'interp_math.%(name)s'," % locals()) - print >>sys.stderr, py.code.Source(""" - } - """) - - - - diff --git a/pypy/module/zlib/app_zlib.py b/pypy/module/zlib/app_zlib.py deleted file mode 100644 --- a/pypy/module/zlib/app_zlib.py +++ /dev/null @@ -1,11 +0,0 @@ - -""" -Application-level definitions for the zlib module. - -NOT_RPYTHON -""" - -class error(Exception): - """ - Raised by zlib operations. - """ diff --git a/pypy/tool/pytest/appsupport.py b/pypy/tool/pytest/appsupport.py --- a/pypy/tool/pytest/appsupport.py +++ b/pypy/tool/pytest/appsupport.py @@ -176,10 +176,7 @@ w_BuiltinAssertionError = space.getitem(space.builtin.w_dict, space.wrap('AssertionError')) w_metaclass = space.type(w_BuiltinAssertionError) - w_init = space.wrap(gateway.interp2app_temp(my_init, - unwrap_spec=[gateway.ObjSpace, - gateway.W_Root, - gateway.Arguments])) + w_init = space.wrap(gateway.interp2app_temp(my_init)) w_dict = space.newdict() space.setitem(w_dict, space.wrap('__init__'), w_init) return space.call_function(w_metaclass, @@ -247,11 +244,7 @@ raise OperationError(space.w_AssertionError, space.wrap("DID NOT RAISE")) -app_raises = gateway.interp2app_temp(pypyraises, - unwrap_spec=[gateway.ObjSpace, - gateway.W_Root, - gateway.W_Root, - gateway.Arguments]) +app_raises = gateway.interp2app_temp(pypyraises) def pypyskip(space, w_message): """skip a test at app-level. 
""" diff --git a/pypy/module/parser/app_helpers.py b/pypy/module/parser/app_helpers.py deleted file mode 100644 --- a/pypy/module/parser/app_helpers.py +++ /dev/null @@ -1,2 +0,0 @@ -class ParserError(Exception): - pass diff --git a/pypy/module/readline/test/__init__.py b/pypy/module/readline/test/__init__.py deleted file mode 100644 --- a/pypy/module/readline/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/module/select/app_select.py b/pypy/module/select/app_select.py deleted file mode 100644 --- a/pypy/module/select/app_select.py +++ /dev/null @@ -1,2 +0,0 @@ -class error(Exception): - pass diff --git a/pypy/objspace/std/test/helper.py b/pypy/objspace/std/test/helper.py deleted file mode 100644 --- a/pypy/objspace/std/test/helper.py +++ /dev/null @@ -1,69 +0,0 @@ -def raises(excp, func, *args): - try: - func(*args) - assert 1 == 0 - except excp:pass - -def assertEqual(a, b): - assert a == b - -def assertNotEqual(a, b): - assert a != b - -def assertIs(a, b): - assert a is b - -# complex specific tests - -EPS = 1e-9 - -def assertAlmostEqual(a, b): - if isinstance(a, complex): - if isinstance(b, complex): - assert a.real - b.real < EPS - assert a.imag - b.imag < EPS - else: - assert a.real - b < EPS - assert a.imag < EPS - else: - if isinstance(b, complex): - assert a - b.real < EPS - assert b.imag < EPS - else: - assert a - b < EPS - -def assertCloseAbs(x, y, eps=1e-9): - """Return true iff floats x and y "are close\"""" - # put the one with larger magnitude second - if abs(x) > abs(y): - x, y = y, x - if y == 0: - return abs(x) < eps - if x == 0: - return abs(y) < eps - # check that relative difference < eps - assert abs((x-y)/y) < eps - -def assertClose(x, y, eps=1e-9): - """Return true iff complexes x and y "are close\"""" - assertCloseAbs(x.real, y.real, eps) - assertCloseAbs(x.imag, y.imag, eps) - - -def check_div(x, y): - """Compute complex z=x*y, and check that z/x==y and z/y==x.""" - z = x * y - if x != 0: - q = z / x - assertClose(q, y) 
- q = z.__div__(x) - assertClose(q, y) - q = z.__truediv__(x) - assertClose(q, y) - if y != 0: - q = z / y - assertClose(q, x) - q = z.__div__(y) - assertClose(q, x) - q = z.__truediv__(y) - assertClose(q, x) diff --git a/pypy/module/termios/app_termios.py b/pypy/module/termios/app_termios.py deleted file mode 100644 --- a/pypy/module/termios/app_termios.py +++ /dev/null @@ -1,3 +0,0 @@ - -class error(Exception): - pass diff --git a/pypy/test_all.py b/pypy/test_all.py --- a/pypy/test_all.py +++ b/pypy/test_all.py @@ -1,4 +1,18 @@ #! /usr/bin/env python +""" +PyPy Test runner interface +-------------------------- + +Running test_all.py is equivalent to running py.test +(either installed from the py lib package, or from ../py/bin/). + +For more information, use test_all.py -h. +""" +import sys, os + +if len(sys.argv) == 1 and os.path.dirname(sys.argv[0]) in '.': + print >> sys.stderr, __doc__ + sys.exit(2) if __name__ == '__main__': import tool.autopath diff --git a/pypy/module/_ssl/app_ssl.py b/pypy/module/_ssl/app_ssl.py deleted file mode 100644 --- a/pypy/module/_ssl/app_ssl.py +++ /dev/null @@ -1,7 +0,0 @@ -import _socket - -class SSLError(_socket.error): - pass - -__doc__ = """Implementation module for SSL socket operations. 
-See the socket module for documentation.""" diff --git a/pypy/module/pyexpat/app_pyexpat.py b/pypy/module/pyexpat/app_pyexpat.py deleted file mode 100644 --- a/pypy/module/pyexpat/app_pyexpat.py +++ /dev/null @@ -1,6 +0,0 @@ -class ExpatError(Exception): - def __init__(self, msg, code, lineno, colno): - Exception.__init__(self, msg) - self.code = code - self.lineno = lineno - self.colno = colno diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -192,7 +192,7 @@ RegrTest('test_dbm.py'), RegrTest('test_decimal.py'), RegrTest('test_decorators.py', core=True), - RegrTest('test_deque.py', core=True), + RegrTest('test_deque.py', core=True, usemodules='_collections'), RegrTest('test_descr.py', core=True, usemodules='_weakref'), RegrTest('test_descrtut.py', core=True), RegrTest('test_dict.py', core=True), @@ -419,7 +419,7 @@ RegrTest('test_sundry.py'), RegrTest('test_symtable.py', skip="implementation detail"), RegrTest('test_syntax.py', core=True), - RegrTest('test_sys.py', core=True), + RegrTest('test_sys.py', core=True, usemodules='struct'), RegrTest('test_sys_settrace.py', core=True), RegrTest('test_sys_setprofile.py', core=True), RegrTest('test_sysconfig.py'), @@ -495,8 +495,8 @@ RegrTest('test_coding.py'), RegrTest('test_complex_args.py'), RegrTest('test_contextlib.py', usemodules="thread"), - RegrTest('test_ctypes.py', usemodules="_rawffi"), - RegrTest('test_defaultdict.py'), + RegrTest('test_ctypes.py', usemodules="_rawffi thread"), + RegrTest('test_defaultdict.py', usemodules='_collections'), RegrTest('test_email_renamed.py'), RegrTest('test_exception_variations.py'), RegrTest('test_float.py'), @@ -516,8 +516,8 @@ RegrTest('test_with.py'), RegrTest('test_wsgiref.py'), RegrTest('test_xdrlib.py'), - RegrTest('test_xml_etree.py', skip="unsupported ext module"), - RegrTest('test_xml_etree_c.py', skip="unsupported ext module"), + RegrTest('test_xml_etree.py'), + RegrTest('test_xml_etree_c.py'), 
RegrTest('test_zipfile64.py'), ] @@ -672,6 +672,8 @@ cmd += ' --pdb' if self.config.option.capture == 'no': status = os.system(cmd) + stdout.write('') + stderr.write('') else: status = os.system("%s >>%s 2>>%s" %(cmd, stdout, stderr)) if os.WIFEXITED(status): @@ -690,8 +692,10 @@ if test_stderr.rfind(26*"=" + "skipped" + 26*"=") != -1: skipped = True outcome = 'OK' - if not exit_status: - if 'FAIL' in test_stdout or re.search('[^:]ERROR', test_stderr): + if not exit_status: + # match "FAIL" but not e.g. "FAILURE", which is in the output of a + # test in test_zipimport_support.py + if re.search(r'\bFAIL\b', test_stdout) or re.search('[^:]ERROR', test_stderr): outcome = 'FAIL' exit_status = 2 elif timedout: diff --git a/pypy/module/cpyext/include/modsupport.inl b/pypy/module/cpyext/include/modsupport.inl deleted file mode 100644 --- a/pypy/module/cpyext/include/modsupport.inl +++ /dev/null @@ -1,29 +0,0 @@ -/* -*- C -*- */ -/* Module support interface */ - -#ifndef Py_MODSUPPORT_INL -#define Py_MODSUPPORT_INL -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef PYPY_STANDALONE -/* XXX1 On translation, forwarddecl.h is included after this file */ -/* XXX2 genc.py transforms "const char*" into "char*" */ -extern PyObject *_Py_InitPyPyModule(char *, PyMethodDef *, char *, PyObject *, int); -#endif - -Py_LOCAL_INLINE(PyObject *) Py_InitModule4( - const char* name, PyMethodDef* methods, - const char* doc, PyObject *self, - int api_version) -{ - return _Py_InitPyPyModule((char*)name, methods, - (char*)doc, self, - api_version); -} - -#ifdef __cplusplus -} -#endif -#endif /* !Py_MODSUPPORT_INL */ diff --git a/pypy/doc/config/objspace.usemodules.readline.txt b/pypy/doc/config/objspace.usemodules.readline.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.readline.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'readline' module. 
diff --git a/pypy/module/binascii/app_binascii.py b/pypy/module/binascii/app_binascii.py deleted file mode 100644 --- a/pypy/module/binascii/app_binascii.py +++ /dev/null @@ -1,6 +0,0 @@ - -class Error(Exception): - pass - -class Incomplete(Exception): - pass diff --git a/pypy/module/readline/interp_readline.py b/pypy/module/readline/interp_readline.py deleted file mode 100644 --- a/pypy/module/readline/interp_readline.py +++ /dev/null @@ -1,23 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.baseobjspace import ObjSpace - -from pypy.module.readline import c_readline -from pypy.rpython.lltypesystem import rffi - -#------------------------------------------------------------ -# exported API (see interpleveldefs in __init__.py) -# -def readline(space, prompt): - return space.wrap(rffi.charp2str(c_readline.c_readline(prompt))) -readline.unwrap_spec = [ObjSpace, str] - -def setcompleter(space, w_callback): - """Set or remove the completer function. - The function is called as function(text, state), - for state in 0, 1, 2, ..., until it returns a non-string. - It should return the next possible completion starting with 'text'. 
- """ - # XXX set internal completion function - diff --git a/pypy/module/zipimport/app_zipimport.py b/pypy/module/zipimport/app_zipimport.py deleted file mode 100644 --- a/pypy/module/zipimport/app_zipimport.py +++ /dev/null @@ -1,4 +0,0 @@ - -class ZipImportError(ImportError): - pass - diff --git a/pypy/conftest.py b/pypy/conftest.py --- a/pypy/conftest.py +++ b/pypy/conftest.py @@ -155,7 +155,7 @@ def str_w(self, w_str): return w_str - def newdict(self): + def newdict(self, module=None): return {} def newtuple(self, iterable): From commits-noreply at bitbucket.org Sun Mar 6 09:26:36 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sun, 6 Mar 2011 09:26:36 +0100 (CET) Subject: [pypy-svn] pypy pytest2: update to current py and pytest trunk Message-ID: <20110306082636.55E44282C19@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42444:61aefc3c60b5 Date: 2011-03-06 09:25 +0100 http://bitbucket.org/pypy/pypy/changeset/61aefc3c60b5/ Log: update to current py and pytest trunk diff --git a/_pytest/mark.py b/_pytest/mark.py --- a/_pytest/mark.py +++ b/_pytest/mark.py @@ -89,8 +89,8 @@ class MarkDecorator: """ A decorator for test functions and test classes. When applied it will create :class:`MarkInfo` objects which may be - :ref:`retrieved by hooks as item keywords` MarkDecorator instances - are usually created by writing:: + :ref:`retrieved by hooks as item keywords `. + MarkDecorator instances are often created like this:: mark1 = py.test.mark.NAME # simple MarkDecorator mark2 = py.test.mark.NAME(name1=value) # parametrized MarkDecorator diff --git a/_pytest/tmpdir.py b/_pytest/tmpdir.py --- a/_pytest/tmpdir.py +++ b/_pytest/tmpdir.py @@ -59,7 +59,7 @@ def pytest_funcarg__tmpdir(request): """return a temporary directory path object - unique to each test function invocation, + which is unique to each test function invocation, created as a sub directory of the base temporary directory. The returned object is a `py.path.local`_ path object. 
diff --git a/_pytest/main.py b/_pytest/main.py --- a/_pytest/main.py +++ b/_pytest/main.py @@ -121,9 +121,6 @@ def compatproperty(name): def fget(self): - #print "retrieving %r property from %s" %(name, self.fspath) - py.log._apiwarn("2.0", "use pytest.%s for " - "test collection and item classes" % name) return getattr(pytest, name) return property(fget, None, None, "deprecated attribute %r, use pytest.%s" % (name,name)) @@ -157,6 +154,14 @@ File = compatproperty("File") Item = compatproperty("Item") + def _getcustomclass(self, name): + cls = getattr(self, name) + if cls != getattr(pytest, name): + py.log._apiwarn("2.0", "use of node.%s is deprecated, " + "use pytest_pycollect_makeitem(...) to create custom " + "collection nodes" % name) + return cls + def __repr__(self): return "<%s %r>" %(self.__class__.__name__, getattr(self, 'name', None)) @@ -449,7 +454,7 @@ p = p.dirpath() else: p = p.new(basename=p.purebasename+".py") - return p + return str(p) def _parsearg(self, arg): """ return (fspath, names) tuple after checking the file exists. 
""" @@ -495,9 +500,15 @@ node.ihook.pytest_collectstart(collector=node) rep = node.ihook.pytest_make_collect_report(collector=node) if rep.passed: + has_matched = False for x in rep.result: if x.name == name: resultnodes.extend(self.matchnodes([x], nextnames)) + has_matched = True + # XXX accept IDs that don't have "()" for class instances + if not has_matched and len(rep.result) == 1 and x.name == "()": + nextnames.insert(0, name) + resultnodes.extend(self.matchnodes([x], nextnames)) node.ihook.pytest_collectreport(report=rep) return resultnodes diff --git a/py/__init__.py b/py/__init__.py --- a/py/__init__.py +++ b/py/__init__.py @@ -8,7 +8,7 @@ (c) Holger Krekel and others, 2004-2010 """ -__version__ = '1.4.1.dev2' +__version__ = '1.4.2.dev0' from py import _apipkg @@ -145,4 +145,3 @@ }, }) - diff --git a/_pytest/capture.py b/_pytest/capture.py --- a/_pytest/capture.py +++ b/_pytest/capture.py @@ -192,18 +192,16 @@ return rep def pytest_funcarg__capsys(request): - """captures writes to sys.stdout/sys.stderr and makes - them available successively via a ``capsys.readouterr()`` method - which returns a ``(out, err)`` tuple of captured snapshot strings. + """enables capturing of writes to sys.stdout/sys.stderr and makes + captured output available via ``capsys.readouterr()`` method calls + which return a ``(out, err)`` tuple. """ return CaptureFuncarg(py.io.StdCapture) def pytest_funcarg__capfd(request): - """captures writes to file descriptors 1 and 2 and makes - snapshotted ``(out, err)`` string tuples available - via the ``capsys.readouterr()`` method. If the underlying - platform does not have ``os.dup`` (e.g. Jython) tests using - this funcarg will automatically skip. + """enables capturing of writes to file descriptors 1 and 2 and makes + captured output available via ``capsys.readouterr()`` method calls + which return a ``(out, err)`` tuple. 
""" if not hasattr(os, 'dup'): py.test.skip("capfd funcarg needs os.dup") diff --git a/_pytest/python.py b/_pytest/python.py --- a/_pytest/python.py +++ b/_pytest/python.py @@ -73,7 +73,8 @@ if collector._istestclasscandidate(name, obj): #if hasattr(collector.obj, 'unittest'): # return # we assume it's a mixin class for a TestCase derived one - return collector.Class(name, parent=collector) + Class = collector._getcustomclass("Class") + return Class(name, parent=collector) elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): if is_generator(obj): return Generator(name, parent=collector) @@ -213,16 +214,18 @@ extra.append(cls()) plugins = self.getplugins() + extra gentesthook.pcall(plugins, metafunc=metafunc) + Function = self._getcustomclass("Function") if not metafunc._calls: - return self.Function(name, parent=self) + return Function(name, parent=self) l = [] for callspec in metafunc._calls: subname = "%s[%s]" %(name, callspec.id) - function = self.Function(name=subname, parent=self, + function = Function(name=subname, parent=self, callspec=callspec, callobj=funcobj, keywords={callspec.id:True}) l.append(function) return l + class Module(pytest.File, PyCollectorMixin): def _getobj(self): return self._memoizedcall('_obj', self._importtestmodule) @@ -272,7 +275,7 @@ class Class(PyCollectorMixin, pytest.Collector): def collect(self): - return [self.Instance(name="()", parent=self)] + return [self._getcustomclass("Instance")(name="()", parent=self)] def setup(self): setup_class = getattr(self.obj, 'setup_class', None) @@ -297,13 +300,8 @@ class FunctionMixin(PyobjMixin): """ mixin for the code common to Function and Generator. """ - def setup(self): """ perform setup for this test function. 
""" - if inspect.ismethod(self.obj): - name = 'setup_method' - else: - name = 'setup_function' if hasattr(self, '_preservedparent'): obj = self._preservedparent elif isinstance(self.parent, Instance): @@ -311,6 +309,10 @@ self.obj = self._getobj() else: obj = self.parent.obj + if inspect.ismethod(self.obj): + name = 'setup_method' + else: + name = 'setup_function' setup_func_or_method = getattr(obj, name, None) if setup_func_or_method is not None: setup_func_or_method(self.obj) @@ -487,10 +489,11 @@ return True -def getfuncargnames(function): +def getfuncargnames(function, startindex=None): # XXX merge with main.py's varnames argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0] - startindex = py.std.inspect.ismethod(function) and 1 or 0 + if startindex is None: + startindex = py.std.inspect.ismethod(function) and 1 or 0 defaults = getattr(function, 'func_defaults', getattr(function, '__defaults__', None)) or () numdefaults = len(defaults) @@ -519,7 +522,8 @@ self.config = config self.module = module self.function = function - self.funcargnames = getfuncargnames(function) + self.funcargnames = getfuncargnames(function, + startindex=int(cls is not None)) self.cls = cls self.module = module self._calls = [] @@ -527,7 +531,11 @@ def addcall(self, funcargs=None, id=_notexists, param=_notexists): """ add a new call to the underlying test function during the - collection phase of a test run. + collection phase of a test run. Note that request.addcall() is + called during the test collection phase prior and independently + to actual test execution. Therefore you should perform setup + of resources in a funcarg factory which can be instrumented + with the ``param``. :arg funcargs: argument keyword dictionary used when invoking the test function. @@ -537,14 +545,15 @@ list of calls to the test function will be used. :arg param: will be exposed to a later funcarg factory invocation - through the ``request.param`` attribute. 
Setting it (instead of - directly providing a ``funcargs`` ditionary) is called - *indirect parametrization*. Indirect parametrization is - preferable if test values are expensive to setup or can - only be created after certain fixtures or test-run related - initialization code has been run. + through the ``request.param`` attribute. It allows to + defer test fixture setup activities to when an actual + test is run. """ assert funcargs is None or isinstance(funcargs, dict) + if funcargs is not None: + for name in funcargs: + if name not in self.funcargnames: + pytest.fail("funcarg %r not used in this function." % name) if id is None: raise ValueError("id=None not allowed") if id is _notexists: @@ -556,7 +565,13 @@ self._calls.append(CallSpec(funcargs, id, param)) class FuncargRequest: - """ A request for function arguments from a test function. """ + """ A request for function arguments from a test function. + + Note that there is an optional ``param`` attribute in case + there was an invocation to metafunc.addcall(param=...). + If no such call was done in a ``pytest_generate_tests`` + hook, the attribute will not be present. + """ _argprefix = "pytest_funcarg__" _argname = None diff --git a/_pytest/monkeypatch.py b/_pytest/monkeypatch.py --- a/_pytest/monkeypatch.py +++ b/_pytest/monkeypatch.py @@ -14,8 +14,8 @@ monkeypatch.delenv(name, value, raising=True) monkeypatch.syspath_prepend(path) - All modifications will be undone when the requesting - test function finished its execution. The ``raising`` + All modifications will be undone after the requesting + test function has finished. The ``raising`` parameter determines if a KeyError or AttributeError will be raised if the set/deletion operation has no target. 
""" diff --git a/_pytest/terminal.py b/_pytest/terminal.py --- a/_pytest/terminal.py +++ b/_pytest/terminal.py @@ -25,7 +25,7 @@ group._addoption('--tb', metavar="style", action="store", dest="tbstyle", default='long', type="choice", choices=['long', 'short', 'no', 'line', 'native'], - help="traceback print mode (long/short/line/no).") + help="traceback print mode (long/short/line/native/no).") group._addoption('--fulltrace', action="store_true", dest="fulltrace", default=False, help="don't cut any tracebacks (default is to cut).") diff --git a/py/_path/local.py b/py/_path/local.py --- a/py/_path/local.py +++ b/py/_path/local.py @@ -158,11 +158,13 @@ def samefile(self, other): """ return True if 'other' references the same file as 'self'. """ + if not iswin32: + return py.error.checked_call( + os.path.samefile, str(self), str(other)) if self == other: return True - if not iswin32: - return py.error.checked_call(os.path.samefile, str(self), str(other)) - return False + other = os.path.abspath(str(other)) + return self == other def remove(self, rec=1, ignore_errors=False): """ remove a file or directory (or a directory tree if rec=1). @@ -747,7 +749,7 @@ pass try: os.symlink(src, dest) - except (OSError, AttributeError): # AttributeError on win32 + except (OSError, AttributeError, NotImplementedError): pass return udir diff --git a/_pytest/unittest.py b/_pytest/unittest.py --- a/_pytest/unittest.py +++ b/_pytest/unittest.py @@ -102,6 +102,10 @@ def runtest(self): self._testcase(result=self) + def _prunetraceback(self, excinfo): + pytest.Function._prunetraceback(self, excinfo) + excinfo.traceback = excinfo.traceback.filter(lambda x:not x.frame.f_globals.get('__unittest')) + @pytest.mark.tryfirst def pytest_runtest_makereport(item, call): if isinstance(item, TestCaseFunction): diff --git a/_pytest/recwarn.py b/_pytest/recwarn.py --- a/_pytest/recwarn.py +++ b/_pytest/recwarn.py @@ -8,6 +8,9 @@ * ``pop(category=None)``: return last warning matching the category. 
* ``clear()``: clear list of warnings + + See http://docs.python.org/library/warnings.html for information + on warning categories. """ if sys.version_info >= (2,7): import warnings diff --git a/py/_code/source.py b/py/_code/source.py --- a/py/_code/source.py +++ b/py/_code/source.py @@ -215,7 +215,7 @@ msglines = self.lines[:ex.lineno] if ex.offset: msglines.append(" "*ex.offset + '^') - msglines.append("syntax error probably generated here: %s" % filename) + msglines.append("(code was compiled probably from here: %s)" % filename) newex = SyntaxError('\n'.join(msglines)) newex.offset = ex.offset newex.lineno = ex.lineno diff --git a/_pytest/skipping.py b/_pytest/skipping.py --- a/_pytest/skipping.py +++ b/_pytest/skipping.py @@ -1,6 +1,7 @@ """ support for skip/xfail functions and markers. """ import py, pytest +import sys def pytest_addoption(parser): group = parser.getgroup("general") @@ -32,9 +33,39 @@ return bool(self.holder) __nonzero__ = __bool__ + def wasvalid(self): + return not hasattr(self, 'exc') + def istrue(self): + try: + return self._istrue() + except KeyboardInterrupt: + raise + except: + self.exc = sys.exc_info() + if isinstance(self.exc[1], SyntaxError): + msg = [" " * (self.exc[1].offset + 4) + "^",] + msg.append("SyntaxError: invalid syntax") + else: + msg = py.std.traceback.format_exception_only(*self.exc[:2]) + pytest.fail("Error evaluating %r expression\n" + " %s\n" + "%s" + %(self.name, self.expr, "\n".join(msg)), + pytrace=False) + + def _getglobals(self): + d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} + func = self.item.obj + try: + d.update(func.__globals__) + except AttributeError: + d.update(func.func_globals) + return d + + def _istrue(self): if self.holder: - d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} + d = self._getglobals() if self.holder.args: self.result = False for expr in self.holder.args: @@ -42,7 +73,7 @@ if isinstance(expr, str): result = cached_eval(self.item.config, expr, 
d) else: - result = expr + pytest.fail("expression is not a string") if result: self.result = True self.expr = expr @@ -60,7 +91,7 @@ if not hasattr(self, 'expr'): return "" else: - return "condition: " + self.expr + return "condition: " + str(self.expr) return expl @@ -99,16 +130,17 @@ return rep rep = __multicall__.execute() evalxfail = item._evalxfail - if not item.config.option.runxfail and evalxfail.istrue(): - if call.excinfo: - rep.outcome = "skipped" - rep.keywords['xfail'] = evalxfail.getexplanation() - elif call.when == "call": - rep.outcome = "failed" - rep.keywords['xfail'] = evalxfail.getexplanation() - else: - if 'xfail' in rep.keywords: - del rep.keywords['xfail'] + if not item.config.option.runxfail: + if evalxfail.wasvalid() and evalxfail.istrue(): + if call.excinfo: + rep.outcome = "skipped" + rep.keywords['xfail'] = evalxfail.getexplanation() + elif call.when == "call": + rep.outcome = "failed" + rep.keywords['xfail'] = evalxfail.getexplanation() + return rep + if 'xfail' in rep.keywords: + del rep.keywords['xfail'] return rep # called by terminalreporter progress reporting @@ -179,7 +211,8 @@ except KeyError: #import sys #print >>sys.stderr, ("cache-miss: %r" % expr) - config._evalcache[expr] = x = eval(expr, d) + exprcode = py.code.compile(expr, mode="eval") + config._evalcache[expr] = x = eval(exprcode, d) return x diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -1,7 +1,7 @@ """ unit and functional testing with Python. 
""" -__version__ = '2.0.1.dev9' +__version__ = '2.0.2.dev4' __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins diff --git a/_pytest/assertion.py b/_pytest/assertion.py --- a/_pytest/assertion.py +++ b/_pytest/assertion.py @@ -12,7 +12,7 @@ help="disable python assert expression reinterpretation."), def pytest_configure(config): - # The _pytesthook attribute on the AssertionError is used by + # The _reprcompare attribute on the py.code module is used by # py._code._assertionnew to detect this plugin was loaded and in # turn call the hooks defined here as part of the # DebugInterpreter. @@ -51,7 +51,7 @@ def pytest_assertrepr_compare(op, left, right): """return specialised explanations for some operators/operands""" width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op - left_repr = py.io.saferepr(left, maxsize=width/2) + left_repr = py.io.saferepr(left, maxsize=int(width/2)) right_repr = py.io.saferepr(right, maxsize=width-len(left_repr)) summary = '%s %s %s' % (left_repr, op, right_repr) @@ -165,4 +165,15 @@ head = text[:index] tail = text[index+len(term):] correct_text = head + tail - return _diff_text(correct_text, text) + diff = _diff_text(correct_text, text) + newdiff = ['%s is contained here:' % py.io.saferepr(term, maxsize=42)] + for line in diff: + if line.startswith('Skipping'): + continue + if line.startswith('- '): + continue + if line.startswith('+ '): + newdiff.append(' ' + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/py/_code/_assertionnew.py b/py/_code/_assertionnew.py --- a/py/_code/_assertionnew.py +++ b/py/_code/_assertionnew.py @@ -267,20 +267,9 @@ result = self.frame.eval(co, **ns) except Exception: raise Failure(explanation) - # Only show result explanation if it's not a builtin call or returns a - # bool. 
- if not isinstance(call.func, ast.Name) or \ - not self._is_builtin_name(call.func): - source = "isinstance(__exprinfo_value, bool)" - co = self._compile(source) - try: - is_bool = self.frame.eval(co, __exprinfo_value=result) - except Exception: - is_bool = False - if not is_bool: - pattern = "%s\n{%s = %s\n}" - rep = self.frame.repr(result) - explanation = pattern % (rep, rep, explanation) + pattern = "%s\n{%s = %s\n}" + rep = self.frame.repr(result) + explanation = pattern % (rep, rep, explanation) return explanation, result def _is_builtin_name(self, name): diff --git a/py/_error.py b/py/_error.py --- a/py/_error.py +++ b/py/_error.py @@ -37,6 +37,8 @@ _errno2class = {} def __getattr__(self, name): + if name[0] == "_": + raise AttributeError(name) eno = getattr(errno, name) cls = self._geterrnoclass(eno) setattr(self, name, cls) From commits-noreply at bitbucket.org Sun Mar 6 19:37:33 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sun, 6 Mar 2011 19:37:33 +0100 (CET) Subject: [pypy-svn] pypy pytest2: inline update to latest py version Message-ID: <20110306183733.ED7FF282BDE@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42445:beccba93862b Date: 2011-03-06 18:44 +0100 http://bitbucket.org/pypy/pypy/changeset/beccba93862b/ Log: inline update to latest py version diff --git a/_pytest/terminal.py b/_pytest/terminal.py --- a/_pytest/terminal.py +++ b/_pytest/terminal.py @@ -32,22 +32,19 @@ def pytest_configure(config): config.option.verbose -= config.option.quiet - if config.option.collectonly: - reporter = CollectonlyReporter(config) - else: - # we try hard to make printing resilient against - # later changes on FD level. 
- stdout = py.std.sys.stdout - if hasattr(os, 'dup') and hasattr(stdout, 'fileno'): - try: - newfd = os.dup(stdout.fileno()) - #print "got newfd", newfd - except ValueError: - pass - else: - stdout = os.fdopen(newfd, stdout.mode, 1) - config._toclose = stdout - reporter = TerminalReporter(config, stdout) + # we try hard to make printing resilient against + # later changes on FD level. + stdout = py.std.sys.stdout + if hasattr(os, 'dup') and hasattr(stdout, 'fileno'): + try: + newfd = os.dup(stdout.fileno()) + #print "got newfd", newfd + except ValueError: + pass + else: + stdout = os.fdopen(newfd, stdout.mode, 1) + config._toclose = stdout + reporter = TerminalReporter(config, stdout) config.pluginmanager.register(reporter, 'terminalreporter') if config.option.debug or config.option.traceconfig: def mywriter(tags, args): @@ -273,11 +270,44 @@ for line in flatten(lines): self.write_line(line) - def pytest_collection_finish(self): + def pytest_collection_finish(self, session): + if self.config.option.collectonly: + self._printcollecteditems(session.items) + if self.stats.get('failed'): + self._tw.sep("!", "collection failures") + for rep in self.stats.get('failed'): + rep.toterminal(self._tw) + return 1 + return 0 if not self.showheader: return #for i, testarg in enumerate(self.config.args): # self.write_line("test path %d: %s" %(i+1, testarg)) + + def _printcollecteditems(self, items): + # to print out items and their parent collectors + # we take care to leave out Instances aka () + # because later versions are going to get rid of them anyway + if self.config.option.verbose < 0: + for item in items: + nodeid = item.nodeid + nodeid = nodeid.replace("::()::", "::") + self._tw.line(nodeid) + return + stack = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[:len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack):]: + stack.append(col) + #if col.name == 
"()": + # continue + indent = (len(stack)-1) * " " + self._tw.line("%s%s" %(indent, col)) def pytest_sessionfinish(self, exitstatus, __multicall__): __multicall__.execute() @@ -403,52 +433,6 @@ self.write_sep("=", "%d tests deselected by %r" %( len(self.stats['deselected']), self.config.option.keyword), bold=True) - -class CollectonlyReporter: - INDENT = " " - - def __init__(self, config, out=None): - self.config = config - if out is None: - out = py.std.sys.stdout - self._tw = py.io.TerminalWriter(out) - self.indent = "" - self._failed = [] - - def outindent(self, line): - self._tw.line(self.indent + str(line)) - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self._tw.line("INTERNALERROR> " + line) - - def pytest_collectstart(self, collector): - if collector.session != collector: - self.outindent(collector) - self.indent += self.INDENT - - def pytest_itemcollected(self, item): - self.outindent(item) - - def pytest_collectreport(self, report): - if not report.passed: - if hasattr(report.longrepr, 'reprcrash'): - msg = report.longrepr.reprcrash.message - else: - # XXX unify (we have CollectErrorRepr here) - msg = str(report.longrepr[2]) - self.outindent("!!! %s !!!" % msg) - #self.outindent("!!! 
error !!!") - self._failed.append(report) - self.indent = self.indent[:-len(self.INDENT)] - - def pytest_collection_finish(self): - if self._failed: - self._tw.sep("!", "collection failures") - for rep in self._failed: - rep.toterminal(self._tw) - return self._failed and 1 or 0 - def repr_pythonversion(v=None): if v is None: v = sys.version_info diff --git a/py/__init__.py b/py/__init__.py --- a/py/__init__.py +++ b/py/__init__.py @@ -8,7 +8,7 @@ (c) Holger Krekel and others, 2004-2010 """ -__version__ = '1.4.2.dev0' +__version__ = '1.4.2' from py import _apipkg From commits-noreply at bitbucket.org Sun Mar 6 19:37:35 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sun, 6 Mar 2011 19:37:35 +0100 (CET) Subject: [pypy-svn] pypy pytest2: remove py/bin/py.test script because we neccessarily have pytest.py at root level anyway. Message-ID: <20110306183735.9FAA1282BDE@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42446:cc8117bbdad0 Date: 2011-03-06 19:36 +0100 http://bitbucket.org/pypy/pypy/changeset/cc8117bbdad0/ Log: remove py/bin/py.test script because we neccessarily have pytest.py at root level anyway. the root pytest.py is now a modified version of the file from the pytest distribution because I want to disable warnings for people who have installed py.test indepedently from PyPy. Also fix up the docs a bit more to hint / move towards this possibility. also fix the docs to point to "pytest.py" instead of "pypy/test_all.py" which should die at some point. diff --git a/py/bin/py.test b/py/bin/py.test deleted file mode 100755 --- a/py/bin/py.test +++ /dev/null @@ -1,18 +0,0 @@ -#!/usr/bin/env python - -# XXX integrate into pypy/test_all.py -# somewhat PYPY specific hack: -# let's make sure setuptools does show a warning when our inlined 'py' -# version shadows a properly installed one. 
-import warnings -warnings.filterwarnings("ignore", - "Module py was already imported", category=UserWarning) -warnings.filterwarnings("ignore", - "Module _pytest was already imported", - category=UserWarning) -warnings.filterwarnings("ignore", - "Module pytest was already imported", - category=UserWarning) -from _findpy import py -import pytest -pytest.main() diff --git a/pypy/doc/project-ideas.txt b/pypy/doc/project-ideas.txt --- a/pypy/doc/project-ideas.txt +++ b/pypy/doc/project-ideas.txt @@ -81,8 +81,6 @@ .. _`efficient propagators for specialized finite domains`: http://codespeak.net/svn/pypy/extradoc/soc-2006/constraints.txt -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _`py.execnet`: http://codespeak.net/py/current/doc/execnet.html .. _`object spaces`: objspace.html .. _`code templating solution`: http://codespeak.net/svn/pypy/extradoc/soc-2006/code-templating.txt diff --git a/py/bin/_findpy.py b/py/bin/_findpy.py deleted file mode 100644 --- a/py/bin/_findpy.py +++ /dev/null @@ -1,37 +0,0 @@ -#!/usr/bin/env python - -# -# find and import a version of 'py' -# -import sys -import os -from os.path import dirname as opd, exists, join, basename, abspath - -def searchpy(current): - while 1: - last = current - initpy = join(current, '__init__.py') - if not exists(initpy): - pydir = join(current, 'py') - # recognize py-package and ensure it is importable - if exists(pydir) and exists(join(pydir, '__init__.py')): - #for p in sys.path: - # if p == current: - # return True - if current != sys.path[0]: # if we are already first, then ok - sys.stderr.write("inserting into sys.path: %s\n" % current) - sys.path.insert(0, current) - return True - current = opd(current) - if last == current: - return False - -if not searchpy(abspath(os.curdir)): - if not searchpy(opd(abspath(sys.argv[0]))): - if not searchpy(opd(__file__)): - pass # let's hope it is just on sys.path - -import py - -if __name__ == '__main__': - print ("py lib is at %s" % py.__file__) 
diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt --- a/pypy/doc/getting-started-dev.txt +++ b/pypy/doc/getting-started-dev.txt @@ -207,32 +207,52 @@ Running PyPy's unit tests ------------------------- -The PyPy project uses test-driven-development. Right now, there are -a couple of different categories of tests which you can run. -To run all the unit tests:: +PyPy development always was and is still thorougly test-driven. +We use the flexible `py.test testing tool`_ which you can `install independently +`_ and use indepedently +from PyPy for other projects. - cd pypy - python test_all.py +The PyPy source tree comes with an inlined version of ``py.test`` +which you can invoke by typing:: -(this is not recommended, since it takes hours and uses huge amounts of RAM). -Alternatively, you may run subtests by going to the correct subdirectory -and running them individually:: + python pytest.py -h - python test_all.py interpreter/test/test_pyframe.py +This is usually equivalent to using an installed version:: -``test_all.py`` is actually just a synonym for `py.test`_ which is -our external testing tool. If you have installed that you -can as well just issue ``py.test DIRECTORY_OR_FILE`` in order -to perform test runs or simply start it without arguments to -run all tests below the current directory. + py.test -h -Finally, there are the CPython regression tests which you can -run like this (this will take hours and hours and hours):: +If you encounter problems with the installed version +make sure you have the correct version installed which +you can find out with the ``--version`` switch. - cd lib-python/2.5.2/test - python ../../../pypy/test_all.py +Now on to running some tests. PyPy has many different test directories +and you can use shell completion to point at directories or files:: -.. 
_`installed py.test`: https://codespeak.net/py/current/doc/download.html + py.test pypy/interpreter/test/test_pyframe.py + + # or for running tests of a whole subdirectory + py.test pypy/interpreter/ + +See `py.test usage and invocations`_ for some more generic info +on how you can run tests. + +Beware trying to run "all" pypy tests by pointing to the root +directory or even the top level subdirectory ``pypy``. It takes +hours and uses huge amounts of RAM and is not recommended. + +To run CPython regression tests you can point to the ``lib-python`` +directory:: + + py.test lib-python/2.7.0/test/test_datetime.py + +This will usually take a long time because this will run +the PyPy Python interpreter on top of CPython. On the plus +side, it's usually still faster than doing a full translation +and running the regression test with the translated PyPy Python +interpreter. + +.. _`py.test testing tool`: http://pytest.org +.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage Special Introspection Features of the Untranslated Python Interpreter --------------------------------------------------------------------- @@ -345,14 +365,13 @@ py.test and the py lib +++++++++++++++++++++++ -The `py library`_ is used for supporting PyPy development and -running our tests against code and documentation as well as -compliance tests. You don't need to install the py library because -it ships with PyPy and `pypy/test_all.py`_ is an alias for ``py.test`` -but if you want to have the ``py.test`` tool generally in your -path, you might like to visit: +The `py.test testing tool`_ drives all our testing needs. - http://codespeak.net/py/dist/download.html +We use the `py library`_ for filesystem path manipulations, terminal +writing, logging and some other support functionality. + +You don't neccessarily need to install these two libraries because +we also ship them inlined in the PyPy source tree. Getting involved ----------------- @@ -370,7 +389,7 @@ .. 
_`pypy-dev mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev .. _`contact possibilities`: index.html -.. _`py library`: http://codespeak.net/py +.. _`py library`: http://pylib.org .. _`Spidermonkey`: http://www.mozilla.org/js/spidermonkey/ diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -1,12 +1,30 @@ """ unit and functional testing with Python. +(pypy version of startup script) """ -__version__ = '2.0.2.dev4' +__version__ = '2.0.2.dev5' # base pytest version __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins from _pytest import core as cmdline +# This pytest.py script is located in the pypy source tree +# which has a copy of pytest and py within its source tree. +# If the environment also has an installed version of pytest/py +# we are bound to get warnings so we disable them. +# XXX eventually pytest and py should not be inlined shipped +# with the pypy source code but become a requirement for installation. + +import warnings +warnings.filterwarnings("ignore", + "Module py was already imported", category=UserWarning) +warnings.filterwarnings("ignore", + "Module _pytest was already imported", + category=UserWarning) +warnings.filterwarnings("ignore", + "Module pytest was already imported", + category=UserWarning) + if __name__ == '__main__': # if run as a script or by 'python -m pytest' raise SystemExit(main()) else: From commits-noreply at bitbucket.org Sun Mar 6 20:31:13 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 20:31:13 +0100 (CET) Subject: [pypy-svn] pypy default: Make a mutable copy of lib2to3 Message-ID: <20110306193113.67DA4282BDE@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42447:52376ee338b6 Date: 2011-03-06 11:23 -0800 http://bitbucket.org/pypy/pypy/changeset/52376ee338b6/ Log: Make a mutable copy of lib2to3 diff --git a/lib-python/2.7.0/lib2to3/pgen2/pgen.py b/lib-python/modified-2.7.0/lib2to3/pgen2/pgen.py copy from 
lib-python/2.7.0/lib2to3/pgen2/pgen.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/pgen.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_print.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_print.py copy from lib-python/2.7.0/lib2to3/fixes/fix_print.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_print.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_paren.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_paren.py copy from lib-python/2.7.0/lib2to3/fixes/fix_paren.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_paren.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_nonzero.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_nonzero.py copy from lib-python/2.7.0/lib2to3/fixes/fix_nonzero.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_nonzero.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_explicit.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_explicit.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_explicit.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_explicit.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_parser.py b/lib-python/modified-2.7.0/lib2to3/tests/test_parser.py copy from lib-python/2.7.0/lib2to3/tests/test_parser.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_parser.py diff --git a/lib-python/2.7.0/lib2to3/refactor.py b/lib-python/modified-2.7.0/lib2to3/refactor.py copy from lib-python/2.7.0/lib2to3/refactor.py copy to lib-python/modified-2.7.0/lib2to3/refactor.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_imports2.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_imports2.py copy from lib-python/2.7.0/lib2to3/fixes/fix_imports2.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_imports2.py diff --git a/lib-python/2.7.0/lib2to3/main.py b/lib-python/modified-2.7.0/lib2to3/main.py copy from lib-python/2.7.0/lib2to3/main.py copy to lib-python/modified-2.7.0/lib2to3/main.py diff --git 
a/lib-python/2.7.0/lib2to3/fixes/fix_map.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_map.py copy from lib-python/2.7.0/lib2to3/fixes/fix_map.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_map.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/bad_order.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/bad_order.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/bad_order.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/bad_order.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_funcattrs.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_funcattrs.py copy from lib-python/2.7.0/lib2to3/fixes/fix_funcattrs.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_funcattrs.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_preorder.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_preorder.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_preorder.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_preorder.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_xreadlines.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_xreadlines.py copy from lib-python/2.7.0/lib2to3/fixes/fix_xreadlines.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_xreadlines.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_urllib.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_urllib.py copy from lib-python/2.7.0/lib2to3/fixes/fix_urllib.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_urllib.py diff --git a/lib-python/2.7.0/lib2to3/patcomp.py b/lib-python/modified-2.7.0/lib2to3/patcomp.py copy from lib-python/2.7.0/lib2to3/patcomp.py copy to lib-python/modified-2.7.0/lib2to3/patcomp.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_apply.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_apply.py copy from lib-python/2.7.0/lib2to3/fixes/fix_apply.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_apply.py diff --git 
a/lib-python/2.7.0/lib2to3/fixes/fix_ws_comma.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_ws_comma.py copy from lib-python/2.7.0/lib2to3/fixes/fix_ws_comma.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_ws_comma.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_main.py b/lib-python/modified-2.7.0/lib2to3/tests/test_main.py copy from lib-python/2.7.0/lib2to3/tests/test_main.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_main.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_idioms.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_idioms.py copy from lib-python/2.7.0/lib2to3/fixes/fix_idioms.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_idioms.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_imports.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_imports.py copy from lib-python/2.7.0/lib2to3/fixes/fix_imports.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_imports.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_unicode.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_unicode.py copy from lib-python/2.7.0/lib2to3/fixes/fix_unicode.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_unicode.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_has_key.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_has_key.py copy from lib-python/2.7.0/lib2to3/fixes/fix_has_key.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_has_key.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/tokenize.py b/lib-python/modified-2.7.0/lib2to3/pgen2/tokenize.py copy from lib-python/2.7.0/lib2to3/pgen2/tokenize.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/tokenize.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_itertools_imports.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_itertools_imports.py copy from lib-python/2.7.0/lib2to3/fixes/fix_itertools_imports.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_itertools_imports.py diff --git a/lib-python/2.7.0/lib2to3/__init__.py b/lib-python/modified-2.7.0/lib2to3/__init__.py copy from 
lib-python/2.7.0/lib2to3/__init__.py copy to lib-python/modified-2.7.0/lib2to3/__init__.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/__init__.py b/lib-python/modified-2.7.0/lib2to3/pgen2/__init__.py copy from lib-python/2.7.0/lib2to3/pgen2/__init__.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/__init__.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/conv.py b/lib-python/modified-2.7.0/lib2to3/pgen2/conv.py copy from lib-python/2.7.0/lib2to3/pgen2/conv.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/conv.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_basestring.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_basestring.py copy from lib-python/2.7.0/lib2to3/fixes/fix_basestring.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_basestring.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_last.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_last.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_last.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_last.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_ne.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_ne.py copy from lib-python/2.7.0/lib2to3/fixes/fix_ne.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_ne.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_itertools.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_itertools.py copy from lib-python/2.7.0/lib2to3/fixes/fix_itertools.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_itertools.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_input.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_input.py copy from lib-python/2.7.0/lib2to3/fixes/fix_input.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_input.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_getcwdu.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_getcwdu.py copy from lib-python/2.7.0/lib2to3/fixes/fix_getcwdu.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_getcwdu.py diff 
--git a/lib-python/2.7.0/lib2to3/tests/data/fixers/no_fixer_cls.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/no_fixer_cls.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/no_fixer_cls.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/no_fixer_cls.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_types.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_types.py copy from lib-python/2.7.0/lib2to3/fixes/fix_types.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_types.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/grammar.py b/lib-python/modified-2.7.0/lib2to3/pgen2/grammar.py copy from lib-python/2.7.0/lib2to3/pgen2/grammar.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/grammar.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_operator.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_operator.py copy from lib-python/2.7.0/lib2to3/fixes/fix_operator.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_operator.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_set_literal.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_set_literal.py copy from lib-python/2.7.0/lib2to3/fixes/fix_set_literal.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_set_literal.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_raise.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_raise.py copy from lib-python/2.7.0/lib2to3/fixes/fix_raise.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_raise.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_reduce.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_reduce.py copy from lib-python/2.7.0/lib2to3/fixes/fix_reduce.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_reduce.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_numliterals.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_numliterals.py copy from lib-python/2.7.0/lib2to3/fixes/fix_numliterals.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_numliterals.py diff --git a/lib-python/2.7.0/lib2to3/fixer_base.py 
b/lib-python/modified-2.7.0/lib2to3/fixer_base.py copy from lib-python/2.7.0/lib2to3/fixer_base.py copy to lib-python/modified-2.7.0/lib2to3/fixer_base.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_callable.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_callable.py copy from lib-python/2.7.0/lib2to3/fixes/fix_callable.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_callable.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/infinite_recursion.py b/lib-python/modified-2.7.0/lib2to3/tests/data/infinite_recursion.py copy from lib-python/2.7.0/lib2to3/tests/data/infinite_recursion.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/infinite_recursion.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_util.py b/lib-python/modified-2.7.0/lib2to3/tests/test_util.py copy from lib-python/2.7.0/lib2to3/tests/test_util.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_util.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_sys_exc.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_sys_exc.py copy from lib-python/2.7.0/lib2to3/fixes/fix_sys_exc.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_sys_exc.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_zip.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_zip.py copy from lib-python/2.7.0/lib2to3/fixes/fix_zip.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_zip.py diff --git a/lib-python/2.7.0/lib2to3/btm_utils.py b/lib-python/modified-2.7.0/lib2to3/btm_utils.py copy from lib-python/2.7.0/lib2to3/btm_utils.py copy to lib-python/modified-2.7.0/lib2to3/btm_utils.py diff --git a/lib-python/2.7.0/lib2to3/Grammar.txt b/lib-python/modified-2.7.0/lib2to3/Grammar.txt copy from lib-python/2.7.0/lib2to3/Grammar.txt copy to lib-python/modified-2.7.0/lib2to3/Grammar.txt diff --git a/lib-python/2.7.0/lib2to3/pgen2/token.py b/lib-python/modified-2.7.0/lib2to3/pgen2/token.py copy from lib-python/2.7.0/lib2to3/pgen2/token.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/token.py diff --git 
a/lib-python/2.7.0/lib2to3/fixes/fix_exec.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_exec.py copy from lib-python/2.7.0/lib2to3/fixes/fix_exec.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_exec.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_renames.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_renames.py copy from lib-python/2.7.0/lib2to3/fixes/fix_renames.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_renames.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_tuple_params.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_tuple_params.py copy from lib-python/2.7.0/lib2to3/fixes/fix_tuple_params.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_tuple_params.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/crlf.py b/lib-python/modified-2.7.0/lib2to3/tests/data/crlf.py copy from lib-python/2.7.0/lib2to3/tests/data/crlf.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/crlf.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/bom.py b/lib-python/modified-2.7.0/lib2to3/tests/data/bom.py copy from lib-python/2.7.0/lib2to3/tests/data/bom.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/bom.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_exitfunc.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_exitfunc.py copy from lib-python/2.7.0/lib2to3/fixes/fix_exitfunc.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_exitfunc.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_fixers.py b/lib-python/modified-2.7.0/lib2to3/tests/test_fixers.py copy from lib-python/2.7.0/lib2to3/tests/test_fixers.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_fixers.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/literals.py b/lib-python/modified-2.7.0/lib2to3/pgen2/literals.py copy from lib-python/2.7.0/lib2to3/pgen2/literals.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/literals.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/README b/lib-python/modified-2.7.0/lib2to3/tests/data/README copy from 
lib-python/2.7.0/lib2to3/tests/data/README copy to lib-python/modified-2.7.0/lib2to3/tests/data/README diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_except.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_except.py copy from lib-python/2.7.0/lib2to3/fixes/fix_except.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_except.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_filter.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_filter.py copy from lib-python/2.7.0/lib2to3/fixes/fix_filter.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_filter.py diff --git a/lib-python/2.7.0/lib2to3/pygram.py b/lib-python/modified-2.7.0/lib2to3/pygram.py copy from lib-python/2.7.0/lib2to3/pygram.py copy to lib-python/modified-2.7.0/lib2to3/pygram.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/py3_test_grammar.py b/lib-python/modified-2.7.0/lib2to3/tests/data/py3_test_grammar.py copy from lib-python/2.7.0/lib2to3/tests/data/py3_test_grammar.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/py3_test_grammar.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_import.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_import.py copy from lib-python/2.7.0/lib2to3/fixes/fix_import.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_import.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_all_fixers.py b/lib-python/modified-2.7.0/lib2to3/tests/test_all_fixers.py copy from lib-python/2.7.0/lib2to3/tests/test_all_fixers.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_all_fixers.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_pytree.py b/lib-python/modified-2.7.0/lib2to3/tests/test_pytree.py copy from lib-python/2.7.0/lib2to3/tests/test_pytree.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_pytree.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_intern.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_intern.py copy from lib-python/2.7.0/lib2to3/fixes/fix_intern.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_intern.py diff --git 
a/lib-python/2.7.0/lib2to3/tests/data/py2_test_grammar.py b/lib-python/modified-2.7.0/lib2to3/tests/data/py2_test_grammar.py copy from lib-python/2.7.0/lib2to3/tests/data/py2_test_grammar.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/py2_test_grammar.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_parrot.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_parrot.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_parrot.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_parrot.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/parrot_example.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/parrot_example.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/parrot_example.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/parrot_example.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_metaclass.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_metaclass.py copy from lib-python/2.7.0/lib2to3/fixes/fix_metaclass.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_metaclass.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_first.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_first.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/fix_first.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/fix_first.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/different_encoding.py b/lib-python/modified-2.7.0/lib2to3/tests/data/different_encoding.py copy from lib-python/2.7.0/lib2to3/tests/data/different_encoding.py copy to lib-python/modified-2.7.0/lib2to3/tests/data/different_encoding.py diff --git a/lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/__init__.py b/lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/__init__.py copy from lib-python/2.7.0/lib2to3/tests/data/fixers/myfixes/__init__.py copy to 
lib-python/modified-2.7.0/lib2to3/tests/data/fixers/myfixes/__init__.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_dict.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_dict.py copy from lib-python/2.7.0/lib2to3/fixes/fix_dict.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_dict.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_isinstance.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_isinstance.py copy from lib-python/2.7.0/lib2to3/fixes/fix_isinstance.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_isinstance.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_long.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_long.py copy from lib-python/2.7.0/lib2to3/fixes/fix_long.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_long.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_raw_input.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_raw_input.py copy from lib-python/2.7.0/lib2to3/fixes/fix_raw_input.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_raw_input.py diff --git a/lib-python/2.7.0/lib2to3/btm_matcher.py b/lib-python/modified-2.7.0/lib2to3/btm_matcher.py copy from lib-python/2.7.0/lib2to3/btm_matcher.py copy to lib-python/modified-2.7.0/lib2to3/btm_matcher.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_standarderror.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_standarderror.py copy from lib-python/2.7.0/lib2to3/fixes/fix_standarderror.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_standarderror.py diff --git a/lib-python/2.7.0/lib2to3/fixes/__init__.py b/lib-python/modified-2.7.0/lib2to3/fixes/__init__.py copy from lib-python/2.7.0/lib2to3/fixes/__init__.py copy to lib-python/modified-2.7.0/lib2to3/fixes/__init__.py diff --git a/lib-python/2.7.0/lib2to3/fixer_util.py b/lib-python/modified-2.7.0/lib2to3/fixer_util.py copy from lib-python/2.7.0/lib2to3/fixer_util.py copy to lib-python/modified-2.7.0/lib2to3/fixer_util.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_buffer.py 
b/lib-python/modified-2.7.0/lib2to3/fixes/fix_buffer.py copy from lib-python/2.7.0/lib2to3/fixes/fix_buffer.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_buffer.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_execfile.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_execfile.py copy from lib-python/2.7.0/lib2to3/fixes/fix_execfile.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_execfile.py diff --git a/lib-python/2.7.0/lib2to3/pytree.py b/lib-python/modified-2.7.0/lib2to3/pytree.py copy from lib-python/2.7.0/lib2to3/pytree.py copy to lib-python/modified-2.7.0/lib2to3/pytree.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_methodattrs.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_methodattrs.py copy from lib-python/2.7.0/lib2to3/fixes/fix_methodattrs.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_methodattrs.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_next.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_next.py copy from lib-python/2.7.0/lib2to3/fixes/fix_next.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_next.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_repr.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_repr.py copy from lib-python/2.7.0/lib2to3/fixes/fix_repr.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_repr.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_xrange.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_xrange.py copy from lib-python/2.7.0/lib2to3/fixes/fix_xrange.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_xrange.py diff --git a/lib-python/2.7.0/lib2to3/pgen2/parse.py b/lib-python/modified-2.7.0/lib2to3/pgen2/parse.py copy from lib-python/2.7.0/lib2to3/pgen2/parse.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/parse.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_throw.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_throw.py copy from lib-python/2.7.0/lib2to3/fixes/fix_throw.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_throw.py diff --git 
a/lib-python/2.7.0/lib2to3/pgen2/driver.py b/lib-python/modified-2.7.0/lib2to3/pgen2/driver.py copy from lib-python/2.7.0/lib2to3/pgen2/driver.py copy to lib-python/modified-2.7.0/lib2to3/pgen2/driver.py diff --git a/lib-python/2.7.0/lib2to3/tests/support.py b/lib-python/modified-2.7.0/lib2to3/tests/support.py copy from lib-python/2.7.0/lib2to3/tests/support.py copy to lib-python/modified-2.7.0/lib2to3/tests/support.py diff --git a/lib-python/2.7.0/lib2to3/PatternGrammar.txt b/lib-python/modified-2.7.0/lib2to3/PatternGrammar.txt copy from lib-python/2.7.0/lib2to3/PatternGrammar.txt copy to lib-python/modified-2.7.0/lib2to3/PatternGrammar.txt diff --git a/lib-python/2.7.0/lib2to3/tests/pytree_idempotency.py b/lib-python/modified-2.7.0/lib2to3/tests/pytree_idempotency.py copy from lib-python/2.7.0/lib2to3/tests/pytree_idempotency.py copy to lib-python/modified-2.7.0/lib2to3/tests/pytree_idempotency.py diff --git a/lib-python/2.7.0/lib2to3/tests/__init__.py b/lib-python/modified-2.7.0/lib2to3/tests/__init__.py copy from lib-python/2.7.0/lib2to3/tests/__init__.py copy to lib-python/modified-2.7.0/lib2to3/tests/__init__.py diff --git a/lib-python/2.7.0/lib2to3/fixes/fix_future.py b/lib-python/modified-2.7.0/lib2to3/fixes/fix_future.py copy from lib-python/2.7.0/lib2to3/fixes/fix_future.py copy to lib-python/modified-2.7.0/lib2to3/fixes/fix_future.py diff --git a/lib-python/2.7.0/lib2to3/tests/test_refactor.py b/lib-python/modified-2.7.0/lib2to3/tests/test_refactor.py copy from lib-python/2.7.0/lib2to3/tests/test_refactor.py copy to lib-python/modified-2.7.0/lib2to3/tests/test_refactor.py From commits-noreply at bitbucket.org Sun Mar 6 20:31:14 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 6 Mar 2011 20:31:14 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, armin): fix the lib2to3 failure. It was assuming refcounting in very subtle and painful ways. 
Message-ID: <20110306193114.32FD5282BDE@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42448:392f45550649 Date: 2011-03-06 11:29 -0800 http://bitbucket.org/pypy/pypy/changeset/392f45550649/ Log: (alex, armin): fix the lib2to3 failure. It was assuming refcounting in very subtle and painful ways. diff --git a/lib-python/modified-2.7.0/lib2to3/pytree.py b/lib-python/modified-2.7.0/lib2to3/pytree.py --- a/lib-python/modified-2.7.0/lib2to3/pytree.py +++ b/lib-python/modified-2.7.0/lib2to3/pytree.py @@ -741,11 +741,12 @@ elif self.name == "bare_name": yield self._bare_name_matches(nodes) else: - # The reason for this is that hitting the recursion limit usually - # results in some ugly messages about how RuntimeErrors are being - # ignored. - save_stderr = sys.stderr - sys.stderr = StringIO() + # There used to be some monkey patching of sys.stderr here, to + # silence the error message from the RuntimError, PyPy has removed + # this because it relied on reference counting. This is because the + # caller of this function doesn't consume this generator fully, so + # the finally statement that used to be here would only be executed + # when the gc happened to run. try: for count, r in self._recursive_matches(nodes, 0): if self.name: @@ -758,8 +759,6 @@ if self.name: r[self.name] = nodes[:count] yield count, r - finally: - sys.stderr = save_stderr def _iterative_matches(self, nodes): """Helper to iteratively yield the matches.""" From commits-noreply at bitbucket.org Sun Mar 6 20:43:34 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 6 Mar 2011 20:43:34 +0100 (CET) Subject: [pypy-svn] pypy default: Found and fixed the occasional bug in the lib-python's test_math.py. Message-ID: <20110306194334.763FC36C204@codespeak.net> Author: Armin Rigo Branch: Changeset: r42449:ff00dcfe6309 Date: 2011-03-06 11:43 -0800 http://bitbucket.org/pypy/pypy/changeset/ff00dcfe6309/ Log: Found and fixed the occasional bug in the lib-python's test_math.py. 
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -4287,8 +4287,11 @@ i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) + i16 = int_rshift(i15, 2) i17 = int_lshift(i1b, 100) i18 = int_rshift(i17, 100) + i19 = int_eq(i1b, i16) + guard_true(i19) [] jump(i2, i3, i1b, i2b) """ self.optimize_loop(ops, expected) diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -130,10 +130,12 @@ r = self.getvalue(op.result) b = v1.intbound.lshift_bound(v2.intbound) r.intbound.intersect(b) - if b.has_lower and b.has_upper: - # Synthesize the reverse op for optimize_default to reuse - self.pure(rop.INT_RSHIFT, [op.result, op.getarg(1)], op.getarg(0)) - + # --- The following is actually wrong if the INT_LSHIFT overflowed. + # --- It is precisely the pattern we use to detect overflows of the + # --- app-level '<<' operator: INT_LSHIFT/INT_RSHIFT/INT_EQ + #if b.has_lower and b.has_upper: + # # Synthesize the reverse op for optimize_default to reuse + # self.pure(rop.INT_RSHIFT, [op.result, op.getarg(1)], op.getarg(0)) def optimize_INT_RSHIFT(self, op): v1 = self.getvalue(op.getarg(0)) From commits-noreply at bitbucket.org Sun Mar 6 23:20:41 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Sun, 06 Mar 2011 22:20:41 -0000 Subject: [pypy-svn] commit/extradoc: arigo: Typo. Message-ID: <20110306222041.27950.8633@bitbucket03.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/1931bb558c06/ changeset: r3353:1931bb558c06 branch: extradoc user: arigo date: 2011-03-06 23:20:08 summary: Typo. 
affected #: 1 file (0 bytes) --- a/talk/ustour2011/yelp-talk.txt Fri Mar 04 05:50:09 2011 -0500 +++ b/talk/ustour2011/yelp-talk.txt Sun Mar 06 14:20:08 2011 -0800 @@ -188,7 +188,7 @@ * For some extension modules, we can have a performance issue -* Work-in-progress +* Work in progress CPyExt works "often" Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Mon Mar 7 00:42:43 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Sun, 06 Mar 2011 23:42:43 -0000 Subject: [pypy-svn] commit/extradoc: 2 new changesets Message-ID: <20110306234243.27951.97135@bitbucket03.managed.contegix.com> 2 new changesets in extradoc: http://bitbucket.org/pypy/extradoc/changeset/0c88e3152af1/ changeset: r3354:0c88e3152af1 branch: extradoc user: fijal date: 2011-03-07 00:41:59 summary: examples affected #: 2 files (361 bytes) --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/pycon2011/whyslow/examples/frame.py Mon Mar 07 00:41:59 2011 +0100 @@ -0,0 +1,11 @@ + +def f(): + i = 0 + while i < 2000: + i += 1 + print sys._getframe().f_locals + # has to have 'i' as a local + return i + +if __name__ == '__main__': + f() --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/pycon2011/whyslow/examples/tracing.py Mon Mar 07 00:41:59 2011 +0100 @@ -0,0 +1,14 @@ + +def f(): + i = 0 + s = 0 + while i < 3000: + if i % 3 == 0: + s += 1 + else: + s += 2 + i += 1 + return s + +if __name__ == '__main__': + f() http://bitbucket.org/pypy/extradoc/changeset/ad919f219d85/ changeset: r3355:ad919f219d85 branch: extradoc user: fijal date: 2011-03-07 00:42:40 summary: merge heads affected #: 0 files (0 bytes) --- a/planning/jit.txt Mon Mar 07 00:41:59 2011 +0100 +++ b/planning/jit.txt Mon Mar 07 00:42:40 2011 +0100 @@ -76,6 +76,11 @@ maybe we should move promote even higher, before the first use and we could 
possibly remove more stuff? +- f31 = f17 * f16 + f32 = f16 * f17 + + Should be just a matter of synthesizing reverse operations in rewrite.py + PYTHON EXAMPLES --------------- Binary file talk/ustour2011/speed.png has changed Binary file talk/ustour2011/speed2.png has changed --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/talk/ustour2011/ui Mon Mar 07 00:42:40 2011 +0100 @@ -0,0 +1,1 @@ +../stanford-ee380-2011/ui \ No newline at end of file Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. From commits-noreply at bitbucket.org Mon Mar 7 00:54:01 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 7 Mar 2011 00:54:01 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, greg): Make the zipimport logic slightly closer to the CPython logic, in that we cache the file info. This makes it considerably faster to `import pytz`, if pytz is in an egg. Message-ID: <20110306235401.1BA6E36C206@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42450:5914645b3ae3 Date: 2011-03-06 15:50 -0800 http://bitbucket.org/pypy/pypy/changeset/5914645b3ae3/ Log: (alex, greg): Make the zipimport logic slightly closer to the CPython logic, in that we cache the file info. This makes it considerably faster to `import pytz`, if pytz is in an egg. 
diff --git a/pypy/module/zipimport/interp_zipimport.py b/pypy/module/zipimport/interp_zipimport.py --- a/pypy/module/zipimport/interp_zipimport.py +++ b/pypy/module/zipimport/interp_zipimport.py @@ -54,7 +54,7 @@ w = space.wrap values = {} w_d = space.newdict() - for key, info in w_zipimporter.dir.iteritems(): + for key, info in w_zipimporter.zip_file.NameToInfo.iteritems(): w_values = space.newdict() space.setitem(w_d, w(key), space.newtuple([ w(info.filename), w(info.compress_type), w(info.compress_size), @@ -115,11 +115,11 @@ zip_cache = W_ZipCache() class W_ZipImporter(Wrappable): - def __init__(self, space, name, filename, dir, prefix): + def __init__(self, space, name, filename, zip_file, prefix): self.space = space self.name = name self.filename = filename - self.dir = dir + self.zip_file = zip_file self.prefix = prefix def getprefix(self, space): @@ -154,7 +154,7 @@ def _parse_mtime(self, space, filename): w = space.wrap try: - info = self.dir[filename] + info = self.zip_file.NameToInfo[filename] t = info.date_time except KeyError: return 0 @@ -211,7 +211,7 @@ if ZIPSEP != os.path.sep: filename = filename.replace(os.path.sep, ZIPSEP) try: - self.dir[filename] + self.zip_file.NameToInfo[filename] return True except KeyError: return False @@ -245,11 +245,7 @@ for compiled, is_package, ext in ENUMERATE_EXTS: fname = filename + ext try: - zip_file = RZipFile(self.filename, 'r') - try: - buf = zip_file.read(fname) - finally: - zip_file.close() + buf = self.zip_file.read(fname) except (KeyError, OSError): pass else: @@ -279,11 +275,7 @@ filename = self._find_relative_path(filename) w = space.wrap try: - zip_file = RZipFile(self.filename, 'r') - try: - data = zip_file.read(filename) - finally: - zip_file.close() + data = self.zip_file.read(filename) return w(data) except (KeyError, OSError): raise OperationError(space.w_IOError, space.wrap("Error reading file")) @@ -388,14 +380,12 @@ except (BadZipfile, OSError): raise operationerrfmt(w_ZipImportError, "%s seems 
not to be a zipfile", filename) - zip_file.close() prefix = name[len(filename):] if prefix.startswith(os.path.sep) or prefix.startswith(ZIPSEP): prefix = prefix[1:] if prefix and not prefix.endswith(ZIPSEP): prefix += ZIPSEP - w_result = space.wrap(W_ZipImporter(space, name, filename, - zip_file.NameToInfo, prefix)) + w_result = space.wrap(W_ZipImporter(space, name, filename, zip_file, prefix)) zip_cache.set(filename, w_result) return w_result @@ -412,4 +402,4 @@ archive = GetSetProperty(W_ZipImporter.getarchive), prefix = GetSetProperty(W_ZipImporter.getprefix), ) - + diff --git a/pypy/rlib/rzipfile.py b/pypy/rlib/rzipfile.py --- a/pypy/rlib/rzipfile.py +++ b/pypy/rlib/rzipfile.py @@ -25,7 +25,7 @@ #/* Note: (crc >> 8) MUST zero fill on left result = crc ^ r_uint(0xffffffffL) - + return result # parts copied from zipfile library implementation @@ -155,13 +155,14 @@ self.NameToInfo = {} if 'b' not in mode: mode += 'b' - fp = open_file_as_stream(zipname, mode, 1024) + fp = self.get_fp() try: self._GetContents(fp) - except: + finally: fp.close() - raise - self.fp = fp + + def get_fp(self): + return open_file_as_stream(self.filename, self.mode, 1024) def _GetContents(self, fp): endrec = _EndRecData(fp) @@ -222,41 +223,41 @@ 'File name in directory "%s" and header "%s" differ.' 
% ( data.orig_filename, fname) fp.seek(self.start_dir, 0) - + def getinfo(self, filename): """Return the instance of ZipInfo given 'filename'.""" return self.NameToInfo[filename] def read(self, filename): zinfo = self.getinfo(filename) - filepos = self.fp.tell() - self.fp.seek(zinfo.file_offset, 0) - bytes = self.fp.read(intmask(zinfo.compress_size)) - self.fp.seek(filepos, 0) - if zinfo.compress_type == ZIP_STORED: - pass - elif zinfo.compress_type == ZIP_DEFLATED and rzlib is not None: - stream = rzlib.inflateInit(wbits=-15) - try: - bytes, _, _ = rzlib.decompress(stream, bytes) - # need to feed in unused pad byte so that zlib won't choke - ex, _, _ = rzlib.decompress(stream, 'Z') - if ex: - bytes = bytes + ex - finally: - rzlib.inflateEnd(stream) - elif zinfo.compress_type == ZIP_DEFLATED: - raise BadZipfile, \ - "Cannot decompress file, zlib not installed" - else: - raise BadZipfile, \ - "Unsupported compression method %d for file %s" % \ - (zinfo.compress_type, filename) - crc = crc32(bytes) - if crc != zinfo.CRC: - raise BadZipfile, "Bad CRC-32 for file %s" % filename - return bytes - - def close(self): - self.fp.close() - + fp = self.get_fp() + try: + filepos = fp.tell() + fp.seek(zinfo.file_offset, 0) + bytes = fp.read(intmask(zinfo.compress_size)) + fp.seek(filepos, 0) + if zinfo.compress_type == ZIP_STORED: + pass + elif zinfo.compress_type == ZIP_DEFLATED and rzlib is not None: + stream = rzlib.inflateInit(wbits=-15) + try: + bytes, _, _ = rzlib.decompress(stream, bytes) + # need to feed in unused pad byte so that zlib won't choke + ex, _, _ = rzlib.decompress(stream, 'Z') + if ex: + bytes = bytes + ex + finally: + rzlib.inflateEnd(stream) + elif zinfo.compress_type == ZIP_DEFLATED: + raise BadZipfile, \ + "Cannot decompress file, zlib not installed" + else: + raise BadZipfile, \ + "Unsupported compression method %d for file %s" % \ + (zinfo.compress_type, filename) + crc = crc32(bytes) + if crc != zinfo.CRC: + raise BadZipfile, "Bad CRC-32 for file %s" 
% filename + return bytes + finally: + fp.close() \ No newline at end of file From commits-noreply at bitbucket.org Mon Mar 7 00:54:01 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 7 Mar 2011 00:54:01 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. Message-ID: <20110306235401.78F4F282BDE@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42451:26b79918fc4e Date: 2011-03-06 15:53 -0800 http://bitbucket.org/pypy/pypy/changeset/26b79918fc4e/ Log: Merged upstream. From commits-noreply at bitbucket.org Mon Mar 7 01:18:02 2011 From: commits-noreply at bitbucket.org (Bitbucket) Date: Mon, 07 Mar 2011 00:18:02 -0000 Subject: [pypy-svn] commit/extradoc: fijal: Work on google slides Message-ID: <20110307001802.25680.27968@bitbucket02.managed.contegix.com> 1 new changeset in extradoc: http://bitbucket.org/pypy/extradoc/changeset/35e38ba8da6d/ changeset: r3356:35e38ba8da6d branch: extradoc user: fijal date: 2011-03-07 01:17:50 summary: Work on google slides affected #: 8 files (201.0 KB) Diff too large to display. Repository URL: https://bitbucket.org/pypy/extradoc/ -- This is a commit notification from bitbucket.org. You are receiving this because you have the service enabled, addressing the recipient of this email. 
From commits-noreply at bitbucket.org Mon Mar 7 03:52:06 2011 From: commits-noreply at bitbucket.org (ademan) Date: Mon, 7 Mar 2011 03:52:06 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed sqlite test CheckInsertStartsTransaction and got a few more successes for free Message-ID: <20110307025206.37747282BEA@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42452:6898544251a0 Date: 2011-03-06 18:51 -0800 http://bitbucket.org/pypy/pypy/changeset/6898544251a0/ Log: Fixed sqlite test CheckInsertStartsTransaction and got a few more successes for free diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -678,8 +678,12 @@ raise self.connection._get_exception(ret) if self.statement.kind == "DQL": - self.statement._readahead() - self.statement._build_row_cast_map() + if ret == SQLITE_ROW: + self.statement._build_row_cast_map() + self.statement._readahead() + else: + self.statement.item = None + self.statement.exhausted = True if self.statement.kind in ("DML", "DDL"): self.statement.reset() @@ -856,7 +860,7 @@ def _build_row_cast_map(self): self.row_cast_map = [] - for i in range(sqlite.sqlite3_column_count(self.statement)): + for i in xrange(sqlite.sqlite3_column_count(self.statement)): converter = None if self.con.detect_types & PARSE_COLNAMES: @@ -969,7 +973,8 @@ self.column_count = sqlite.sqlite3_column_count(self.statement) row = [] for i in xrange(self.column_count): - typ = sqlite.sqlite3_column_type(self.statement, i) + typ = sqlite.sqlite3_column_type(self.statement, i) + converter = self.row_cast_map[i] if converter is None: if typ == SQLITE_INTEGER: From commits-noreply at bitbucket.org Mon Mar 7 09:24:19 2011 From: commits-noreply at bitbucket.org (ademan) Date: Mon, 7 Mar 2011 09:24:19 +0100 (CET) Subject: [pypy-svn] pypy default: Turned enumerate into iter over range to fix sqlite CheckExecuteParamSequence test. 
Message-ID: <20110307082419.B27F7282BEA@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42453:55b00d420b7f Date: 2011-03-07 00:24 -0800 http://bitbucket.org/pypy/pypy/changeset/55b00d420b7f/ Log: Turned enumerate into iter over range to fix sqlite CheckExecuteParamSequence test. diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -933,8 +933,8 @@ if len(params) != sqlite.sqlite3_bind_parameter_count(self.statement): raise ProgrammingError("wrong number of arguments") - for idx, param in enumerate(params): - self.set_param(idx+1, param) + for i in range(len(params)): + self.set_param(i+1, params[i]) else: for idx in range(1, sqlite.sqlite3_bind_parameter_count(self.statement) + 1): param_name = sqlite.sqlite3_bind_parameter_name(self.statement, idx) From commits-noreply at bitbucket.org Mon Mar 7 10:57:47 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Mon, 7 Mar 2011 10:57:47 +0100 (CET) Subject: [pypy-svn] pypy pytest2: substitute a non-portable conftest hack that pretended that lib_pypy's contents were also in pypy/module Message-ID: <20110307095747.33C6A36C20D@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42454:ef3c80ea6e02 Date: 2011-03-07 10:56 +0100 http://bitbucket.org/pypy/pypy/changeset/ef3c80ea6e02/ Log: substitute a non-portable conftest hack that pretended that lib_pypy's contents were also in pypy/module diff --git a/lib_pypy/conftest.py b/lib_pypy/conftest.py new file mode 100644 --- /dev/null +++ b/lib_pypy/conftest.py @@ -0,0 +1,2 @@ + +from pypy.conftest import * diff --git a/pypy/module/conftest.py b/pypy/module/conftest.py deleted file mode 100644 --- a/pypy/module/conftest.py +++ /dev/null @@ -1,18 +0,0 @@ -import py -from pypy.tool.lib_pypy import LIB_PYPY - -class MultipleDirCollector(py.test.collect.Collector): - def __init__(self, name, mainfspath, fspaths, parent=None, config=None): - super(MultipleDirCollector, self).__init__(name, parent, 
config) - self.main_collector = py.test.collect.Directory(mainfspath, self) - self.collectors = [py.test.collect.Directory(fspath, self) - for fspath in fspaths] - - def collect(self): - return self.main_collector.collect() + self.collectors - - -def pytest_collect_directory(path, parent): - if path.basename == 'test_lib_pypy': - # collect all the test in BOTH test_lib_pypy and ../../lib_pypy - return MultipleDirCollector(path.basename, path, [LIB_PYPY], parent) From commits-noreply at bitbucket.org Mon Mar 7 11:14:08 2011 From: commits-noreply at bitbucket.org (ademan) Date: Mon, 7 Mar 2011 11:14:08 +0100 (CET) Subject: [pypy-svn] pypy default: Changed exception in create_function to act like CPython. Fixed error in sqlite test userfunctions CheckFuncErrorOnCreate Message-ID: <20110307101408.4D475282BEA@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42455:0656676831d1 Date: 2011-03-07 02:13 -0800 http://bitbucket.org/pypy/pypy/changeset/0656676831d1/ Log: Changed exception in create_function to act like CPython. 
Fixed error in sqlite test userfunctions CheckFuncErrorOnCreate diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -546,7 +546,7 @@ self._check_closed() try: c_closure, _ = self.func_cache[callback] - except KeyError: + except KeyError: def closure(context, nargs, c_params): function_callback(callback, context, nargs, c_params) c_closure = FUNC(closure) @@ -557,7 +557,7 @@ cast(None, STEP), cast(None, FINAL)) if ret != SQLITE_OK: - raise self._get_exception(ret) + raise self.OperationalError("Error creating function") def create_aggregate(self, name, num_args, cls): self._check_thread() From commits-noreply at bitbucket.org Mon Mar 7 11:34:17 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Mon, 7 Mar 2011 11:34:17 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: another meaning less commit to test mail/commit hook Message-ID: <20110307103417.79A46282BDE@codespeak.net> Author: holger krekel Branch: extradoc Changeset: r3358:c16b214d9a58 Date: 2011-03-07 11:34 +0100 http://bitbucket.org/pypy/extradoc/changeset/c16b214d9a58/ Log: another meaning less commit to test mail/commit hook diff --git a/talk/ustour2011/google-abstract2.txt b/talk/ustour2011/google-abstract2.txt --- a/talk/ustour2011/google-abstract2.txt +++ b/talk/ustour2011/google-abstract2.txt @@ -59,3 +59,4 @@ The PyPy project has a blog which may be of interest. You can read it at http://morepypy.blogspot.com/ . 
+ From hpk at codespeak.net Mon Mar 7 13:32:24 2011 From: hpk at codespeak.net (hpk at codespeak.net) Date: Mon, 7 Mar 2011 13:32:24 +0100 (CET) Subject: [pypy-svn] r80409 - pypy/build/testrunner Message-ID: <20110307123224.E6909282BDE@codespeak.net> Author: hpk Date: Mon Mar 7 13:32:20 2011 New Revision: 80409 Modified: pypy/build/testrunner/runner.py Log: re-add py/bin/py.test and py/bin/_findpy.py to ease transition wrt to nightly jobs Modified: pypy/build/testrunner/runner.py ============================================================================== --- pypy/build/testrunner/runner.py (original) +++ pypy/build/testrunner/runner.py Mon Mar 7 13:32:20 2011 @@ -102,7 +102,7 @@ args = map(str, args) interp0 = args[0] if (_win32 and not os.path.isabs(interp0) and - ('\\' in interp0 or '/' in interp0)): + ('\\' in interp0 or '/' in interp0)): args[0] = os.path.join(str(cwd), interp0) if do_dry_run: @@ -139,7 +139,7 @@ elif exitcode == RUNFAILED: msg = "Failed to run interp" elif exitcode == EXECUTEFAILED: - msg = "Failed with exception in execute-test" + msg = "Failed with exception in execute-test" else: msg = "Killed by %s." % getsignalname(-exitcode) extralog = "! 
%s\n %s\n" % (test, msg) @@ -163,7 +163,7 @@ result_queue.put(None) # done return result_queue.put(('start', test)) - basename = py.path.local(test).purebasename + basename = py.path.local(test).purebasename logfname = sessdir.join("%d-%s-pytest-log" % (num, basename)) one_output = sessdir.join("%d-%s-output" % (num, basename)) num += n @@ -181,7 +181,7 @@ traceback.print_exc() exitcode = EXECUTEFAILED - if one_output.check(file=1): + if one_output.check(file=1): output = one_output.read(READ_MODE) else: output = "" @@ -238,7 +238,7 @@ if res[0] == 'start': started += 1 out.write("++ starting %s [%d started in total]\n" % (res[1], - started)) + started)) continue testname, somefailed, logdata, output = res[1:] @@ -261,7 +261,12 @@ class RunParam(object): dry_run = False interp = [os.path.abspath(sys.executable)] - test_driver = [os.path.abspath(os.path.join('py', 'bin', 'py.test'))] + pytestpath = os.path.abspath(os.path.join('py', 'bin', 'py.test')) + if not os.path.exists(pytestpath): + pytestpath = os.path.abspath(os.path.join('pytest.py')) + assert os.path.exists(pytestpath) + test_driver = [pytestpath] + parallel_runs = 1 timeout = None cherrypick = None From commits-noreply at bitbucket.org Mon Mar 7 13:33:51 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Mon, 7 Mar 2011 13:33:51 +0100 (CET) Subject: [pypy-svn] pypy pytest2: (really) re-add py/bin/py.test and py/bin/_findpy.py to ease transition wrt to nightly jobs Message-ID: <20110307123351.3C6B4282BDE@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42456:e12baeb2c897 Date: 2011-03-07 13:33 +0100 http://bitbucket.org/pypy/pypy/changeset/e12baeb2c897/ Log: (really) re-add py/bin/py.test and py/bin/_findpy.py to ease transition wrt to nightly jobs diff --git a/py/bin/py.test b/py/bin/py.test new file mode 100755 --- /dev/null +++ b/py/bin/py.test @@ -0,0 +1,3 @@ +#!/usr/bin/env python +from _findpy import pytest +pytest.main() diff --git a/py/bin/_findpy.py b/py/bin/_findpy.py new 
file mode 100644 --- /dev/null +++ b/py/bin/_findpy.py @@ -0,0 +1,38 @@ +#!/usr/bin/env python + +# +# find and import a version of 'py' +# +import sys +import os +from os.path import dirname as opd, exists, join, basename, abspath + +def searchpy(current): + while 1: + last = current + initpy = join(current, '__init__.py') + if not exists(initpy): + pydir = join(current, 'py') + # recognize py-package and ensure it is importable + if exists(pydir) and exists(join(pydir, '__init__.py')): + #for p in sys.path: + # if p == current: + # return True + if current != sys.path[0]: # if we are already first, then ok + sys.stderr.write("inserting into sys.path: %s\n" % current) + sys.path.insert(0, current) + return True + current = opd(current) + if last == current: + return False + +if not searchpy(abspath(os.curdir)): + if not searchpy(opd(abspath(sys.argv[0]))): + if not searchpy(opd(__file__)): + pass # let's hope it is just on sys.path + +import py +import pytest + +if __name__ == '__main__': + print ("py lib is at %s" % py.__file__) From commits-noreply at bitbucket.org Mon Mar 7 13:47:26 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 13:47:26 +0100 (CET) Subject: [pypy-svn] pypy default: sqlite: Implement the adapter microprotocol Message-ID: <20110307124726.3AEDC282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42457:675d9bb684ca Date: 2011-03-07 11:09 +0100 http://bitbucket.org/pypy/pypy/changeset/675d9bb684ca/ Log: sqlite: Implement the adapter microprotocol diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -890,9 +890,7 @@ if cvt is not None: cvt = param = cvt(param) - adapter = adapters.get((type(param), PrepareProtocol), None) - if adapter is not None: - param = adapter(param) + param = adapt(param) if param is None: sqlite.sqlite3_bind_null(self.statement, idx) @@ -1101,12 +1099,6 @@ return 1 return 0 -def register_adapter(typ, callable): - 
adapters[typ, PrepareProtocol] = callable - -def register_converter(name, callable): - converters[name.upper()] = callable - def _convert_params(con, nargs, params): _params = [] for i in range(nargs): @@ -1187,6 +1179,12 @@ class PrepareProtocol(object): pass +def register_adapter(typ, callable): + adapters[typ, PrepareProtocol] = callable + +def register_converter(name, callable): + converters[name.upper()] = callable + def register_adapters_and_converters(): def adapt_date(val): return val.isoformat() @@ -1216,11 +1214,39 @@ register_converter("date", convert_date) register_converter("timestamp", convert_timestamp) +def adapt(val, proto=PrepareProtocol): + # look for an adapter in the registry + adapter = adapters.get((type(val), proto), None) + if adapter is not None: + return adapter(val) + + # try to have the protocol adapt this object + if hasattr(proto, '__adapt__'): + try: + adapted = proto.__adapt__(val) + except TypeError: + pass + else: + if adapted is not None: + return adapted + + # and finally try to have the object adapt itself + if hasattr(val, '__conform__'): + try: + adapted = val.__conform__(proto) + except TypeError: + pass + else: + if adapted is not None: + return adapted + + return val + +register_adapters_and_converters() + def OptimizedUnicode(s): try: val = unicode(s, "ascii").encode("ascii") except UnicodeDecodeError: val = unicode(s, "utf-8") return val - -register_adapters_and_converters() From commits-noreply at bitbucket.org Mon Mar 7 13:47:26 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 13:47:26 +0100 (CET) Subject: [pypy-svn] pypy default: Correctly free CallbackPtr instances, which fixes the failing test in test_tracker.py Message-ID: <20110307124726.C03E9282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42458:30ab46b50ed2 Date: 2011-03-07 13:33 +0100 http://bitbucket.org/pypy/pypy/changeset/30ab46b50ed2/ Log: Correctly free CallbackPtr instances, which fixes the failing test in 
test_tracker.py diff --git a/pypy/module/_rawffi/test/test__rawffi.py b/pypy/module/_rawffi/test/test__rawffi.py --- a/pypy/module/_rawffi/test/test__rawffi.py +++ b/pypy/module/_rawffi/test/test__rawffi.py @@ -611,7 +611,7 @@ a3.free() a4.free() ll_to_sort.free() - del cb + cb.free() def test_another_callback(self): import _rawffi @@ -625,7 +625,7 @@ res = runcallback(a1) assert res[0] == 1<<42 a1.free() - del cb + cb.free() def test_void_returning_callback(self): import _rawffi @@ -641,7 +641,7 @@ assert res is None assert called == [True] a1.free() - del cb + cb.free() def test_another_callback_in_stackless(self): try: @@ -668,7 +668,7 @@ res = runcallback(a1) assert res[0] == 1<<42 a1.free() - del cb + cb.free() def test_raising_callback(self): import _rawffi, sys @@ -686,7 +686,7 @@ a1 = cb.byptr() res = runcallback(a1) a1.free() - del cb + cb.free() val = err.getvalue() assert 'ZeroDivisionError' in val assert 'callback' in val @@ -970,6 +970,7 @@ res = op_x_y(x_y, a1) a1.free() x_y.free() + cb.free() assert res[0] == 420 From commits-noreply at bitbucket.org Mon Mar 7 14:48:06 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 14:48:06 +0100 (CET) Subject: [pypy-svn] pypy default: Fix tests in test_app_main: Popen.communicate() can't be called on interactive programs, Message-ID: <20110307134806.1A41A282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42459:8b19bd95646c Date: 2011-03-07 14:47 +0100 http://bitbucket.org/pypy/pypy/changeset/8b19bd95646c/ Log: Fix tests in test_app_main: Popen.communicate() can't be called on interactive programs, there is no way to close them. 
diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py --- a/pypy/translator/goal/test2/test_app_main.py +++ b/pypy/translator/goal/test2/test_app_main.py @@ -546,8 +546,10 @@ ) child_in, child_out_err = process.stdin, process.stdout child_in.write(senddata) + child_in.close() data = child_out_err.read() - process.communicate() + child_out_err.close() + process.wait() assert (banner in data) == expect_banner # no banner unless expected assert ('>>> ' in data) == expect_prompt # no prompt unless expected return data, process.returncode From commits-noreply at bitbucket.org Mon Mar 7 15:26:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 15:26:57 +0100 (CET) Subject: [pypy-svn] pypy default: Stop displaying the MSVC compiler version, it messes with app_main.py output. Message-ID: <20110307142657.0FD6A36C20C@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42460:f75f6e15ce49 Date: 2011-03-07 15:24 +0100 http://bitbucket.org/pypy/pypy/changeset/f75f6e15ce49/ Log: Stop displaying the MSVC compiler version, it messes with app_main.py output. 
diff --git a/pypy/translator/platform/windows.py b/pypy/translator/platform/windows.py --- a/pypy/translator/platform/windows.py +++ b/pypy/translator/platform/windows.py @@ -37,7 +37,7 @@ key, value = line.split('=', 1) if key.upper() in ['PATH', 'INCLUDE', 'LIB']: env[key.upper()] = value - log.msg("Updated environment with %s" % (vcvars,)) + ## log.msg("Updated environment with %s" % (vcvars,)) return env def find_msvc_env(): From commits-noreply at bitbucket.org Mon Mar 7 15:26:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 15:26:57 +0100 (CET) Subject: [pypy-svn] pypy default: Don't update argv[0] when executing a directory (which contains __main__.py) Message-ID: <20110307142657.C649836C20C@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42461:cf9ef708a291 Date: 2011-03-07 15:25 +0100 http://bitbucket.org/pypy/pypy/changeset/cf9ef708a291/ Log: Don't update argv[0] when executing a directory (which contains __main__.py) diff --git a/pypy/translator/goal/app_main.py b/pypy/translator/goal/app_main.py --- a/pypy/translator/goal/app_main.py +++ b/pypy/translator/goal/app_main.py @@ -593,7 +593,7 @@ # the module __main__ import runpy sys.path.insert(0, filename) - args = (runpy._run_module_as_main, '__main__') + args = (runpy._run_module_as_main, '__main__', False) else: # no. That's the normal path, "pypy stuff.py". 
args = (execfile, filename, mainmodule.__dict__) diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py --- a/pypy/translator/goal/test2/test_app_main.py +++ b/pypy/translator/goal/test2/test_app_main.py @@ -542,7 +542,8 @@ process = subprocess.Popen( cmdline, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, - shell=True, env=env + shell=True, env=env, + universal_newlines=True ) child_in, child_out_err = process.stdin, process.stdout child_in.write(senddata) @@ -732,11 +733,11 @@ def test_main_in_dir_commandline_argument(self): if not hasattr(runpy, '_run_module_as_main'): skip("requires CPython >= 2.6") - p = getscript_in_dir('print 6*7\n') + p = getscript_in_dir('import sys; print sys.argv[0]\n') data = self.run(p) - assert data == '42\n' + assert data == p + '\n' data = self.run(p + os.sep) - assert data == '42\n' + assert data == p + os.sep + '\n' def test_pythonioencoding(self): if sys.version_info < (2, 7): From commits-noreply at bitbucket.org Mon Mar 7 15:51:26 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 7 Mar 2011 15:51:26 +0100 (CET) Subject: [pypy-svn] jitviewer default: add a quick help on how to produce the logfile Message-ID: <20110307145126.1F4A836C20C@codespeak.net> Author: Antonio Cuni Branch: Changeset: r100:76c5b061fe29 Date: 2011-03-07 15:51 +0100 http://bitbucket.org/pypy/jitviewer/changeset/76c5b061fe29/ Log: add a quick help on how to produce the logfile diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -1,11 +1,14 @@ #!/usr/bin/env pypy-c """ A web-based browser of your log files. Run by -jitviewer.py [port] + jitviewer.py [port] and point your browser to http://localhost:5000 +Demo logfile available in this directory as 'log'. -Demo logfile available in this directory as 'log'. 
+To produce the logfile for your program, run: + + PYPYLOG=jit-log-opt:mylogfile.log pypy-c myapp.py """ import sys From commits-noreply at bitbucket.org Mon Mar 7 16:12:47 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 7 Mar 2011 16:12:47 +0100 (CET) Subject: [pypy-svn] jitviewer default: we need this category to know how many times loops are executed Message-ID: <20110307151247.A3D3A36C20D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r101:f111beab453f Date: 2011-03-07 16:12 +0100 http://bitbucket.org/pypy/jitviewer/changeset/f111beab453f/ Log: we need this category to know how many times loops are executed diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -8,7 +8,7 @@ To produce the logfile for your program, run: - PYPYLOG=jit-log-opt:mylogfile.log pypy-c myapp.py + PYPYLOG=jit-log-opt,jit-backend-counts:mylogfile.log pypy-c myapp.py """ import sys From commits-noreply at bitbucket.org Mon Mar 7 17:47:43 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 17:47:43 +0100 (CET) Subject: [pypy-svn] pypy default: Be sure to open the zip file in binary mode, Message-ID: <20110307164743.6DA2E36C20F@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42462:5d74d52827c9 Date: 2011-03-07 17:41 +0100 http://bitbucket.org/pypy/pypy/changeset/5d74d52827c9/ Log: Be sure to open the zip file in binary mode, otherwise CRLF translation occurs and CRC is out of sync... 
diff --git a/pypy/rlib/test/test_rzipfile.py b/pypy/rlib/test/test_rzipfile.py --- a/pypy/rlib/test/test_rzipfile.py +++ b/pypy/rlib/test/test_rzipfile.py @@ -20,7 +20,7 @@ cls.zipname = zipname zipfile = ZipFile(zipname, "w", compression=cls.compression) cls.year = time.localtime(time.time())[0] - zipfile.writestr("one", "stuff") + zipfile.writestr("one", "stuff\n") zipfile.writestr("dir" + os.path.sep + "two", "otherstuff") # Value selected to produce a CRC32 which is negative if # interpreted as a signed 32 bit integer. This exercises the @@ -36,7 +36,7 @@ rzip = RZipFile(zipname, "r", compression) info = rzip.getinfo('one') return (info.date_time[0] == year and - rzip.read('one') == 'stuff' and + rzip.read('one') == 'stuff\n' and rzip.read('three') == 'hello, world') assert one() diff --git a/pypy/rlib/rzipfile.py b/pypy/rlib/rzipfile.py --- a/pypy/rlib/rzipfile.py +++ b/pypy/rlib/rzipfile.py @@ -150,11 +150,11 @@ raise TypeError("Read only support by now") self.compression = compression self.filename = zipname - self.mode = mode self.filelist = [] self.NameToInfo = {} if 'b' not in mode: mode += 'b' + self.mode = mode fp = self.get_fp() try: self._GetContents(fp) @@ -260,4 +260,4 @@ raise BadZipfile, "Bad CRC-32 for file %s" % filename return bytes finally: - fp.close() \ No newline at end of file + fp.close() From commits-noreply at bitbucket.org Mon Mar 7 17:47:44 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 17:47:44 +0100 (CET) Subject: [pypy-svn] pypy default: Catch raw BadZipfile in zipimport module, Message-ID: <20110307164744.3B71936C20F@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42463:5ba25cc48026 Date: 2011-03-07 17:44 +0100 http://bitbucket.org/pypy/pypy/changeset/5ba25cc48026/ Log: Catch raw BadZipfile in zipimport module, don't let a corrupt zip file crash the interpreter diff --git a/pypy/module/zipimport/interp_zipimport.py b/pypy/module/zipimport/interp_zipimport.py --- 
a/pypy/module/zipimport/interp_zipimport.py +++ b/pypy/module/zipimport/interp_zipimport.py @@ -246,7 +246,7 @@ fname = filename + ext try: buf = self.zip_file.read(fname) - except (KeyError, OSError): + except (KeyError, OSError, BadZipfile): pass else: if is_package: @@ -277,7 +277,7 @@ try: data = self.zip_file.read(filename) return w(data) - except (KeyError, OSError): + except (KeyError, OSError, BadZipfile): raise OperationError(space.w_IOError, space.wrap("Error reading file")) @unwrap_spec(fullname=str) @@ -345,8 +345,6 @@ @unwrap_spec(name=str) def descr_new_zipimporter(space, w_type, name): w = space.wrap - w_ZipImportError = space.getattr(space.getbuiltinmodule('zipimport'), - w('ZipImportError')) ok = False parts_ends = [i for i in range(0, len(name)) if name[i] == os.path.sep or name[i] == ZIPSEP] @@ -359,18 +357,18 @@ try: s = os.stat(filename) except OSError: - raise operationerrfmt(w_ZipImportError, + raise operationerrfmt(get_error(space), "Cannot find name %s", filename) if not stat.S_ISDIR(s.st_mode): ok = True break if not ok: - raise operationerrfmt(w_ZipImportError, + raise operationerrfmt(get_error(space), "Did not find %s to be a valid zippath", name) try: w_result = zip_cache.get(filename) if w_result is None: - raise operationerrfmt(w_ZipImportError, + raise operationerrfmt(get_error(space), "Cannot import %s from zipfile, recursion detected or" "already tried and failed", name) except KeyError: @@ -378,7 +376,7 @@ try: zip_file = RZipFile(filename, 'r') except (BadZipfile, OSError): - raise operationerrfmt(w_ZipImportError, + raise operationerrfmt(get_error(space), "%s seems not to be a zipfile", filename) prefix = name[len(filename):] if prefix.startswith(os.path.sep) or prefix.startswith(ZIPSEP): diff --git a/pypy/module/zipimport/__init__.py b/pypy/module/zipimport/__init__.py --- a/pypy/module/zipimport/__init__.py +++ b/pypy/module/zipimport/__init__.py @@ -9,7 +9,7 @@ interpleveldefs = { 
'zipimporter':'interp_zipimport.W_ZipImporter', '_zip_directory_cache' : 'space.wrap(interp_zipimport.zip_cache)', - 'ZipImportError': 'space.fromcache(interp_zipimport.Cache).w_error', + 'ZipImportError': 'interp_zipimport.get_error(space)', } appleveldefs = { From commits-noreply at bitbucket.org Mon Mar 7 18:20:56 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 7 Mar 2011 18:20:56 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: make a few things use a mono-spaced font, fix a `---` (which ReST helpfully Message-ID: <20110307172056.4EC1C282BE9@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3359:96e4b69cfacd Date: 2011-03-07 18:20 +0100 http://bitbucket.org/pypy/extradoc/changeset/96e4b69cfacd/ Log: make a few things use a mono-spaced font, fix a `---` (which ReST helpfully escapes for you). diff --git a/talk/ustour2011/google-talk.txt b/talk/ustour2011/google-talk.txt --- a/talk/ustour2011/google-talk.txt +++ b/talk/ustour2011/google-talk.txt @@ -143,7 +143,7 @@ * Depends on the use case -* Much better than CPython for instances of classes with no __slots__ +* Much better than CPython for instances of classes with no ``__slots__`` * On running PyPy's translation toolchain on 32-bits: 1.7GB with PyPy (including the JIT machine code), versus 1.2GB with CPython @@ -169,7 +169,7 @@ * More so than, say, Jython or IronPython * Main difference: Garbage Collection is not refcounting (because we - could get much better GCs) --- so __del__ methods are not called + could get much better GCs) — so ``__del__`` methods are not called immediately and predictively * Apart from that, it is really 99.99% compatible @@ -250,17 +250,17 @@ How ``a + b`` works (simplified!): -* look up the method __add__ on the type of a +* look up the method ``__add__`` on the type of a * if there is one, call it * if it returns NotImplemented, or if there is none, - look up the method __radd__ on the type of b + look up the method ``__radd__`` on the type 
of b * if there is one, call it -* if there is none, or we get NotImplemented again, - raise an exception TypeError +* if there is none, or we get ``NotImplemented`` again, + raise an exception ``TypeError`` Python is a mess From commits-noreply at bitbucket.org Mon Mar 7 19:00:46 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 7 Mar 2011 19:00:46 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Adapt Makefile to newer rst2beamer Message-ID: <20110307180046.B1527282BE9@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3360:dd92d7ee8bb6 Date: 2011-03-07 10:00 -0800 http://bitbucket.org/pypy/extradoc/changeset/dd92d7ee8bb6/ Log: Adapt Makefile to newer rst2beamer Add a compatibility point diff --git a/talk/ustour2011/google-talk.pdf b/talk/ustour2011/google-talk.pdf index c7ce515be1e288abd453bdb4807f2fb7ab9c1357..8c1eb0a8b56edd29897fe65b7addede73df2344f GIT binary patch [cut] diff --git a/talk/ustour2011/Makefile b/talk/ustour2011/Makefile --- a/talk/ustour2011/Makefile +++ b/talk/ustour2011/Makefile @@ -1,7 +1,7 @@ google-talk.pdf: google-talk.txt author.latex title.latex stylesheet.latex - rst2beamer.py --stylesheet=stylesheet.latex --documentoptions=14pt google-talk.txt google-talk.latex || exit + rst2beamer --input-encoding=utf-8 --output-encoding=utf-8 --stylesheet=stylesheet.latex --documentoptions=14pt --theme=Warsaw --overlaybullets=False google-talk.txt google-talk.latex || exit sed 's/\\date{}/\\input{author.latex}/' -i google-talk.latex || exit sed 's/\\maketitle/\\input{title.latex}/' -i google-talk.latex || exit pdflatex google-talk.latex || exit diff --git a/talk/ustour2011/google-talk.txt b/talk/ustour2011/google-talk.txt --- a/talk/ustour2011/google-talk.txt +++ b/talk/ustour2011/google-talk.txt @@ -172,6 +172,8 @@ could get much better GCs) — so ``__del__`` methods are not called immediately and predictively +* 2.5 compatibility release, 2.7 on trunk + * Apart from that, it is really 99.99% compatible From 
commits-noreply at bitbucket.org Mon Mar 7 21:31:24 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 21:31:24 +0100 (CET) Subject: [pypy-svn] pypy default: Add mutable copy of test_runpy.py Message-ID: <20110307203124.BED12282BE9@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42464:31afba84e18d Date: 2011-03-07 20:49 +0100 http://bitbucket.org/pypy/pypy/changeset/31afba84e18d/ Log: Add mutable copy of test_runpy.py diff --git a/lib-python/2.7.0/test/test_runpy.py b/lib-python/modified-2.7.0/test/test_runpy.py copy from lib-python/2.7.0/test/test_runpy.py copy to lib-python/modified-2.7.0/test/test_runpy.py From commits-noreply at bitbucket.org Mon Mar 7 21:31:26 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 7 Mar 2011 21:31:26 +0100 (CET) Subject: [pypy-svn] pypy default: PyPy does not import "lone .pyc file" when the .py file does not exist. Message-ID: <20110307203126.05993282BE9@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42465:f74ac2a0986c Date: 2011-03-07 21:31 +0100 http://bitbucket.org/pypy/pypy/changeset/f74ac2a0986c/ Log: PyPy does not import "lone .pyc file" when the .py file does not exist. 
Skip the relevant parts of test_runpy diff --git a/lib-python/modified-2.7.0/test/test_runpy.py b/lib-python/modified-2.7.0/test/test_runpy.py --- a/lib-python/modified-2.7.0/test/test_runpy.py +++ b/lib-python/modified-2.7.0/test/test_runpy.py @@ -5,10 +5,15 @@ import sys import re import tempfile -from test.test_support import verbose, run_unittest, forget +from test.test_support import verbose, run_unittest, forget, check_impl_detail from test.script_helper import (temp_dir, make_script, compile_script, make_pkg, make_zip_script, make_zip_pkg) +if check_impl_detail(pypy=True): + no_lone_pyc_file = True +else: + no_lone_pyc_file = False + from runpy import _run_code, _run_module_code, run_module, run_path # Note: This module can't safely test _run_module_as_main as it @@ -168,13 +173,14 @@ self.assertIn("x", d1) self.assertTrue(d1["x"] == 1) del d1 # Ensure __loader__ entry doesn't keep file open - __import__(mod_name) - os.remove(mod_fname) - if verbose: print "Running from compiled:", mod_name - d2 = run_module(mod_name) # Read from bytecode - self.assertIn("x", d2) - self.assertTrue(d2["x"] == 1) - del d2 # Ensure __loader__ entry doesn't keep file open + if not no_lone_pyc_file: + __import__(mod_name) + os.remove(mod_fname) + if verbose: print "Running from compiled:", mod_name + d2 = run_module(mod_name) # Read from bytecode + self.assertIn("x", d2) + self.assertTrue(d2["x"] == 1) + del d2 # Ensure __loader__ entry doesn't keep file open finally: self._del_pkg(pkg_dir, depth, mod_name) if verbose: print "Module executed successfully" @@ -190,13 +196,14 @@ self.assertIn("x", d1) self.assertTrue(d1["x"] == 1) del d1 # Ensure __loader__ entry doesn't keep file open - __import__(mod_name) - os.remove(mod_fname) - if verbose: print "Running from compiled:", pkg_name - d2 = run_module(pkg_name) # Read from bytecode - self.assertIn("x", d2) - self.assertTrue(d2["x"] == 1) - del d2 # Ensure __loader__ entry doesn't keep file open + if not no_lone_pyc_file: + 
__import__(mod_name) + os.remove(mod_fname) + if verbose: print "Running from compiled:", pkg_name + d2 = run_module(pkg_name) # Read from bytecode + self.assertIn("x", d2) + self.assertTrue(d2["x"] == 1) + del d2 # Ensure __loader__ entry doesn't keep file open finally: self._del_pkg(pkg_dir, depth, pkg_name) if verbose: print "Package executed successfully" @@ -244,15 +251,17 @@ self.assertIn("sibling", d1) self.assertIn("nephew", d1) del d1 # Ensure __loader__ entry doesn't keep file open - __import__(mod_name) - os.remove(mod_fname) - if verbose: print "Running from compiled:", mod_name - d2 = run_module(mod_name, run_name=run_name) # Read from bytecode - self.assertIn("__package__", d2) - self.assertTrue(d2["__package__"] == pkg_name) - self.assertIn("sibling", d2) - self.assertIn("nephew", d2) - del d2 # Ensure __loader__ entry doesn't keep file open + if not no_lone_pyc_file: + __import__(mod_name) + os.remove(mod_fname) + if verbose: print "Running from compiled:", mod_name + # Read from bytecode + d2 = run_module(mod_name, run_name=run_name) + self.assertIn("__package__", d2) + self.assertTrue(d2["__package__"] == pkg_name) + self.assertIn("sibling", d2) + self.assertIn("nephew", d2) + del d2 # Ensure __loader__ entry doesn't keep file open finally: self._del_pkg(pkg_dir, depth, mod_name) if verbose: print "Module executed successfully" @@ -345,6 +354,8 @@ script_dir, '') def test_directory_compiled(self): + if no_lone_pyc_file: + return with temp_dir() as script_dir: mod_name = '__main__' script_name = self._make_test_script(script_dir, mod_name) From commits-noreply at bitbucket.org Mon Mar 7 21:52:55 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 7 Mar 2011 21:52:55 +0100 (CET) Subject: [pypy-svn] pypy default: (David Edelsohn) make the x86 backend no longer emit the XCHG, which implies Message-ID: <20110307205255.24389282BE9@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42466:6599d8ca8be1 Date: 2011-03-07 21:51 +0100 
http://bitbucket.org/pypy/pypy/changeset/6599d8ca8be1/ Log: (David Edelsohn) make the x86 backend no longer emit the XCHG, which implies atomic semantics (which is not really needed in our case). diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -516,8 +516,8 @@ # XXX: Only here for testing purposes..."as" happens the encode the # registers in the opposite order that we would otherwise do in a - # register-register exchange - XCHG_rr = insn(rex_w, '\x87', register(1), register(2,8), '\xC0') + # register-register exchange. + #XCHG_rr = insn(rex_w, '\x87', register(1), register(2,8), '\xC0') JMP_l = insn('\xE9', relative(1)) JMP_r = insn(rex_nw, '\xFF', orbyte(4<<3), register(1), '\xC0') @@ -658,7 +658,7 @@ define_modrm_modes('MOVSD_x*', ['\xF2', rex_nw, '\x0F\x10', register(1,8)], regtype='XMM') define_modrm_modes('MOVSD_*x', ['\xF2', rex_nw, '\x0F\x11', register(2,8)], regtype='XMM') -define_modrm_modes('XCHG_r*', [rex_w, '\x87', register(1, 8)]) +#define_modrm_modes('XCHG_r*', [rex_w, '\x87', register(1, 8)]) define_modrm_modes('ADDSD_x*', ['\xF2', rex_nw, '\x0F\x58', register(1, 8)], regtype='XMM') define_modrm_modes('SUBSD_x*', ['\xF2', rex_nw, '\x0F\x5C', register(1, 8)], regtype='XMM') diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -698,11 +698,9 @@ else: target = tmp if inputargs[i].type == REF: - # This uses XCHG to put zeroes in fail_boxes_ptr after - # reading them - self.mc.XOR(target, target) adr = self.fail_boxes_ptr.get_addr_for_num(i) - self.mc.XCHG(target, heap(adr)) + self.mc.MOV(target, heap(adr)) + self.mc.MOV(heap(adr), imm0) else: adr = self.fail_boxes_int.get_addr_for_num(i) self.mc.MOV(target, heap(adr)) @@ -1908,8 +1906,8 @@ self.mc.MOV(eax, heap(adr)) elif kind == REF: adr = self.fail_boxes_ptr.get_addr_for_num(0) - 
self.mc.XOR_rr(eax.value, eax.value) - self.mc.XCHG(eax, heap(adr)) + self.mc.MOV(eax, heap(adr)) + self.mc.MOV(heap(adr), imm0) else: raise AssertionError(kind) # diff --git a/pypy/jit/backend/x86/test/test_rx86.py b/pypy/jit/backend/x86/test/test_rx86.py --- a/pypy/jit/backend/x86/test/test_rx86.py +++ b/pypy/jit/backend/x86/test/test_rx86.py @@ -118,11 +118,6 @@ s.SET_ir(5, dl) assert s.getvalue() == '\x0F\x95\xC2' -def test_xchg_rj(): - s = CodeBuilder32() - s.XCHG_rj(edx, 0x01234567) - assert s.getvalue() == '\x87\x15\x67\x45\x23\x01' - def test_movsd_rj(): s = CodeBuilder32() s.MOVSD_xj(xmm2, 0x01234567) diff --git a/pypy/jit/backend/x86/regloc.py b/pypy/jit/backend/x86/regloc.py --- a/pypy/jit/backend/x86/regloc.py +++ b/pypy/jit/backend/x86/regloc.py @@ -491,7 +491,9 @@ MOVSX16 = _binaryop('MOVSX16') MOV32 = _binaryop('MOV32') MOVSX32 = _binaryop('MOVSX32') - XCHG = _binaryop('XCHG') + # Avoid XCHG because it always implies atomic semantics, which is + # slower and does not pair well for dispatch. + #XCHG = _binaryop('XCHG') PUSH = _unaryop('PUSH') POP = _unaryop('POP') From commits-noreply at bitbucket.org Mon Mar 7 21:54:42 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 7 Mar 2011 21:54:42 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: this is done Message-ID: <20110307205442.68213282BE9@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3361:cdea7598532d Date: 2011-03-07 21:54 +0100 http://bitbucket.org/pypy/extradoc/changeset/cdea7598532d/ Log: this is done diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -48,9 +48,6 @@ - local imports should be jitted more efficiently, right now they produce a long trace and they are rather common (e.g. in translate.py) -- don't use XCHG in the x86 backend, as that implies some sort of locking, that - we don't need and might be expensive. 
- - the integer range analysis cannot deal with int_between, because it is lowered to uint arithmetic too early From commits-noreply at bitbucket.org Tue Mar 8 02:05:14 2011 From: commits-noreply at bitbucket.org (ademan) Date: Tue, 8 Mar 2011 02:05:14 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed the last errors in sqlite tests. Message-ID: <20110308010514.C8C9C36C204@codespeak.net> Author: Daniel Roberts Branch: Changeset: r42467:fe7b1cf37df8 Date: 2011-03-07 17:04 -0800 http://bitbucket.org/pypy/pypy/changeset/fe7b1cf37df8/ Log: Fixed the last errors in sqlite tests. diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -228,6 +228,9 @@ factory = kwargs.get("factory", Connection) return factory(database, **kwargs) +def unicode_text_factory(x): + return unicode(x, 'utf-8') + class Connection(object): def __init__(self, database, isolation_level="", detect_types=0, timeout=None, *args, **kwargs): self.db = c_void_p() @@ -237,7 +240,7 @@ timeout = int(timeout * 1000) # pysqlite2 uses timeout in seconds sqlite.sqlite3_busy_timeout(self.db, timeout) - self.text_factory = lambda x: unicode(x, "utf-8") + self.text_factory = unicode_text_factory self.closed = False self.statements = [] self.statement_counter = 0 @@ -643,7 +646,6 @@ self.connection = con self._description = None self.arraysize = 1 - self.text_factory = con.text_factory self.row_factory = None self.rowcount = -1 self.statement = None @@ -885,6 +887,17 @@ self.row_cast_map.append(converter) + def _check_decodable(self, param): + if self.con.text_factory in (unicode, OptimizedUnicode, unicode_text_factory): + for c in param: + if ord(c) & 0x80 != 0: + raise self.con.ProgrammingError( + "You must not use 8-bit bytestrings unless " + "you use a text_factory that can interpret " + "8-bit bytestrings (like text_factory = str). 
" + "It is highly recommended that you instead " + "just switch your application to Unicode strings.") + def set_param(self, idx, param): cvt = converters.get(type(param)) if cvt is not None: @@ -902,6 +915,7 @@ elif type(param) is float: sqlite.sqlite3_bind_double(self.statement, idx, param) elif isinstance(param, str): + self._check_decodable(param) sqlite.sqlite3_bind_text(self.statement, idx, param, -1, SQLITE_TRANSIENT) elif isinstance(param, unicode): param = param.encode("utf-8") @@ -989,7 +1003,7 @@ val = None elif typ == SQLITE_TEXT: val = sqlite.sqlite3_column_text(self.statement, i) - val = self.cur().text_factory(val) + val = self.con.text_factory(val) else: blob = sqlite.sqlite3_column_blob(self.statement, i) if not blob: From commits-noreply at bitbucket.org Tue Mar 8 09:53:29 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 09:53:29 +0100 (CET) Subject: [pypy-svn] pypy pytest2: fix url / install references Message-ID: <20110308085329.44D1136C20A@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42468:1acf1ebd0fd0 Date: 2011-03-07 13:48 +0100 http://bitbucket.org/pypy/pypy/changeset/1acf1ebd0fd0/ Log: fix url / install references diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -1,6 +1,7 @@ """ unit and functional testing with Python. (pypy version of startup script) +see http://pytest.org for details. """ __version__ = '2.0.2.dev5' # base pytest version __all__ = ['main'] diff --git a/pypy/test_all.py b/pypy/test_all.py --- a/pypy/test_all.py +++ b/pypy/test_all.py @@ -4,7 +4,8 @@ -------------------------- Running test_all.py is equivalent to running py.test -(either installed from the py lib package, or from ../py/bin/). +which you independently install, see +http://pytest.org/getting-started.html For more information, use test_all.py -h. 
""" diff --git a/.hgsubstate b/.hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,3 +1,3 @@ 80037 greenlet 80348 lib_pypy/pyrepl -80037 testrunner +80409 testrunner From commits-noreply at bitbucket.org Tue Mar 8 09:53:31 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 09:53:31 +0100 (CET) Subject: [pypy-svn] pypy pytest2: copy latest pytest release candidate Message-ID: <20110308085331.8B819282BE9@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42469:86986162c61b Date: 2011-03-08 09:51 +0100 http://bitbucket.org/pypy/pypy/changeset/86986162c61b/ Log: copy latest pytest release candidate diff --git a/_pytest/helpconfig.py b/_pytest/helpconfig.py --- a/_pytest/helpconfig.py +++ b/_pytest/helpconfig.py @@ -2,6 +2,7 @@ import py import pytest import inspect, sys +from _pytest.core import varnames def pytest_addoption(parser): group = parser.getgroup('debugconfig') @@ -135,12 +136,11 @@ fail = True else: #print "checking", method - method_args = getargs(method) - #print "method_args", method_args + method_args = list(varnames(method)) if '__multicall__' in method_args: method_args.remove('__multicall__') hook = hooks[name] - hookargs = getargs(hook) + hookargs = varnames(hook) for arg in method_args: if arg not in hookargs: Print("argument %r not available" %(arg, )) @@ -162,11 +162,6 @@ return name == "pytest_plugins" or \ name.startswith("pytest_funcarg__") -def getargs(func): - args = inspect.getargs(py.code.getrawcode(func))[0] - startindex = inspect.ismethod(func) and 1 or 0 - return args[startindex:] - def collectattr(obj): methods = {} for apiname in dir(obj): diff --git a/_pytest/pdb.py b/_pytest/pdb.py --- a/_pytest/pdb.py +++ b/_pytest/pdb.py @@ -52,7 +52,10 @@ if "xfail" in rep.keywords: return rep # we assume that the above execute() suspended capturing - tw = py.io.TerminalWriter() + # XXX we re-use the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not 
completely clear reasons. + tw = item.config.pluginmanager.getplugin("terminalreporter")._tw tw.line() tw.sep(">", "traceback") rep.toterminal(tw) diff --git a/_pytest/core.py b/_pytest/core.py --- a/_pytest/core.py +++ b/_pytest/core.py @@ -60,6 +60,7 @@ class PluginManager(object): def __init__(self, load=False): self._name2plugin = {} + self._listattrcache = {} self._plugins = [] self._hints = [] self.trace = TagTracer().get("pluginmanage") @@ -272,6 +273,11 @@ def listattr(self, attrname, plugins=None): if plugins is None: plugins = self._plugins + key = (attrname,) + tuple(plugins) + try: + return list(self._listattrcache[key]) + except KeyError: + pass l = [] last = [] for plugin in plugins: @@ -286,6 +292,7 @@ except AttributeError: continue l.extend(last) + self._listattrcache[key] = list(l) return l def call_plugin(self, plugin, methname, kwargs): @@ -340,14 +347,20 @@ return kwargs def varnames(func): + try: + return func._varnames + except AttributeError: + pass if not inspect.isfunction(func) and not inspect.ismethod(func): func = getattr(func, '__call__', func) ismethod = inspect.ismethod(func) rawcode = py.code.getrawcode(func) try: - return rawcode.co_varnames[ismethod:rawcode.co_argcount] + x = rawcode.co_varnames[ismethod:rawcode.co_argcount] except AttributeError: - return () + x = () + py.builtin._getfuncdict(func)['_varnames'] = x + return x class HookRelay: def __init__(self, hookspecs, pm, prefix="pytest_"): diff --git a/_pytest/main.py b/_pytest/main.py --- a/_pytest/main.py +++ b/_pytest/main.py @@ -326,7 +326,13 @@ return self._location except AttributeError: location = self.reportinfo() - fspath = self.session.fspath.bestrelpath(location[0]) + # bestrelpath is a quite slow function + cache = self.config.__dict__.setdefault("_bestrelpathcache", {}) + try: + fspath = cache[location[0]] + except KeyError: + fspath = self.session.fspath.bestrelpath(location[0]) + cache[location[0]] = fspath location = (fspath, location[1], str(location[2])) 
self._location = location return location From commits-noreply at bitbucket.org Tue Mar 8 09:53:32 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 09:53:32 +0100 (CET) Subject: [pypy-svn] pypy pytest2: merge default Message-ID: <20110308085332.065D8282BEA@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42470:a9e3fe42b354 Date: 2011-03-08 09:52 +0100 http://bitbucket.org/pypy/pypy/changeset/a9e3fe42b354/ Log: merge default From commits-noreply at bitbucket.org Tue Mar 8 10:22:37 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 10:22:37 +0100 (CET) Subject: [pypy-svn] pypy pytest2: also bump the version Message-ID: <20110308092237.A653B282B90@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42471:14f2806aaa6d Date: 2011-03-08 10:21 +0100 http://bitbucket.org/pypy/pypy/changeset/14f2806aaa6d/ Log: also bump the version diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -3,7 +3,7 @@ (pypy version of startup script) see http://pytest.org for details. 
""" -__version__ = '2.0.2.dev5' # base pytest version +__version__ = '2.0.2.dev7' # base pytest version __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins From commits-noreply at bitbucket.org Tue Mar 8 12:14:03 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 12:14:03 +0100 (CET) Subject: [pypy-svn] pypy default: merge pytest2 branch Message-ID: <20110308111403.BEDC736C20A@codespeak.net> Author: holger krekel Branch: Changeset: r42472:e0647ab9125e Date: 2011-03-08 12:13 +0100 http://bitbucket.org/pypy/pypy/changeset/e0647ab9125e/ Log: merge pytest2 branch diff --git a/pypy/module/conftest.py b/pypy/module/conftest.py deleted file mode 100644 --- a/pypy/module/conftest.py +++ /dev/null @@ -1,18 +0,0 @@ -import py -from pypy.tool.lib_pypy import LIB_PYPY - -class MultipleDirCollector(py.test.collect.Collector): - def __init__(self, name, mainfspath, fspaths, parent=None, config=None): - super(MultipleDirCollector, self).__init__(name, parent, config) - self.main_collector = py.test.collect.Directory(mainfspath, self) - self.collectors = [py.test.collect.Directory(fspath, self) - for fspath in fspaths] - - def collect(self): - return self.main_collector.collect() + self.collectors - - -def pytest_collect_directory(path, parent): - if path.basename == 'test_lib_pypy': - # collect all the test in BOTH test_lib_pypy and ../../lib_pypy - return MultipleDirCollector(path.basename, path, [LIB_PYPY], parent) diff --git a/py/_test/parseopt.py b/py/_test/parseopt.py deleted file mode 100644 --- a/py/_test/parseopt.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -thin wrapper around Python's optparse.py -adding some extra checks and ways to systematically -have Environment variables provide default values -for options. 
basic usage: - - >>> parser = Parser() - >>> parser.addoption("--hello", action="store_true", dest="hello") - >>> option, args = parser.parse(['--hello']) - >>> option.hello - True - >>> args - [] - -""" -import py -import optparse - -class Parser: - """ Parser for command line arguments. """ - - def __init__(self, usage=None, processopt=None): - self._anonymous = OptionGroup("custom options", parser=self) - self._groups = [] - self._processopt = processopt - self._usage = usage - self.hints = [] - - def processoption(self, option): - if self._processopt: - if option.dest: - self._processopt(option) - - def addnote(self, note): - self._notes.append(note) - - def getgroup(self, name, description="", after=None): - for group in self._groups: - if group.name == name: - return group - group = OptionGroup(name, description, parser=self) - i = 0 - for i, grp in enumerate(self._groups): - if grp.name == after: - break - self._groups.insert(i+1, group) - return group - - addgroup = getgroup - def addgroup(self, name, description=""): - py.log._apiwarn("1.1", "use getgroup() which gets-or-creates") - return self.getgroup(name, description) - - def addoption(self, *opts, **attrs): - """ add an optparse-style option. 
""" - self._anonymous.addoption(*opts, **attrs) - - def parse(self, args): - optparser = MyOptionParser(self) - groups = self._groups + [self._anonymous] - for group in groups: - if group.options: - desc = group.description or group.name - optgroup = optparse.OptionGroup(optparser, desc) - optgroup.add_options(group.options) - optparser.add_option_group(optgroup) - return optparser.parse_args([str(x) for x in args]) - - def parse_setoption(self, args, option): - parsedoption, args = self.parse(args) - for name, value in parsedoption.__dict__.items(): - setattr(option, name, value) - return args - - -class OptionGroup: - def __init__(self, name, description="", parser=None): - self.name = name - self.description = description - self.options = [] - self.parser = parser - - def addoption(self, *optnames, **attrs): - """ add an option to this group. """ - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=False) - - def _addoption(self, *optnames, **attrs): - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=True) - - def _addoption_instance(self, option, shortupper=False): - if not shortupper: - for opt in option._short_opts: - if opt[0] == '-' and opt[1].islower(): - raise ValueError("lowercase shortoptions reserved") - if self.parser: - self.parser.processoption(option) - self.options.append(option) - - -class MyOptionParser(optparse.OptionParser): - def __init__(self, parser): - self._parser = parser - optparse.OptionParser.__init__(self, usage=parser._usage) - def format_epilog(self, formatter): - hints = self._parser.hints - if hints: - s = "\n".join(["hint: " + x for x in hints]) + "\n" - s = "\n" + s + "\n" - return s - return "" diff --git a/py/_plugin/pytest_pdb.py b/py/_plugin/pytest_pdb.py deleted file mode 100644 --- a/py/_plugin/pytest_pdb.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -interactive debugging with the Python Debugger. 
-""" -import py -import pdb, sys, linecache - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--pdb', - action="store_true", dest="usepdb", default=False, - help="start the interactive Python debugger on errors.") - -def pytest_configure(config): - if config.getvalue("usepdb"): - config.pluginmanager.register(PdbInvoke(), 'pdb') - -class PdbInvoke: - def pytest_runtest_makereport(self, item, call): - if call.excinfo and not \ - call.excinfo.errisinstance(py.test.skip.Exception): - # play well with capturing, slightly hackish - capman = item.config.pluginmanager.getplugin('capturemanager') - capman.suspendcapture() - - tw = py.io.TerminalWriter() - repr = call.excinfo.getrepr() - repr.toterminal(tw) - post_mortem(call.excinfo._excinfo[2]) - - capman.resumecapture_item(item) - -class Pdb(py.std.pdb.Pdb): - def do_list(self, arg): - self.lastcmd = 'list' - last = None - if arg: - try: - x = eval(arg, {}, {}) - if type(x) == type(()): - first, last = x - first = int(first) - last = int(last) - if last < first: - # Assume it's a count - last = first + last - else: - first = max(1, int(x) - 5) - except: - print ('*** Error in argument: %s' % repr(arg)) - return - elif self.lineno is None: - first = max(1, self.curframe.f_lineno - 5) - else: - first = self.lineno + 1 - if last is None: - last = first + 10 - filename = self.curframe.f_code.co_filename - breaklist = self.get_file_breaks(filename) - try: - for lineno in range(first, last+1): - # start difference from normal do_line - line = self._getline(filename, lineno) - # end difference from normal do_line - if not line: - print ('[EOF]') - break - else: - s = repr(lineno).rjust(3) - if len(s) < 4: s = s + ' ' - if lineno in breaklist: s = s + 'B' - else: s = s + ' ' - if lineno == self.curframe.f_lineno: - s = s + '->' - sys.stdout.write(s + '\t' + line) - self.lineno = lineno - except KeyboardInterrupt: - pass - do_l = do_list - - def _getline(self, filename, lineno): - if 
hasattr(filename, "__source__"): - try: - return filename.__source__.lines[lineno - 1] + "\n" - except IndexError: - return None - return linecache.getline(filename, lineno) - - def get_stack(self, f, t): - # Modified from bdb.py to be able to walk the stack beyond generators, - # which does not work in the normal pdb :-( - stack, i = pdb.Pdb.get_stack(self, f, t) - if f is None: - i = max(0, len(stack) - 1) - while i and stack[i][0].f_locals.get("__tracebackhide__", False): - i-=1 - return stack, i - -def post_mortem(t): - p = Pdb() - p.reset() - p.interaction(None, t) - -def set_trace(): - # again, a copy of the version in pdb.py - Pdb().set_trace(sys._getframe().f_back) diff --git a/py/_plugin/pytest_runner.py b/py/_plugin/pytest_runner.py deleted file mode 100644 --- a/py/_plugin/pytest_runner.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -collect and run test items and create reports. -""" - -import py, sys - -def pytest_namespace(): - return { - 'raises' : raises, - 'skip' : skip, - 'importorskip' : importorskip, - 'fail' : fail, - 'xfail' : xfail, - 'exit' : exit, - } - -# -# pytest plugin hooks - -# XXX move to pytest_sessionstart and fix py.test owns tests -def pytest_configure(config): - config._setupstate = SetupState() - -def pytest_sessionfinish(session, exitstatus): - if hasattr(session.config, '_setupstate'): - hook = session.config.hook - rep = hook.pytest__teardown_final(session=session) - if rep: - hook.pytest__teardown_final_logerror(report=rep) - -def pytest_make_collect_report(collector): - result = excinfo = None - try: - result = collector._memocollect() - except KeyboardInterrupt: - raise - except: - excinfo = py.code.ExceptionInfo() - return CollectReport(collector, result, excinfo) - -def pytest_runtest_protocol(item): - runtestprotocol(item) - return True - -def runtestprotocol(item, log=True): - rep = call_and_report(item, "setup", log) - reports = [rep] - if rep.passed: - reports.append(call_and_report(item, "call", log)) - 
reports.append(call_and_report(item, "teardown", log)) - return reports - -def pytest_runtest_setup(item): - item.config._setupstate.prepare(item) - -def pytest_runtest_call(item): - if not item._deprecated_testexecution(): - item.runtest() - -def pytest_runtest_makereport(item, call): - return ItemTestReport(item, call.excinfo, call.when) - -def pytest_runtest_teardown(item): - item.config._setupstate.teardown_exact(item) - -def pytest__teardown_final(session): - call = CallInfo(session.config._setupstate.teardown_all, when="teardown") - if call.excinfo: - ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir) - call.excinfo.traceback = ntraceback.filter() - rep = TeardownErrorReport(call.excinfo) - return rep - -def pytest_report_teststatus(report): - if report.when in ("setup", "teardown"): - if report.failed: - # category, shortletter, verbose-word - return "error", "E", "ERROR" - elif report.skipped: - return "skipped", "s", "SKIPPED" - else: - return "", "", "" -# -# Implementation - -def call_and_report(item, when, log=True): - call = call_runtest_hook(item, when) - hook = item.ihook - report = hook.pytest_runtest_makereport(item=item, call=call) - if log and (when == "call" or not report.passed): - hook.pytest_runtest_logreport(report=report) - return report - -def call_runtest_hook(item, when): - hookname = "pytest_runtest_" + when - ihook = getattr(item.ihook, hookname) - return CallInfo(lambda: ihook(item=item), when=when) - -class CallInfo: - excinfo = None - def __init__(self, func, when): - self.when = when - try: - self.result = func() - except KeyboardInterrupt: - raise - except: - self.excinfo = py.code.ExceptionInfo() - - def __repr__(self): - if self.excinfo: - status = "exception: %s" % str(self.excinfo.value) - else: - status = "result: %r" % (self.result,) - return "" % (self.when, status) - -class BaseReport(object): - def __repr__(self): - l = ["%s=%s" %(key, value) - for key, value in self.__dict__.items()] - return "<%s %s>" 
%(self.__class__.__name__, " ".join(l),) - - def toterminal(self, out): - longrepr = self.longrepr - if hasattr(longrepr, 'toterminal'): - longrepr.toterminal(out) - else: - out.line(str(longrepr)) - -class ItemTestReport(BaseReport): - failed = passed = skipped = False - - def __init__(self, item, excinfo=None, when=None): - self.item = item - self.when = when - if item and when != "setup": - self.keywords = item.readkeywords() - else: - # if we fail during setup it might mean - # we are not able to access the underlying object - # this might e.g. happen if we are unpickled - # and our parent collector did not collect us - # (because it e.g. skipped for platform reasons) - self.keywords = {} - if not excinfo: - self.passed = True - self.shortrepr = "." - else: - if not isinstance(excinfo, py.code.ExceptionInfo): - self.failed = True - shortrepr = "?" - longrepr = excinfo - elif excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - shortrepr = "s" - longrepr = self.item._repr_failure_py(excinfo) - else: - self.failed = True - shortrepr = self.item.shortfailurerepr - if self.when == "call": - longrepr = self.item.repr_failure(excinfo) - else: # exception in setup or teardown - longrepr = self.item._repr_failure_py(excinfo) - shortrepr = shortrepr.lower() - self.shortrepr = shortrepr - self.longrepr = longrepr - - def __repr__(self): - status = (self.passed and "passed" or - self.skipped and "skipped" or - self.failed and "failed" or - "CORRUPT") - l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,] - if hasattr(self, 'node'): - l.append("txnode=%s" % self.node.gateway.id) - info = " " .join(map(str, l)) - return "" % info - - def getnode(self): - return self.item - -class CollectReport(BaseReport): - skipped = failed = passed = False - - def __init__(self, collector, result, excinfo=None): - self.collector = collector - if not excinfo: - self.passed = True - self.result = result - else: - style = "short" - if 
collector.config.getvalue("fulltrace"): - style = "long" - self.longrepr = self.collector._repr_failure_py(excinfo, - style=style) - if excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - self.reason = str(excinfo.value) - else: - self.failed = True - - def getnode(self): - return self.collector - -class TeardownErrorReport(BaseReport): - skipped = passed = False - failed = True - when = "teardown" - def __init__(self, excinfo): - self.longrepr = excinfo.getrepr(funcargs=True) - -class SetupState(object): - """ shared state for setting up/tearing down test items or collectors. """ - def __init__(self): - self.stack = [] - self._finalizers = {} - - def addfinalizer(self, finalizer, colitem): - """ attach a finalizer to the given colitem. - if colitem is None, this will add a finalizer that - is called at the end of teardown_all(). - """ - assert hasattr(finalizer, '__call__') - #assert colitem in self.stack - self._finalizers.setdefault(colitem, []).append(finalizer) - - def _pop_and_teardown(self): - colitem = self.stack.pop() - self._teardown_with_finalization(colitem) - - def _callfinalizers(self, colitem): - finalizers = self._finalizers.pop(colitem, None) - while finalizers: - fin = finalizers.pop() - fin() - - def _teardown_with_finalization(self, colitem): - self._callfinalizers(colitem) - if colitem: - colitem.teardown() - for colitem in self._finalizers: - assert colitem is None or colitem in self.stack - - def teardown_all(self): - while self.stack: - self._pop_and_teardown() - self._teardown_with_finalization(None) - assert not self._finalizers - - def teardown_exact(self, item): - if self.stack and item == self.stack[-1]: - self._pop_and_teardown() - else: - self._callfinalizers(item) - - def prepare(self, colitem): - """ setup objects along the collector chain to the test-method - and teardown previously setup objects.""" - needed_collectors = colitem.listchain() - while self.stack: - if self.stack == 
needed_collectors[:len(self.stack)]: - break - self._pop_and_teardown() - # check if the last collection node has raised an error - for col in self.stack: - if hasattr(col, '_prepare_exc'): - py.builtin._reraise(*col._prepare_exc) - for col in needed_collectors[len(self.stack):]: - self.stack.append(col) - try: - col.setup() - except Exception: - col._prepare_exc = sys.exc_info() - raise - -# ============================================================= -# Test OutcomeExceptions and helpers for creating them. - - -class OutcomeException(Exception): - """ OutcomeException and its subclass instances indicate and - contain info about test and collection outcomes. - """ - def __init__(self, msg=None, excinfo=None): - self.msg = msg - self.excinfo = excinfo - - def __repr__(self): - if self.msg: - return repr(self.msg) - return "<%s instance>" %(self.__class__.__name__,) - __str__ = __repr__ - -class Skipped(OutcomeException): - # XXX hackish: on 3k we fake to live in the builtins - # in order to have Skipped exception printing shorter/nicer - __module__ = 'builtins' - -class Failed(OutcomeException): - """ raised from an explicit call to py.test.fail() """ - __module__ = 'builtins' - -class XFailed(OutcomeException): - """ raised from an explicit call to py.test.xfail() """ - __module__ = 'builtins' - -class ExceptionFailure(Failed): - """ raised by py.test.raises on an exception-assertion mismatch. """ - def __init__(self, expr, expected, msg=None, excinfo=None): - Failed.__init__(self, msg=msg, excinfo=excinfo) - self.expr = expr - self.expected = expected - -class Exit(KeyboardInterrupt): - """ raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """ - def __init__(self, msg="unknown reason"): - self.msg = msg - KeyboardInterrupt.__init__(self, msg) - -# exposed helper methods - -def exit(msg): - """ exit testing process as if KeyboardInterrupt was triggered. 
""" - __tracebackhide__ = True - raise Exit(msg) - -exit.Exception = Exit - -def skip(msg=""): - """ skip an executing test with the given message. Note: it's usually - better use the py.test.mark.skipif marker to declare a test to be - skipped under certain conditions like mismatching platforms or - dependencies. See the pytest_skipping plugin for details. - """ - __tracebackhide__ = True - raise Skipped(msg=msg) - -skip.Exception = Skipped - -def fail(msg=""): - """ explicitely fail an currently-executing test with the given Message. """ - __tracebackhide__ = True - raise Failed(msg=msg) - -fail.Exception = Failed - -def xfail(reason=""): - """ xfail an executing test or setup functions, taking an optional - reason string. - """ - __tracebackhide__ = True - raise XFailed(reason) -xfail.Exception = XFailed - -def raises(ExpectedException, *args, **kwargs): - """ if args[0] is callable: raise AssertionError if calling it with - the remaining arguments does not raise the expected exception. - if args[0] is a string: raise AssertionError if executing the - the string in the calling scope does not raise expected exception. - for examples: - x = 5 - raises(TypeError, lambda x: x + 'hello', x=x) - raises(TypeError, "x + 'hello'") - """ - __tracebackhide__ = True - assert args - if isinstance(args[0], str): - code, = args - assert isinstance(code, str) - frame = sys._getframe(1) - loc = frame.f_locals.copy() - loc.update(kwargs) - #print "raises frame scope: %r" % frame.f_locals - try: - code = py.code.Source(code).compile() - py.builtin.exec_(code, frame.f_globals, loc) - # XXX didn'T mean f_globals == f_locals something special? - # this is destroyed here ... 
- except ExpectedException: - return py.code.ExceptionInfo() - else: - func = args[0] - try: - func(*args[1:], **kwargs) - except ExpectedException: - return py.code.ExceptionInfo() - k = ", ".join(["%s=%r" % x for x in kwargs.items()]) - if k: - k = ', ' + k - expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k) - raise ExceptionFailure(msg="DID NOT RAISE", - expr=args, expected=ExpectedException) - -raises.Exception = ExceptionFailure - -def importorskip(modname, minversion=None): - """ return imported module if it has a higher __version__ than the - optionally specified 'minversion' - otherwise call py.test.skip() - with a message detailing the mismatch. - """ - compile(modname, '', 'eval') # to catch syntaxerrors - try: - mod = __import__(modname, None, None, ['__doc__']) - except ImportError: - py.test.skip("could not import %r" %(modname,)) - if minversion is None: - return mod - verattr = getattr(mod, '__version__', None) - if isinstance(minversion, str): - minver = minversion.split(".") - else: - minver = list(minversion) - if verattr is None or verattr.split(".") < minver: - py.test.skip("module %r has __version__ %r, required is: %r" %( - modname, verattr, minversion)) - return mod - diff --git a/py/_test/funcargs.py b/py/_test/funcargs.py deleted file mode 100644 --- a/py/_test/funcargs.py +++ /dev/null @@ -1,176 +0,0 @@ -import py - -def getfuncargnames(function): - argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0] - startindex = py.std.inspect.ismethod(function) and 1 or 0 - defaults = getattr(function, 'func_defaults', - getattr(function, '__defaults__', None)) or () - numdefaults = len(defaults) - if numdefaults: - return argnames[startindex:-numdefaults] - return argnames[startindex:] - -def fillfuncargs(function): - """ fill missing funcargs. 
""" - request = FuncargRequest(pyfuncitem=function) - request._fillfuncargs() - -def getplugins(node, withpy=False): # might by any node - plugins = node.config._getmatchingplugins(node.fspath) - if withpy: - mod = node.getparent(py.test.collect.Module) - if mod is not None: - plugins.append(mod.obj) - inst = node.getparent(py.test.collect.Instance) - if inst is not None: - plugins.append(inst.obj) - return plugins - -_notexists = object() -class CallSpec: - def __init__(self, funcargs, id, param): - self.funcargs = funcargs - self.id = id - if param is not _notexists: - self.param = param - def __repr__(self): - return "" %( - self.id, getattr(self, 'param', '?'), self.funcargs) - -class Metafunc: - def __init__(self, function, config=None, cls=None, module=None): - self.config = config - self.module = module - self.function = function - self.funcargnames = getfuncargnames(function) - self.cls = cls - self.module = module - self._calls = [] - self._ids = py.builtin.set() - - def addcall(self, funcargs=None, id=_notexists, param=_notexists): - assert funcargs is None or isinstance(funcargs, dict) - if id is None: - raise ValueError("id=None not allowed") - if id is _notexists: - id = len(self._calls) - id = str(id) - if id in self._ids: - raise ValueError("duplicate id %r" % id) - self._ids.add(id) - self._calls.append(CallSpec(funcargs, id, param)) - -class FuncargRequest: - _argprefix = "pytest_funcarg__" - _argname = None - - class LookupError(LookupError): - """ error on performing funcarg request. 
""" - - def __init__(self, pyfuncitem): - self._pyfuncitem = pyfuncitem - self.function = pyfuncitem.obj - self.module = pyfuncitem.getparent(py.test.collect.Module).obj - clscol = pyfuncitem.getparent(py.test.collect.Class) - self.cls = clscol and clscol.obj or None - self.instance = py.builtin._getimself(self.function) - self.config = pyfuncitem.config - self.fspath = pyfuncitem.fspath - if hasattr(pyfuncitem, '_requestparam'): - self.param = pyfuncitem._requestparam - self._plugins = getplugins(pyfuncitem, withpy=True) - self._funcargs = self._pyfuncitem.funcargs.copy() - self._name2factory = {} - self._currentarg = None - - def _fillfuncargs(self): - argnames = getfuncargnames(self.function) - if argnames: - assert not getattr(self._pyfuncitem, '_args', None), ( - "yielded functions cannot have funcargs") - for argname in argnames: - if argname not in self._pyfuncitem.funcargs: - self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname) - - def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): - """ cache and return result of calling setup(). - - The requested argument name, the scope and the ``extrakey`` - determine the cache key. The scope also determines when - teardown(result) will be called. valid scopes are: - scope == 'function': when the single test function run finishes. - scope == 'module': when tests in a different module are run - scope == 'session': when tests of the session have run. - """ - if not hasattr(self.config, '_setupcache'): - self.config._setupcache = {} # XXX weakref? 
- cachekey = (self._currentarg, self._getscopeitem(scope), extrakey) - cache = self.config._setupcache - try: - val = cache[cachekey] - except KeyError: - val = setup() - cache[cachekey] = val - if teardown is not None: - def finalizer(): - del cache[cachekey] - teardown(val) - self._addfinalizer(finalizer, scope=scope) - return val - - def getfuncargvalue(self, argname): - try: - return self._funcargs[argname] - except KeyError: - pass - if argname not in self._name2factory: - self._name2factory[argname] = self.config.pluginmanager.listattr( - plugins=self._plugins, - attrname=self._argprefix + str(argname) - ) - #else: we are called recursively - if not self._name2factory[argname]: - self._raiselookupfailed(argname) - funcargfactory = self._name2factory[argname].pop() - oldarg = self._currentarg - self._currentarg = argname - try: - self._funcargs[argname] = res = funcargfactory(request=self) - finally: - self._currentarg = oldarg - return res - - def _getscopeitem(self, scope): - if scope == "function": - return self._pyfuncitem - elif scope == "module": - return self._pyfuncitem.getparent(py.test.collect.Module) - elif scope == "session": - return None - raise ValueError("unknown finalization scope %r" %(scope,)) - - def _addfinalizer(self, finalizer, scope): - colitem = self._getscopeitem(scope) - self.config._setupstate.addfinalizer( - finalizer=finalizer, colitem=colitem) - - def addfinalizer(self, finalizer): - """ call the given finalizer after test function finished execution. 
""" - self._addfinalizer(finalizer, scope="function") - - def __repr__(self): - return "" %(self._pyfuncitem) - - def _raiselookupfailed(self, argname): - available = [] - for plugin in self._plugins: - for name in vars(plugin): - if name.startswith(self._argprefix): - name = name[len(self._argprefix):] - if name not in available: - available.append(name) - fspath, lineno, msg = self._pyfuncitem.reportinfo() - msg = "LookupError: no factory found for function argument %r" % (argname,) - msg += "\n available funcargs: %s" %(", ".join(available),) - msg += "\n use 'py.test --funcargs [testpath]' for help on them." - raise self.LookupError(msg) diff --git a/py/_cmdline/pycountloc.py b/py/_cmdline/pycountloc.py deleted file mode 100755 --- a/py/_cmdline/pycountloc.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# hands on script to compute the non-empty Lines of Code -# for tests and non-test code - -"""\ -py.countloc [PATHS] - -Count (non-empty) lines of python code and number of python files recursively -starting from a list of paths given on the command line (starting from the -current working directory). Distinguish between test files and normal ones and -report them separately. 
-""" -import py - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - (options, args) = parser.parse_args() - countloc(args) - -def nodot(p): - return p.check(dotfile=0) - -class FileCounter(object): - def __init__(self): - self.file2numlines = {} - self.numlines = 0 - self.numfiles = 0 - - def addrecursive(self, directory, fil="*.py", rec=nodot): - for x in directory.visit(fil, rec): - self.addfile(x) - - def addfile(self, fn, emptylines=False): - if emptylines: - s = len(p.readlines()) - else: - s = 0 - for i in fn.readlines(): - if i.strip(): - s += 1 - self.file2numlines[fn] = s - self.numfiles += 1 - self.numlines += s - - def getnumlines(self, fil): - numlines = 0 - for path, value in self.file2numlines.items(): - if fil(path): - numlines += value - return numlines - - def getnumfiles(self, fil): - numfiles = 0 - for path in self.file2numlines: - if fil(path): - numfiles += 1 - return numfiles - -def get_loccount(locations=None): - if locations is None: - localtions = [py.path.local()] - counter = FileCounter() - for loc in locations: - counter.addrecursive(loc, '*.py', rec=nodot) - - def istestfile(p): - return p.check(fnmatch='test_*.py') - isnottestfile = lambda x: not istestfile(x) - - numfiles = counter.getnumfiles(isnottestfile) - numlines = counter.getnumlines(isnottestfile) - numtestfiles = counter.getnumfiles(istestfile) - numtestlines = counter.getnumlines(istestfile) - - return counter, numfiles, numlines, numtestfiles, numtestlines - -def countloc(paths=None): - if not paths: - paths = ['.'] - locations = [py.path.local(x) for x in paths] - (counter, numfiles, numlines, numtestfiles, - numtestlines) = get_loccount(locations) - - items = counter.file2numlines.items() - items.sort(lambda x,y: cmp(x[1], y[1])) - for x, y in items: - print("%3d %30s" % (y,x)) - - print("%30s %3d" %("number of testfiles", numtestfiles)) - print("%30s %3d" %("number of non-empty testlines", numtestlines)) - print("%30s %3d" %("number of files", 
numfiles)) - print("%30s %3d" %("number of non-empty lines", numlines)) - diff --git a/py/_cmdline/pyconvert_unittest.py b/py/_cmdline/pyconvert_unittest.py deleted file mode 100644 --- a/py/_cmdline/pyconvert_unittest.py +++ /dev/null @@ -1,253 +0,0 @@ -import re -import sys - -try: - import parser -except ImportError: - parser = None - -d={} -# d is the dictionary of unittest changes, keyed to the old name -# used by unittest. -# d[old][0] is the new replacement function. -# d[old][1] is the operator you will substitute, or '' if there is none. -# d[old][2] is the possible number of arguments to the unittest -# function. - -# Old Unittest Name new name operator # of args -d['assertRaises'] = ('raises', '', ['Any']) -d['fail'] = ('raise AssertionError', '', [0,1]) -d['assert_'] = ('assert', '', [1,2]) -d['failIf'] = ('assert not', '', [1,2]) -d['assertEqual'] = ('assert', ' ==', [2,3]) -d['failIfEqual'] = ('assert not', ' ==', [2,3]) -d['assertIn'] = ('assert', ' in', [2,3]) -d['assertNotIn'] = ('assert', ' not in', [2,3]) -d['assertNotEqual'] = ('assert', ' !=', [2,3]) -d['failUnlessEqual'] = ('assert', ' ==', [2,3]) -d['assertAlmostEqual'] = ('assert round', ' ==', [2,3,4]) -d['failIfAlmostEqual'] = ('assert not round', ' ==', [2,3,4]) -d['assertNotAlmostEqual'] = ('assert round', ' !=', [2,3,4]) -d['failUnlessAlmostEquals'] = ('assert round', ' ==', [2,3,4]) - -# the list of synonyms -d['failUnlessRaises'] = d['assertRaises'] -d['failUnless'] = d['assert_'] -d['assertEquals'] = d['assertEqual'] -d['assertNotEquals'] = d['assertNotEqual'] -d['assertAlmostEquals'] = d['assertAlmostEqual'] -d['assertNotAlmostEquals'] = d['assertNotAlmostEqual'] - -# set up the regular expressions we will need -leading_spaces = re.compile(r'^(\s*)') # this never fails - -pat = '' -for k in d.keys(): # this complicated pattern to match all unittests - pat += '|' + r'^(\s*)' + 'self.' 
+ k + r'\(' # \tself.whatever( - -old_names = re.compile(pat[1:]) -linesep='\n' # nobody will really try to convert files not read - # in text mode, will they? - - -def blocksplitter(fp): - '''split a file into blocks that are headed by functions to rename''' - - blocklist = [] - blockstring = '' - - for line in fp: - interesting = old_names.match(line) - if interesting : - if blockstring: - blocklist.append(blockstring) - blockstring = line # reset the block - else: - blockstring += line - - blocklist.append(blockstring) - return blocklist - -def rewrite_utest(block): - '''rewrite every block to use the new utest functions''' - - '''returns the rewritten unittest, unless it ran into problems, - in which case it just returns the block unchanged. - ''' - utest = old_names.match(block) - - if not utest: - return block - - old = utest.group(0).lstrip()[5:-1] # the name we want to replace - new = d[old][0] # the name of the replacement function - op = d[old][1] # the operator you will use , or '' if there is none. - possible_args = d[old][2] # a list of the number of arguments the - # unittest function could possibly take. - - if possible_args == ['Any']: # just rename assertRaises & friends - return re.sub('self.'+old, new, block) - - message_pos = possible_args[-1] - # the remaining unittests can have an optional message to print - # when they fail. It is always the last argument to the function. - - try: - indent, argl, trailer = decompose_unittest(old, block) - - except SyntaxError: # but we couldn't parse it! 
- return block - - argnum = len(argl) - if argnum not in possible_args: - # sanity check - this one isn't real either - return block - - elif argnum == message_pos: - message = argl[-1] - argl = argl[:-1] - else: - message = None - - if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail() - string = '' - if message: - message = ' ' + message - - elif message_pos is 4: # assertAlmostEqual & friends - try: - pos = argl[2].lstrip() - except IndexError: - pos = '7' # default if none is specified - string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op ) - - else: # assert_, assertEquals and all the rest - string = ' ' + op.join(argl) - - if message: - string = string + ',' + message - - return indent + new + string + trailer - -def decompose_unittest(old, block): - '''decompose the block into its component parts''' - - ''' returns indent, arglist, trailer - indent -- the indentation - arglist -- the arguments to the unittest function - trailer -- any extra junk after the closing paren, such as #commment - ''' - - indent = re.match(r'(\s*)', block).group() - pat = re.search('self.' 
+ old + r'\(', block) - - args, trailer = get_expr(block[pat.end():], ')') - arglist = break_args(args, []) - - if arglist == ['']: # there weren't any - return indent, [], trailer - - for i in range(len(arglist)): - try: - parser.expr(arglist[i].lstrip('\t ')) - except SyntaxError: - if i == 0: - arglist[i] = '(' + arglist[i] + ')' - else: - arglist[i] = ' (' + arglist[i] + ')' - - return indent, arglist, trailer - -def break_args(args, arglist): - '''recursively break a string into a list of arguments''' - try: - first, rest = get_expr(args, ',') - if not rest: - return arglist + [first] - else: - return [first] + break_args(rest, arglist) - except SyntaxError: - return arglist + [args] - -def get_expr(s, char): - '''split a string into an expression, and the rest of the string''' - - pos=[] - for i in range(len(s)): - if s[i] == char: - pos.append(i) - if pos == []: - raise SyntaxError # we didn't find the expected char. Ick. - - for p in pos: - # make the python parser do the hard work of deciding which comma - # splits the string into two expressions - try: - parser.expr('(' + s[:p] + ')') - return s[:p], s[p+1:] - except SyntaxError: # It's not an expression yet - pass - raise SyntaxError # We never found anything that worked. - - -def main(): - import sys - import py - - usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]" - optparser = py.std.optparse.OptionParser(usage) - - def select_output (option, opt, value, optparser, **kw): - if hasattr(optparser, 'output'): - optparser.error( - 'Cannot combine -s -i and -c options. 
Use one only.') - else: - optparser.output = kw['output'] - - optparser.add_option("-s", "--stdout", action="callback", - callback=select_output, - callback_kwargs={'output':'stdout'}, - help="send your output to stdout") - - optparser.add_option("-i", "--inplace", action="callback", - callback=select_output, - callback_kwargs={'output':'inplace'}, - help="overwrite files in place") - - optparser.add_option("-c", "--copy", action="callback", - callback=select_output, - callback_kwargs={'output':'copy'}, - help="copy files ... fn.py --> fn_cp.py") - - options, args = optparser.parse_args() - - output = getattr(optparser, 'output', 'stdout') - - if output in ['inplace', 'copy'] and not args: - optparser.error( - '-i and -c option require at least one filename') - - if not args: - s = '' - for block in blocksplitter(sys.stdin): - s += rewrite_utest(block) - sys.stdout.write(s) - - else: - for infilename in args: # no error checking to see if we can open, etc. - infile = file(infilename) - s = '' - for block in blocksplitter(infile): - s += rewrite_utest(block) - if output == 'inplace': - outfile = file(infilename, 'w+') - elif output == 'copy': # yes, just go clobber any existing .cp - outfile = file (infilename[:-3]+ '_cp.py', 'w+') - else: - outfile = sys.stdout - - outfile.write(s) - - -if __name__ == '__main__': - main() diff --git a/py/_compat/dep_doctest.py b/py/_compat/dep_doctest.py deleted file mode 100644 --- a/py/_compat/dep_doctest.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", -stacklevel="apipkg") -doctest = py.std.doctest diff --git a/py/_test/__init__.py b/py/_test/__init__.py deleted file mode 100644 --- a/py/_test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -""" assertion and py.test helper API.""" diff --git a/py/_cmdline/__init__.py b/py/_cmdline/__init__.py deleted file mode 100644 --- a/py/_cmdline/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git 
a/py/_plugin/pytest_unittest.py b/py/_plugin/pytest_unittest.py deleted file mode 100644 --- a/py/_plugin/pytest_unittest.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -automatically discover and run traditional "unittest.py" style tests. - -Usage ----------------- - -This plugin collects and runs Python `unittest.py style`_ tests. -It will automatically collect ``unittest.TestCase`` subclasses -and their ``test`` methods from the test modules of a project -(usually following the ``test_*.py`` pattern). - -This plugin is enabled by default. - -.. _`unittest.py style`: http://docs.python.org/library/unittest.html -""" -import py -import sys - -def pytest_pycollect_makeitem(collector, name, obj): - if 'unittest' not in sys.modules: - return # nobody derived unittest.TestCase - try: - isunit = issubclass(obj, py.std.unittest.TestCase) - except KeyboardInterrupt: - raise - except Exception: - pass - else: - if isunit: - return UnitTestCase(name, parent=collector) - -class UnitTestCase(py.test.collect.Class): - def collect(self): - return [UnitTestCaseInstance("()", self)] - - def setup(self): - pass - - def teardown(self): - pass - -_dummy = object() -class UnitTestCaseInstance(py.test.collect.Instance): - def collect(self): - loader = py.std.unittest.TestLoader() - names = loader.getTestCaseNames(self.obj.__class__) - l = [] - for name in names: - callobj = getattr(self.obj, name) - if py.builtin.callable(callobj): - l.append(UnitTestFunction(name, parent=self)) - return l - - def _getobj(self): - x = self.parent.obj - return self.parent.obj(methodName='run') - -class UnitTestFunction(py.test.collect.Function): - def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None): - super(UnitTestFunction, self).__init__(name, parent) - self._args = args - if obj is not _dummy: - self._obj = obj - self._sort_value = sort_value - if hasattr(self.parent, 'newinstance'): - self.parent.newinstance() - self.obj = self._getobj() - - def runtest(self): - target = self.obj - args 
= self._args - target(*args) - - def setup(self): - instance = py.builtin._getimself(self.obj) - instance.setUp() - - def teardown(self): - instance = py.builtin._getimself(self.obj) - instance.tearDown() - diff --git a/py/_path/gateway/channeltest.py b/py/_path/gateway/channeltest.py deleted file mode 100644 --- a/py/_path/gateway/channeltest.py +++ /dev/null @@ -1,65 +0,0 @@ -import threading - - -class PathServer: - - def __init__(self, channel): - self.channel = channel - self.C2P = {} - self.next_id = 0 - threading.Thread(target=self.serve).start() - - def p2c(self, path): - id = self.next_id - self.next_id += 1 - self.C2P[id] = path - return id - - def command_LIST(self, id, *args): - path = self.C2P[id] - answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)] - self.channel.send(answer) - - def command_DEL(self, id): - del self.C2P[id] - - def command_GET(self, id, spec): - path = self.C2P[id] - self.channel.send(path._getbyspec(spec)) - - def command_READ(self, id): - path = self.C2P[id] - self.channel.send(path.read()) - - def command_JOIN(self, id, resultid, *args): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.join(*args) - - def command_DIRPATH(self, id, resultid): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.dirpath() - - def serve(self): - try: - while 1: - msg = self.channel.receive() - meth = getattr(self, 'command_' + msg[0]) - meth(*msg[1:]) - except EOFError: - pass - -if __name__ == '__main__': - import py - gw = execnet.PopenGateway() - channel = gw._channelfactory.new() - srv = PathServer(channel) - c = gw.remote_exec(""" - import remotepath - p = remotepath.RemotePath(channel.receive(), channel.receive()) - channel.send(len(p.listdir())) - """) - c.send(channel) - c.send(srv.p2c(py.path.local('/tmp'))) - print(c.receive()) diff --git a/py/_compat/dep_textwrap.py b/py/_compat/dep_textwrap.py deleted file mode 100644 --- a/py/_compat/dep_textwrap.py +++ 
/dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", - stacklevel="apipkg") -textwrap = py.std.textwrap diff --git a/pypy/tool/test/conftest1_innertest.py b/pypy/tool/test/conftest1_innertest.py deleted file mode 100644 --- a/pypy/tool/test/conftest1_innertest.py +++ /dev/null @@ -1,15 +0,0 @@ - -def test_something(space): - assert space.w_None is space.w_None - -def app_test_something(): - assert 42 == 42 - -class AppTestSomething: - def test_method_app(self): - assert 23 == 23 - -class TestSomething: - def test_method(self): - assert self.space - diff --git a/py/_plugin/pytest_pastebin.py b/py/_plugin/pytest_pastebin.py deleted file mode 100644 --- a/py/_plugin/pytest_pastebin.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -submit failure or test session information to a pastebin service. - -Usage ----------- - -**Creating a URL for each test failure**:: - - py.test --pastebin=failed - -This will submit test run information to a remote Paste service and -provide a URL for each failure. You may select tests as usual or add -for example ``-x`` if you only want to send one particular failure. - -**Creating a URL for a whole test session log**:: - - py.test --pastebin=all - -Currently only pasting to the http://paste.pocoo.org service is implemented. 
- -""" -import py, sys - -class url: - base = "http://paste.pocoo.org" - xmlrpc = base + "/xmlrpc/" - show = base + "/show/" - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group._addoption('--pastebin', metavar="mode", - action='store', dest="pastebin", default=None, - type="choice", choices=['failed', 'all'], - help="send failed|all info to Pocoo pastebin service.") - -def pytest_configure(__multicall__, config): - import tempfile - __multicall__.execute() - if config.option.pastebin == "all": - config._pastebinfile = tempfile.TemporaryFile('w+') - tr = config.pluginmanager.getplugin('terminalreporter') - oldwrite = tr._tw.write - def tee_write(s, **kwargs): - oldwrite(s, **kwargs) - config._pastebinfile.write(str(s)) - tr._tw.write = tee_write - -def pytest_unconfigure(config): - if hasattr(config, '_pastebinfile'): - config._pastebinfile.seek(0) - sessionlog = config._pastebinfile.read() - config._pastebinfile.close() - del config._pastebinfile - proxyid = getproxy().newPaste("python", sessionlog) - pastebinurl = "%s%s" % (url.show, proxyid) - sys.stderr.write("pastebin session-log: %s\n" % pastebinurl) - tr = config.pluginmanager.getplugin('terminalreporter') - del tr._tw.__dict__['write'] - -def getproxy(): - return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes - -def pytest_terminal_summary(terminalreporter): - if terminalreporter.config.option.pastebin != "failed": - return - tr = terminalreporter - if 'failed' in tr.stats: - terminalreporter.write_sep("=", "Sending information to Paste Service") - if tr.config.option.debug: - terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,)) - serverproxy = getproxy() - for rep in terminalreporter.stats.get('failed'): - try: - msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc - except AttributeError: - msg = tr._getfailureheadline(rep) - tw = py.io.TerminalWriter(stringio=True) - rep.toterminal(tw) - s = tw.stringio.getvalue() - assert len(s) - proxyid = 
serverproxy.newPaste("python", s) - pastebinurl = "%s%s" % (url.show, proxyid) - tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/py/_test/collect.py b/py/_test/collect.py deleted file mode 100644 --- a/py/_test/collect.py +++ /dev/null @@ -1,418 +0,0 @@ -""" -test collection nodes, forming a tree, Items are leafs. -""" -import py - -def configproperty(name): - def fget(self): - #print "retrieving %r property from %s" %(name, self.fspath) - return self.config._getcollectclass(name, self.fspath) - return property(fget) - -class HookProxy: - def __init__(self, node): - self.node = node - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - hookmethod = getattr(self.node.config.hook, name) - def call_matching_hooks(**kwargs): - plugins = self.node.config._getmatchingplugins(self.node.fspath) - return hookmethod.pcall(plugins, **kwargs) - return call_matching_hooks - -class Node(object): - """ base class for all Nodes in the collection tree. - Collector subclasses have children, Items are terminal nodes. - """ - def __init__(self, name, parent=None, config=None): - self.name = name - self.parent = parent - self.config = config or parent.config - self.fspath = getattr(parent, 'fspath', None) - self.ihook = HookProxy(self) - - def _reraiseunpicklingproblem(self): - if hasattr(self, '_unpickle_exc'): - py.builtin._reraise(*self._unpickle_exc) - - # - # note to myself: Pickling is uh. 
- # - def __getstate__(self): - return (self.name, self.parent) - def __setstate__(self, nameparent): - name, parent = nameparent - try: - colitems = parent._memocollect() - for colitem in colitems: - if colitem.name == name: - # we are a copy that will not be returned - # by our parent - self.__dict__ = colitem.__dict__ - break - else: - raise ValueError("item %r not found in parent collection %r" %( - name, [x.name for x in colitems])) - except KeyboardInterrupt: - raise - except Exception: - # our parent can't collect us but we want unpickling to - # otherwise continue - self._reraiseunpicklingproblem() will - # reraise the problem - self._unpickle_exc = py.std.sys.exc_info() - self.name = name - self.parent = parent - self.config = parent.config - - def __repr__(self): - if getattr(self.config.option, 'debug', False): - return "<%s %r %0x>" %(self.__class__.__name__, - getattr(self, 'name', None), id(self)) - else: - return "<%s %r>" %(self.__class__.__name__, - getattr(self, 'name', None)) - - # methods for ordering nodes - - def __eq__(self, other): - if not isinstance(other, Node): - return False - return self.name == other.name and self.parent == other.parent - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.name, self.parent)) - - def setup(self): - pass - - def teardown(self): - pass - - def _memoizedcall(self, attrname, function): - exattrname = "_ex_" + attrname - failure = getattr(self, exattrname, None) - if failure is not None: - py.builtin._reraise(failure[0], failure[1], failure[2]) - if hasattr(self, attrname): - return getattr(self, attrname) - try: - res = function() - except (KeyboardInterrupt, SystemExit): - raise - except: - failure = py.std.sys.exc_info() - setattr(self, exattrname, failure) - raise - setattr(self, attrname, res) - return res - - def listchain(self): - """ return list of all parent collectors up to self, - starting from root of collection tree. 
""" - l = [self] - while 1: - x = l[0] - if x.parent is not None and x.parent.parent is not None: - l.insert(0, x.parent) - else: - return l - - def listnames(self): - return [x.name for x in self.listchain()] - - def getparent(self, cls): - current = self - while current and not isinstance(current, cls): - current = current.parent - return current - - def readkeywords(self): - return dict([(x, True) for x in self._keywords()]) - - def _keywords(self): - return [self.name] - - def _skipbykeyword(self, keywordexpr): - """ return True if they given keyword expression means to - skip this collector/item. - """ - if not keywordexpr: - return - chain = self.listchain() - for key in filter(None, keywordexpr.split()): - eor = key[:1] == '-' - if eor: - key = key[1:] - if not (eor ^ self._matchonekeyword(key, chain)): - return True - - def _matchonekeyword(self, key, chain): - elems = key.split(".") - # XXX O(n^2), anyone cares? - chain = [item.readkeywords() for item in chain if item._keywords()] - for start, _ in enumerate(chain): - if start + len(elems) > len(chain): - return False - for num, elem in enumerate(elems): - for keyword in chain[num + start]: - ok = False - if elem in keyword: - ok = True - break - if not ok: - break - if num == len(elems) - 1 and ok: - return True - return False - - def _prunetraceback(self, traceback): - return traceback - - def _repr_failure_py(self, excinfo, style=None): - excinfo.traceback = self._prunetraceback(excinfo.traceback) - # XXX should excinfo.getrepr record all data and toterminal() - # process it? - if style is None: - if self.config.option.tbstyle == "short": - style = "short" - else: - style = "long" - return excinfo.getrepr(funcargs=True, - showlocals=self.config.option.showlocals, - style=style) - - repr_failure = _repr_failure_py - shortfailurerepr = "F" - -class Collector(Node): - """ - Collector instances create children through collect() - and thus iteratively build a tree. 
attributes:: - - parent: attribute pointing to the parent collector - (or None if this is the root collector) - name: basename of this collector object - """ - Directory = configproperty('Directory') - Module = configproperty('Module') - - def collect(self): - """ returns a list of children (items and collectors) - for this collection node. - """ - raise NotImplementedError("abstract") - - def collect_by_name(self, name): - """ return a child matching the given name, else None. """ - for colitem in self._memocollect(): - if colitem.name == name: - return colitem - - def repr_failure(self, excinfo, outerr=None): - """ represent a failure. """ - assert outerr is None, "XXX deprecated" - return self._repr_failure_py(excinfo) - - def _memocollect(self): - """ internal helper method to cache results of calling collect(). """ - return self._memoizedcall('_collected', self.collect) - - # ********************************************************************** - # DEPRECATED METHODS - # ********************************************************************** - - def _deprecated_collect(self): - # avoid recursion: - # collect -> _deprecated_collect -> custom run() -> - # super().run() -> collect - attrname = '_depcollectentered' - if hasattr(self, attrname): - return - setattr(self, attrname, True) - method = getattr(self.__class__, 'run', None) - if method is not None and method != Collector.run: - warnoldcollect(function=method) - names = self.run() - return [x for x in [self.join(name) for name in names] if x] - - def run(self): - """ DEPRECATED: returns a list of names available from this collector. - You can return an empty list. Callers of this method - must take care to catch exceptions properly. - """ - return [colitem.name for colitem in self._memocollect()] - - def join(self, name): - """ DEPRECATED: return a child collector or item for the given name. - If the return value is None there is no such child. 
- """ - return self.collect_by_name(name) - - def _prunetraceback(self, traceback): - if hasattr(self, 'fspath'): - path = self.fspath - ntraceback = traceback.cut(path=self.fspath) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - -class FSCollector(Collector): - def __init__(self, fspath, parent=None, config=None): - fspath = py.path.local(fspath) - super(FSCollector, self).__init__(fspath.basename, parent, config=config) - self.fspath = fspath - - def __getstate__(self): - # RootCollector.getbynames() inserts a directory which we need - # to throw out here for proper re-instantiation - if isinstance(self.parent.parent, RootCollector): - assert self.parent.fspath == self.parent.parent.fspath, self.parent - return (self.name, self.parent.parent) # shortcut - return super(Collector, self).__getstate__() - -class File(FSCollector): - """ base class for collecting tests from a file. """ - -class Directory(FSCollector): - def recfilter(self, path): - if path.check(dir=1, dotfile=0): - return path.basename not in ('CVS', '_darcs', '{arch}') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - l = [] - for path in self.fspath.listdir(sort=True): - res = self.consider(path) - if res is not None: - if isinstance(res, (list, tuple)): - l.extend(res) - else: - l.append(res) - return l - - def consider(self, path): - if self.ihook.pytest_ignore_collect(path=path, config=self.config): - return - if path.check(file=1): - res = self.consider_file(path) - elif path.check(dir=1): - res = self.consider_dir(path) - else: - res = None - if isinstance(res, list): - # throw out identical results - l = [] - for x in res: - if x not in l: - assert x.parent == self, (x.parent, self) - assert x.fspath == path, (x.fspath, path) - l.append(x) - res = l - return res - - def consider_file(self, path): - return self.ihook.pytest_collect_file(path=path, parent=self) - - 
def consider_dir(self, path, usefilters=None): - if usefilters is not None: - py.log._apiwarn("0.99", "usefilters argument not needed") - return self.ihook.pytest_collect_directory(path=path, parent=self) - -class Item(Node): - """ a basic test item. """ - def _deprecated_testexecution(self): - if self.__class__.run != Item.run: - warnoldtestrun(function=self.run) - elif self.__class__.execute != Item.execute: - warnoldtestrun(function=self.execute) - else: - return False - self.run() - return True - - def run(self): - """ deprecated, here because subclasses might call it. """ - return self.execute(self.obj) - - def execute(self, obj): - """ deprecated, here because subclasses might call it. """ - return obj() - - def reportinfo(self): - return self.fspath, None, "" - -def warnoldcollect(function=None): - py.log._apiwarn("1.0", - "implement collector.collect() instead of " - "collector.run() and collector.join()", - stacklevel=2, function=function) - -def warnoldtestrun(function=None): - py.log._apiwarn("1.0", - "implement item.runtest() instead of " - "item.run() and item.execute()", - stacklevel=2, function=function) - - - -class RootCollector(Directory): - def __init__(self, config): - Directory.__init__(self, config.topdir, parent=None, config=config) - self.name = None - - def __repr__(self): - return "" %(self.fspath,) - - def getbynames(self, names): - current = self.consider(self.config.topdir) - while names: - name = names.pop(0) - if name == ".": # special "identity" name - continue - l = [] - for x in current._memocollect(): - if x.name == name: - l.append(x) - elif x.fspath == current.fspath.join(name): - l.append(x) - elif x.name == "()": - names.insert(0, name) - l.append(x) - break - if not l: - raise ValueError("no node named %r below %r" %(name, current)) - current = l[0] - return current - - def totrail(self, node): - chain = node.listchain() - names = [self._getrelpath(chain[0].fspath)] - names += [x.name for x in chain[1:]] - return names - - 
def fromtrail(self, trail): - return self.config._rootcol.getbynames(trail) - - def _getrelpath(self, fspath): - topdir = self.config.topdir - relpath = fspath.relto(topdir) - if not relpath: - if fspath == topdir: - relpath = "." - else: - raise ValueError("%r not relative to topdir %s" - %(self.fspath, topdir)) - return relpath - - def __getstate__(self): - return self.config - - def __setstate__(self, config): - self.__init__(config) diff --git a/py/_plugin/pytest_capture.py b/py/_plugin/pytest_capture.py deleted file mode 100644 --- a/py/_plugin/pytest_capture.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -configurable per-test stdout/stderr capturing mechanisms. - -This plugin captures stdout/stderr output for each test separately. -In case of test failures this captured output is shown grouped -togtther with the test. - -The plugin also provides test function arguments that help to -assert stdout/stderr output from within your tests, see the -`funcarg example`_. - - -Capturing of input/output streams during tests ---------------------------------------------------- - -By default ``sys.stdout`` and ``sys.stderr`` are substituted with -temporary streams during the execution of tests and setup/teardown code. -During the whole testing process it will re-use the same temporary -streams allowing to play well with the logging module which easily -takes ownership on these streams. - -Also, 'sys.stdin' is substituted with a file-like "null" object that -does not return any values. This is to immediately error out -on tests that wait on reading something from stdin. 
- -You can influence output capturing mechanisms from the command line:: - - py.test -s # disable all capturing - py.test --capture=sys # replace sys.stdout/stderr with in-mem files - py.test --capture=fd # point filedescriptors 1 and 2 to temp file - -If you set capturing values in a conftest file like this:: - - # conftest.py - option_capture = 'fd' - -then all tests in that directory will execute with "fd" style capturing. - -sys-level capturing ------------------------------------------- - -Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` -will be replaced with in-memory files (``py.io.TextIO`` to be precise) -that capture writes and decode non-unicode strings to a unicode object -(using a default, usually, UTF-8, encoding). - -FD-level capturing and subprocesses ------------------------------------------- - -The ``fd`` based method means that writes going to system level files -based on the standard file descriptors will be captured, for example -writes such as ``os.write(1, 'hello')`` will be captured properly. -Capturing on fd-level will include output generated from -any subprocesses created during a test. - -.. _`funcarg example`: - -Example Usage of the capturing Function arguments ---------------------------------------------------- - -You can use the `capsys funcarg`_ and `capfd funcarg`_ to -capture writes to stdout and stderr streams. Using the -funcargs frees your test from having to care about setting/resetting -the old streams and also interacts well with py.test's own -per-test capturing. Here is an example test function: - -.. sourcecode:: python - - def test_myoutput(capsys): - print ("hello") - sys.stderr.write("world\\n") - out, err = capsys.readouterr() - assert out == "hello\\n" - assert err == "world\\n" - print "next" - out, err = capsys.readouterr() - assert out == "next\\n" - -The ``readouterr()`` call snapshots the output so far - -and capturing will be continued. 
After the test -function finishes the original streams will -be restored. If you want to capture on -the filedescriptor level you can use the ``capfd`` function -argument which offers the same interface. -""" - -import py -import os - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--capture', action="store", default=None, - metavar="method", type="choice", choices=['fd', 'sys', 'no'], - help="per-test capturing method: one of fd (default)|sys|no.") - group._addoption('-s', action="store_const", const="no", dest="capture", - help="shortcut for --capture=no.") - -def addouterr(rep, outerr): - repr = getattr(rep, 'longrepr', None) - if not hasattr(repr, 'addsection'): - return - for secname, content in zip(["out", "err"], outerr): - if content: - repr.addsection("Captured std%s" % secname, content.rstrip()) - -def pytest_configure(config): - config.pluginmanager.register(CaptureManager(), 'capturemanager') - -class NoCapture: - def startall(self): - pass - def resume(self): - pass - def suspend(self): - return "", "" - -class CaptureManager: - def __init__(self): - self._method2capture = {} - - def _maketempfile(self): - f = py.std.tempfile.TemporaryFile() - newf = py.io.dupfile(f, encoding="UTF-8") - return newf - - def _makestringio(self): - return py.io.TextIO() - - def _getcapture(self, method): - if method == "fd": - return py.io.StdCaptureFD(now=False, - out=self._maketempfile(), err=self._maketempfile() - ) - elif method == "sys": - return py.io.StdCapture(now=False, - out=self._makestringio(), err=self._makestringio() - ) - elif method == "no": - return NoCapture() - else: - raise ValueError("unknown capturing method: %r" % method) - - def _getmethod(self, config, fspath): - if config.option.capture: - method = config.option.capture - else: - try: - method = config._conftest.rget("option_capture", path=fspath) - except KeyError: - method = "fd" - if method == "fd" and not hasattr(os, 'dup'): # e.g. 
jython - method = "sys" - return method - - def resumecapture_item(self, item): - method = self._getmethod(item.config, item.fspath) - if not hasattr(item, 'outerr'): - item.outerr = ('', '') # we accumulate outerr on the item - return self.resumecapture(method) - - def resumecapture(self, method): - if hasattr(self, '_capturing'): - raise ValueError("cannot resume, already capturing with %r" % - (self._capturing,)) - cap = self._method2capture.get(method) - self._capturing = method - if cap is None: - self._method2capture[method] = cap = self._getcapture(method) - cap.startall() - else: - cap.resume() - - def suspendcapture(self, item=None): - self.deactivate_funcargs() - if hasattr(self, '_capturing'): - method = self._capturing - cap = self._method2capture.get(method) - if cap is not None: - outerr = cap.suspend() - del self._capturing - if item: - outerr = (item.outerr[0] + outerr[0], - item.outerr[1] + outerr[1]) - return outerr - return "", "" - - def activate_funcargs(self, pyfuncitem): - if not hasattr(pyfuncitem, 'funcargs'): - return - assert not hasattr(self, '_capturing_funcargs') - self._capturing_funcargs = capturing_funcargs = [] - for name, capfuncarg in pyfuncitem.funcargs.items(): - if name in ('capsys', 'capfd'): - capturing_funcargs.append(capfuncarg) - capfuncarg._start() - - def deactivate_funcargs(self): - capturing_funcargs = getattr(self, '_capturing_funcargs', None) - if capturing_funcargs is not None: - while capturing_funcargs: - capfuncarg = capturing_funcargs.pop() - capfuncarg._finalize() - del self._capturing_funcargs - - def pytest_make_collect_report(self, __multicall__, collector): - method = self._getmethod(collector.config, collector.fspath) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - addouterr(rep, outerr) - return rep - - def pytest_runtest_setup(self, item): - self.resumecapture_item(item) - - def pytest_runtest_call(self, item): - 
self.resumecapture_item(item) - self.activate_funcargs(item) - - def pytest_runtest_teardown(self, item): - self.resumecapture_item(item) - - def pytest__teardown_final(self, __multicall__, session): - method = self._getmethod(session.config, None) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - if rep: - addouterr(rep, outerr) - return rep - - def pytest_keyboard_interrupt(self, excinfo): - if hasattr(self, '_capturing'): - self.suspendcapture() - - def pytest_runtest_makereport(self, __multicall__, item, call): - self.deactivate_funcargs() - rep = __multicall__.execute() - outerr = self.suspendcapture(item) - if not rep.passed: - addouterr(rep, outerr) - if not rep.passed or rep.when == "teardown": - outerr = ('', '') - item.outerr = outerr - return rep - -def pytest_funcarg__capsys(request): - """captures writes to sys.stdout/sys.stderr and makes - them available successively via a ``capsys.readouterr()`` method - which returns a ``(out, err)`` tuple of captured snapshot strings. - """ - return CaptureFuncarg(request, py.io.StdCapture) - -def pytest_funcarg__capfd(request): - """captures writes to file descriptors 1 and 2 and makes - snapshotted ``(out, err)`` string tuples available - via the ``capsys.readouterr()`` method. If the underlying - platform does not have ``os.dup`` (e.g. Jython) tests using - this funcarg will automatically skip. 
- """ - if not hasattr(os, 'dup'): - py.test.skip("capfd funcarg needs os.dup") - return CaptureFuncarg(request, py.io.StdCaptureFD) - - -class CaptureFuncarg: - def __init__(self, request, captureclass): - self._cclass = captureclass - self.capture = self._cclass(now=False) - #request.addfinalizer(self._finalize) - - def _start(self): - self.capture.startall() - - def _finalize(self): - if hasattr(self, 'capture'): - self.capture.reset() - del self.capture - - def readouterr(self): - return self.capture.readouterr() - - def close(self): - self._finalize() diff --git a/py/_compat/__init__.py b/py/_compat/__init__.py deleted file mode 100644 --- a/py/_compat/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" compatibility modules (taken from 2.4.4) """ - diff --git a/py/_compat/dep_subprocess.py b/py/_compat/dep_subprocess.py deleted file mode 100644 --- a/py/_compat/dep_subprocess.py +++ /dev/null @@ -1,5 +0,0 @@ - -import py -py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", -stacklevel="apipkg") -subprocess = py.std.subprocess diff --git a/py/_plugin/pytest_pylint.py b/py/_plugin/pytest_pylint.py deleted file mode 100644 --- a/py/_plugin/pytest_pylint.py +++ /dev/null @@ -1,36 +0,0 @@ -"""pylint plugin - -XXX: Currently in progress, NOT IN WORKING STATE. 
-""" -import py - -pylint = py.test.importorskip("pylint.lint") - -def pytest_addoption(parser): - group = parser.getgroup('pylint options') - group.addoption('--pylint', action='store_true', - default=False, dest='pylint', - help='run pylint on python files.') - -def pytest_collect_file(path, parent): - if path.ext == ".py": - if parent.config.getvalue('pylint'): - return PylintItem(path, parent) - -#def pytest_terminal_summary(terminalreporter): -# print 'placeholder for pylint output' - -class PylintItem(py.test.collect.Item): - def runtest(self): - capture = py.io.StdCaptureFD() - try: - linter = pylint.lint.PyLinter() - linter.check(str(self.fspath)) - finally: - out, err = capture.reset() - rating = out.strip().split('\n')[-1] - sys.stdout.write(">>>") - print(rating) - assert 0 - - diff --git a/py/_test/conftesthandle.py b/py/_test/conftesthandle.py deleted file mode 100644 --- a/py/_test/conftesthandle.py +++ /dev/null @@ -1,113 +0,0 @@ -import py - -class Conftest(object): - """ the single place for accessing values and interacting - towards conftest modules from py.test objects. - - (deprecated) - Note that triggering Conftest instances to import - conftest.py files may result in added cmdline options. - """ - def __init__(self, onimport=None, confcutdir=None): - self._path2confmods = {} - self._onimport = onimport - self._conftestpath2mod = {} - self._confcutdir = confcutdir - - def setinitial(self, args): - """ try to find a first anchor path for looking up global values - from conftests. This function is usually called _before_ - argument parsing. conftest files may add command line options - and we thus have no completely safe way of determining - which parts of the arguments are actually related to options - and which are file system paths. We just try here to get - bootstrapped ... 
- """ - current = py.path.local() - opt = '--confcutdir' - for i in range(len(args)): - opt1 = str(args[i]) - if opt1.startswith(opt): - if opt1 == opt: - if len(args) > i: - p = current.join(args[i+1], abs=True) - elif opt1.startswith(opt + "="): - p = current.join(opt1[len(opt)+1:], abs=1) - self._confcutdir = p - break - for arg in args + [current]: - anchor = current.join(arg, abs=1) - if anchor.check(): # we found some file object - self._path2confmods[None] = self.getconftestmodules(anchor) - # let's also consider test* dirs - if anchor.check(dir=1): - for x in anchor.listdir(lambda x: x.check(dir=1, dotfile=0)): - self.getconftestmodules(x) - break - else: - assert 0, "no root of filesystem?" - - def getconftestmodules(self, path): - """ return a list of imported conftest modules for the given path. """ - try: - clist = self._path2confmods[path] - except KeyError: - if path is None: - raise ValueError("missing default confest.") - dp = path.dirpath() - if dp == path: - clist = [] - else: - cutdir = self._confcutdir - clist = self.getconftestmodules(dp) - if cutdir and path != cutdir and not path.relto(cutdir): - pass - else: - conftestpath = path.join("conftest.py") - if conftestpath.check(file=1): - clist.append(self.importconftest(conftestpath)) - self._path2confmods[path] = clist - # be defensive: avoid changes from caller side to - # affect us by always returning a copy of the actual list - return clist[:] - - def rget(self, name, path=None): - mod, value = self.rget_with_confmod(name, path) - return value - - def rget_with_confmod(self, name, path=None): - modules = self.getconftestmodules(path) - modules.reverse() - for mod in modules: - try: - return mod, getattr(mod, name) - except AttributeError: - continue - raise KeyError(name) - - def importconftest(self, conftestpath): - assert conftestpath.check(), conftestpath - try: - return self._conftestpath2mod[conftestpath] - except KeyError: - if not conftestpath.dirpath('__init__.py').check(file=1): - # 
HACK: we don't want any "globally" imported conftest.py, - # prone to conflicts and subtle problems - modname = str(conftestpath).replace('.', conftestpath.sep) - mod = conftestpath.pyimport(modname=modname) - else: - mod = conftestpath.pyimport() - self._conftestpath2mod[conftestpath] = mod - dirpath = conftestpath.dirpath() - if dirpath in self._path2confmods: - for path, mods in self._path2confmods.items(): - if path and path.relto(dirpath) or path == dirpath: - assert mod not in mods - mods.append(mod) - self._postimport(mod) - return mod - - def _postimport(self, mod): - if self._onimport: - self._onimport(mod) - return mod diff --git a/py/_test/pycollect.py b/py/_test/pycollect.py deleted file mode 100644 --- a/py/_test/pycollect.py +++ /dev/null @@ -1,399 +0,0 @@ -""" -Python related collection nodes. -""" -import py -import inspect -from py._test.collect import configproperty, warnoldcollect -from py._test import funcargs -from py._code.code import TerminalRepr - -class PyobjMixin(object): - def obj(): - def fget(self): - try: - return self._obj - except AttributeError: - self._obj = obj = self._getobj() - return obj - def fset(self, value): - self._obj = value - return property(fget, fset, None, "underlying python object") - obj = obj() - - def _getobj(self): - return getattr(self.parent.obj, self.name) - - def getmodpath(self, stopatmodule=True, includemodule=False): - """ return python path relative to the containing module. 
""" - chain = self.listchain() - chain.reverse() - parts = [] - for node in chain: - if isinstance(node, Instance): - continue - name = node.name - if isinstance(node, Module): - assert name.endswith(".py") - name = name[:-3] - if stopatmodule: - if includemodule: - parts.append(name) - break - parts.append(name) - parts.reverse() - s = ".".join(parts) - return s.replace(".[", "[") - - def _getfslineno(self): - try: - return self._fslineno - except AttributeError: - pass - obj = self.obj - # xxx let decorators etc specify a sane ordering - if hasattr(obj, 'place_as'): - obj = obj.place_as - - self._fslineno = py.code.getfslineno(obj) - return self._fslineno - - def reportinfo(self): - fspath, lineno = self._getfslineno() - modpath = self.getmodpath() - return fspath, lineno, modpath - -class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): - Class = configproperty('Class') - Instance = configproperty('Instance') - Function = configproperty('Function') - Generator = configproperty('Generator') - - def funcnamefilter(self, name): - return name.startswith('test') - def classnamefilter(self, name): - return name.startswith('Test') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - # NB. 
we avoid random getattrs and peek in the __dict__ instead - dicts = [getattr(self.obj, '__dict__', {})] - for basecls in inspect.getmro(self.obj.__class__): - dicts.append(basecls.__dict__) - seen = {} - l = [] - for dic in dicts: - for name, obj in dic.items(): - if name in seen: - continue - seen[name] = True - if name[0] != "_": - res = self.makeitem(name, obj) - if res is None: - continue - if not isinstance(res, list): - res = [res] - l.extend(res) - l.sort(key=lambda item: item.reportinfo()[:2]) - return l - - def _deprecated_join(self, name): - if self.__class__.join != py.test.collect.Collector.join: - warnoldcollect() - return self.join(name) - - def makeitem(self, name, obj): - return self.ihook.pytest_pycollect_makeitem( - collector=self, name=name, obj=obj) - - def _istestclasscandidate(self, name, obj): - if self.classnamefilter(name) and \ - inspect.isclass(obj): - if hasinit(obj): - # XXX WARN - return False - return True - - def _genfunctions(self, name, funcobj): - module = self.getparent(Module).obj - clscol = self.getparent(Class) - cls = clscol and clscol.obj or None - metafunc = funcargs.Metafunc(funcobj, config=self.config, - cls=cls, module=module) - gentesthook = self.config.hook.pytest_generate_tests - plugins = funcargs.getplugins(self, withpy=True) - gentesthook.pcall(plugins, metafunc=metafunc) - if not metafunc._calls: - return self.Function(name, parent=self) - l = [] - for callspec in metafunc._calls: - subname = "%s[%s]" %(name, callspec.id) - function = self.Function(name=subname, parent=self, - callspec=callspec, callobj=funcobj) - l.append(function) - return l - -class Module(py.test.collect.File, PyCollectorMixin): - def _getobj(self): - return self._memoizedcall('_obj', self._importtestmodule) - - def _importtestmodule(self): - # we assume we are only called once per module - mod = self.fspath.pyimport() - #print "imported test module", mod - self.config.pluginmanager.consider_module(mod) - return mod - - def setup(self): - if 
getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - if hasattr(self.obj, 'setup_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.setup_module)[0]: - self.obj.setup_module(self.obj) - else: - self.obj.setup_module() - - def teardown(self): - if hasattr(self.obj, 'teardown_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.teardown_module)[0]: - self.obj.teardown_module(self.obj) - else: - self.obj.teardown_module() - -class Class(PyCollectorMixin, py.test.collect.Collector): - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - return [self.Instance(name="()", parent=self)] - - def setup(self): - if getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - setup_class = getattr(self.obj, 'setup_class', None) - if setup_class is not None: - setup_class = getattr(setup_class, 'im_func', setup_class) - setup_class(self.obj) - - def teardown(self): - teardown_class = getattr(self.obj, 'teardown_class', None) - if teardown_class is not None: - teardown_class = getattr(teardown_class, 'im_func', teardown_class) - teardown_class(self.obj) - -class Instance(PyCollectorMixin, py.test.collect.Collector): - def _getobj(self): - return self.parent.obj() - def Function(self): - return getattr(self.obj, 'Function', - PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2 - def _keywords(self): - return [] - Function = 
property(Function) - - #def __repr__(self): - # return "<%s of '%s'>" %(self.__class__.__name__, - # self.parent.obj.__name__) - - def newinstance(self): - self.obj = self._getobj() - return self.obj - -class FunctionMixin(PyobjMixin): - """ mixin for the code common to Function and Generator. - """ - - def setup(self): - """ perform setup for this test function. """ - if inspect.ismethod(self.obj): - name = 'setup_method' - else: - name = 'setup_function' - if isinstance(self.parent, Instance): - obj = self.parent.newinstance() - self.obj = self._getobj() - else: - obj = self.parent.obj - setup_func_or_method = getattr(obj, name, None) - if setup_func_or_method is not None: - setup_func_or_method(self.obj) - - def teardown(self): - """ perform teardown for this test function. """ - if inspect.ismethod(self.obj): - name = 'teardown_method' - else: - name = 'teardown_function' - obj = self.parent.obj - teardown_func_or_meth = getattr(obj, name, None) - if teardown_func_or_meth is not None: - teardown_func_or_meth(self.obj) - - def _prunetraceback(self, traceback): - if hasattr(self, '_obj') and not self.config.option.fulltrace: - code = py.code.Code(self.obj) - path, firstlineno = code.path, code.firstlineno - ntraceback = traceback.cut(path=path, firstlineno=firstlineno) - if ntraceback == traceback: - ntraceback = ntraceback.cut(path=path) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - - def _repr_failure_py(self, excinfo, style="long"): - if excinfo.errisinstance(funcargs.FuncargRequest.LookupError): - fspath, lineno, msg = self.reportinfo() - lines, _ = inspect.getsourcelines(self.obj) - for i, line in enumerate(lines): - if line.strip().startswith('def'): - return FuncargLookupErrorRepr(fspath, lineno, - lines[:i+1], str(excinfo.value)) - return super(FunctionMixin, self)._repr_failure_py(excinfo, - style=style) - - def repr_failure(self, excinfo, outerr=None): - assert 
outerr is None, "XXX outerr usage is deprecated" - return self._repr_failure_py(excinfo, - style=self.config.getvalue("tbstyle")) - - shortfailurerepr = "F" - -class FuncargLookupErrorRepr(TerminalRepr): - def __init__(self, filename, firstlineno, deflines, errorstring): - self.deflines = deflines - self.errorstring = errorstring - self.filename = filename - self.firstlineno = firstlineno - - def toterminal(self, tw): - tw.line() - for line in self.deflines: - tw.line(" " + line.strip()) - for line in self.errorstring.split("\n"): - tw.line(" " + line.strip(), red=True) - tw.line() - tw.line("%s:%d" % (self.filename, self.firstlineno+1)) - -class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): - def collect(self): - # test generators are seen as collectors but they also - # invoke setup/teardown on popular request - # (induced by the common "test_*" naming shared with normal tests) - self.config._setupstate.prepare(self) - l = [] - seen = {} - for i, x in enumerate(self.obj()): - name, call, args = self.getcallargs(x) - if not py.builtin.callable(call): - raise TypeError("%r yielded non callable test %r" %(self.obj, call,)) - if name is None: - name = "[%d]" % i - else: - name = "['%s']" % name - if name in seen: - raise ValueError("%r generated tests with non-unique name %r" %(self, name)) - seen[name] = True - l.append(self.Function(name, self, args=args, callobj=call)) - return l - - def getcallargs(self, obj): - if not isinstance(obj, (tuple, list)): - obj = (obj,) - # explict naming - if isinstance(obj[0], py.builtin._basestring): - name = obj[0] - obj = obj[1:] - else: - name = None - call, args = obj[0], obj[1:] - return name, call, args - - -# -# Test Items -# -_dummy = object() -class Function(FunctionMixin, py.test.collect.Item): - """ a Function Item is responsible for setting up - and executing a Python callable test object. 
- """ - _genid = None - def __init__(self, name, parent=None, args=None, config=None, - callspec=None, callobj=_dummy): - super(Function, self).__init__(name, parent, config=config) - self._args = args - if self._isyieldedfunction(): - assert not callspec, "yielded functions (deprecated) cannot have funcargs" - else: - if callspec is not None: - self.funcargs = callspec.funcargs or {} - self._genid = callspec.id - if hasattr(callspec, "param"): - self._requestparam = callspec.param - else: - self.funcargs = {} - if callobj is not _dummy: - self._obj = callobj - self.function = getattr(self.obj, 'im_func', self.obj) - - def _getobj(self): - name = self.name - i = name.find("[") # parametrization - if i != -1: - name = name[:i] - return getattr(self.parent.obj, name) - - def _isyieldedfunction(self): - return self._args is not None - - def readkeywords(self): - d = super(Function, self).readkeywords() - d.update(py.builtin._getfuncdict(self.obj)) - return d - - def runtest(self): - """ execute the underlying test function. """ - self.ihook.pytest_pyfunc_call(pyfuncitem=self) - - def setup(self): - super(Function, self).setup() - if hasattr(self, 'funcargs'): - funcargs.fillfuncargs(self) - - def __eq__(self, other): - try: - return (self.name == other.name and - self._args == other._args and - self.parent == other.parent and - self.obj == other.obj and - getattr(self, '_genid', None) == - getattr(other, '_genid', None) - ) - except AttributeError: - pass - return False - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.parent, self.name)) - -def hasinit(obj): - init = getattr(obj, '__init__', None) - if init: - if init != object.__init__: - return True diff --git a/py/_plugin/pytest_skipping.py b/py/_plugin/pytest_skipping.py deleted file mode 100644 --- a/py/_plugin/pytest_skipping.py +++ /dev/null @@ -1,347 +0,0 @@ -""" -advanced skipping for python test functions, classes or modules. 
- -With this plugin you can mark test functions for conditional skipping -or as "xfail", expected-to-fail. Skipping a test will avoid running it -while xfail-marked tests will run and result in an inverted outcome: -a pass becomes a failure and a fail becomes a semi-passing one. - -The need for skipping a test is usually connected to a condition. -If a test fails under all conditions then it's probably better -to mark your test as 'xfail'. - -By passing ``-rxs`` to the terminal reporter you will see extra -summary information on skips and xfail-run tests at the end of a test run. - -.. _skipif: - -Skipping a single function -------------------------------------------- - -Here is an example for marking a test function to be skipped -when run on a Python3 interpreter:: - - @py.test.mark.skipif("sys.version_info >= (3,0)") - def test_function(): - ... - -During test function setup the skipif condition is -evaluated by calling ``eval(expr, namespace)``. The namespace -contains the ``sys`` and ``os`` modules and the test -``config`` object. The latter allows you to skip based -on a test configuration value e.g. like this:: - - @py.test.mark.skipif("not config.getvalue('db')") - def test_function(...): - ... - -Create a shortcut for your conditional skip decorator -at module level like this:: - - win32only = py.test.mark.skipif("sys.platform != 'win32'") - - @win32only - def test_function(): - ... - - -skip groups of test functions --------------------------------------- - -As with all metadata function marking you can do it at -`whole class- or module level`_. Here is an example -for skipping all methods of a test class based on platform:: - - class TestPosixCalls: - pytestmark = py.test.mark.skipif("sys.platform == 'win32'") - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -The ``pytestmark`` decorator will be applied to each test function. 
-If your code targets python2.6 or above you can equivalently use -the skipif decorator on classes:: - - @py.test.mark.skipif("sys.platform == 'win32'") - class TestPosixCalls: - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -It is fine in general to apply multiple "skipif" decorators -on a single function - this means that if any of the conditions -apply the function will be skipped. - -.. _`whole class- or module level`: mark.html#scoped-marking - - -mark a test function as **expected to fail** -------------------------------------------------------- - -You can use the ``xfail`` marker to indicate that you -expect the test to fail:: - - @py.test.mark.xfail - def test_function(): - ... - -This test will be run but no traceback will be reported -when it fails. Instead terminal reporting will list it in the -"expected to fail" or "unexpectedly passing" sections. - -Same as with skipif_ you can also selectively expect a failure -depending on platform:: - - @py.test.mark.xfail("sys.version_info >= (3,0)") - def test_function(): - ... - -To not run a test and still regard it as "xfailed":: - - @py.test.mark.xfail(..., run=False) - -To specify an explicit reason to be shown with xfailure detail:: - - @py.test.mark.xfail(..., reason="my reason") - -imperative xfail from within a test or setup function ------------------------------------------------------- - -If you cannot declare xfail-conditions at import time -you can also imperatively produce an XFail-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.xfail("unsuppored configuration") - - -skipping on a missing import dependency --------------------------------------------------- - -You can use the following import helper at module level -or within a test or test setup function:: - - docutils = py.test.importorskip("docutils") - -If ``docutils`` cannot be imported here, this will lead to a -skip outcome of the test. 
You can also skip dependeing if -if a library does not come with a high enough version:: - - docutils = py.test.importorskip("docutils", minversion="0.3") - -The version will be read from the specified module's ``__version__`` attribute. - -imperative skip from within a test or setup function ------------------------------------------------------- - -If for some reason you cannot declare skip-conditions -you can also imperatively produce a Skip-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.skip("unsuppored configuration") - -""" - -import py - -def pytest_addoption(parser): - group = parser.getgroup("general") - group.addoption('--runxfail', - action="store_true", dest="runxfail", default=False, - help="run tests even if they are marked xfail") - -class MarkEvaluator: - def __init__(self, item, name): - self.item = item - self.name = name - self.holder = getattr(item.obj, name, None) - - def __bool__(self): - return bool(self.holder) - __nonzero__ = __bool__ - - def istrue(self): - if self.holder: - d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} - if self.holder.args: - self.result = False - for expr in self.holder.args: - self.expr = expr - if isinstance(expr, str): - result = cached_eval(self.item.config, expr, d) - else: - result = expr - if result: - self.result = True - self.expr = expr - break - else: - self.result = True - return getattr(self, 'result', False) - - def get(self, attr, default=None): - return self.holder.kwargs.get(attr, default) - - def getexplanation(self): - expl = self.get('reason', None) - if not expl: - if not hasattr(self, 'expr'): - return "" - else: - return "condition: " + self.expr - return expl - - -def pytest_runtest_setup(item): - if not isinstance(item, py.test.collect.Function): - return - evalskip = MarkEvaluator(item, 'skipif') - if evalskip.istrue(): - py.test.skip(evalskip.getexplanation()) - item._evalxfail = MarkEvaluator(item, 'xfail') 
- if not item.config.getvalue("runxfail"): - if item._evalxfail.istrue(): - if not item._evalxfail.get('run', True): - py.test.skip("xfail") - -def pytest_runtest_makereport(__multicall__, item, call): - if not isinstance(item, py.test.collect.Function): - return - if not (call.excinfo and - call.excinfo.errisinstance(py.test.xfail.Exception)): - evalxfail = getattr(item, '_evalxfail', None) - if not evalxfail: - return - if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception): - if not item.config.getvalue("runxfail"): - rep = __multicall__.execute() - rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg - rep.skipped = True - rep.failed = False - return rep - if call.when == "setup": - rep = __multicall__.execute() - if rep.skipped and evalxfail.istrue(): - expl = evalxfail.getexplanation() - if not evalxfail.get("run", True): - expl = "[NOTRUN] " + expl - rep.keywords['xfail'] = expl - return rep - elif call.when == "call": - rep = __multicall__.execute() - if not item.config.getvalue("runxfail") and evalxfail.istrue(): - if call.excinfo: - rep.skipped = True - rep.failed = rep.passed = False - else: - rep.skipped = rep.passed = False - rep.failed = True - rep.keywords['xfail'] = evalxfail.getexplanation() - else: - if 'xfail' in rep.keywords: - del rep.keywords['xfail'] - return rep - -# called by terminalreporter progress reporting -def pytest_report_teststatus(report): - if 'xfail' in report.keywords: - if report.skipped: - return "xfailed", "x", "xfail" - elif report.failed: - return "xpassed", "X", "XPASS" - -# called by the terminalreporter instance/plugin -def pytest_terminal_summary(terminalreporter): - tr = terminalreporter - if not tr.reportchars: - #for name in "xfailed skipped failed xpassed": - # if not tr.stats.get(name, 0): - # tr.write_line("HINT: use '-r' option to see extra " - # "summary info about tests") - # break - return - - lines = [] - for char in tr.reportchars: - if char == "x": - show_xfailed(terminalreporter, 
lines) - elif char == "X": - show_xpassed(terminalreporter, lines) - elif char == "f": - show_failed(terminalreporter, lines) - elif char == "s": - show_skipped(terminalreporter, lines) - if lines: - tr._tw.sep("=", "short test summary info") - for line in lines: - tr._tw.line(line) - -def show_failed(terminalreporter, lines): - tw = terminalreporter._tw - failed = terminalreporter.stats.get("failed") - if failed: - for rep in failed: - pos = terminalreporter.gettestid(rep.item) - lines.append("FAIL %s" %(pos, )) - -def show_xfailed(terminalreporter, lines): - xfailed = terminalreporter.stats.get("xfailed") - if xfailed: - for rep in xfailed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XFAIL %s %s" %(pos, reason)) - -def show_xpassed(terminalreporter, lines): - xpassed = terminalreporter.stats.get("xpassed") - if xpassed: - for rep in xpassed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XPASS %s %s" %(pos, reason)) - -def cached_eval(config, expr, d): - if not hasattr(config, '_evalcache'): - config._evalcache = {} - try: - return config._evalcache[expr] - except KeyError: - #import sys - #print >>sys.stderr, ("cache-miss: %r" % expr) - config._evalcache[expr] = x = eval(expr, d) - return x - - -def folded_skips(skipped): - d = {} - for event in skipped: - entry = event.longrepr.reprcrash - key = entry.path, entry.lineno, entry.message - d.setdefault(key, []).append(event) - l = [] - for key, events in d.items(): - l.append((len(events),) + key) - return l - -def show_skipped(terminalreporter, lines): - tr = terminalreporter - skipped = tr.stats.get('skipped', []) - if skipped: - #if not tr.hasopt('skipped'): - # tr.write_line( - # "%d skipped tests, specify -rs for more info" % - # len(skipped)) - # return - fskips = folded_skips(skipped) - if fskips: - #tr.write_sep("_", "skipped test summary") - for num, fspath, lineno, reason in fskips: - if 
reason.startswith("Skipped: "): - reason = reason[9:] - lines.append("SKIP [%d] %s:%d: %s" % - (num, fspath, lineno, reason)) diff --git a/py/bin/env.py b/py/bin/env.py deleted file mode 100644 --- a/py/bin/env.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys, os, os.path - -progpath = sys.argv[0] -packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath))) -packagename = os.path.basename(packagedir) -bindir = os.path.join(packagedir, 'bin') -if sys.platform == 'win32': - bindir = os.path.join(bindir, 'win32') -rootdir = os.path.dirname(packagedir) - -def prepend_path(name, value): - sep = os.path.pathsep - curpath = os.environ.get(name, '') - newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ] - return setenv(name, sep.join(newpath)) - -def setenv(name, value): - shell = os.environ.get('SHELL', '') - comspec = os.environ.get('COMSPEC', '') - if shell.endswith('csh'): - cmd = 'setenv %s "%s"' % (name, value) - elif shell.endswith('sh'): - cmd = '%s="%s"; export %s' % (name, value, name) - elif comspec.endswith('cmd.exe'): - cmd = 'set %s=%s' % (name, value) - else: - assert False, 'Shell not supported.' - return cmd - -print(prepend_path('PATH', bindir)) -print(prepend_path('PYTHONPATH', rootdir)) diff --git a/py/_plugin/pytest_nose.py b/py/_plugin/pytest_nose.py deleted file mode 100644 --- a/py/_plugin/pytest_nose.py +++ /dev/null @@ -1,98 +0,0 @@ -"""nose-compatibility plugin: allow to run nose test suites natively. - -This is an experimental plugin for allowing to run tests written -in 'nosetests style with py.test. - -Usage -------------- - -type:: - - py.test # instead of 'nosetests' - -and you should be able to run nose style tests and at the same -time can make full use of py.test's capabilities. 
- -Supported nose Idioms ----------------------- - -* setup and teardown at module/class/method level -* SkipTest exceptions and markers -* setup/teardown decorators -* yield-based tests and their setup -* general usage of nose utilities - -Unsupported idioms / issues ----------------------------------- - -- nose-style doctests are not collected and executed correctly, - also fixtures don't work. - -- no nose-configuration is recognized - -If you find other issues or have suggestions please run:: - - py.test --pastebin=all - -and send the resulting URL to a py.test contact channel, -at best to the mailing list. -""" -import py -import inspect -import sys - -def pytest_runtest_makereport(__multicall__, item, call): - SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None) - if SkipTest: - if call.excinfo and call.excinfo.errisinstance(SkipTest): - # let's substitute the excinfo with a py.test.skip one - call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when) - call.excinfo = call2.excinfo - -def pytest_report_iteminfo(item): - # nose 0.11.1 uses decorators for "raises" and other helpers. 
- # for reporting progress by filename we fish for the filename - if isinstance(item, py.test.collect.Function): - obj = item.obj - if hasattr(obj, 'compat_co_firstlineno'): - fn = sys.modules[obj.__module__].__file__ - if fn.endswith(".pyc"): - fn = fn[:-1] - #assert 0 - #fn = inspect.getsourcefile(obj) or inspect.getfile(obj) - lineno = obj.compat_co_firstlineno - return py.path.local(fn), lineno, obj.__module__ - -def pytest_runtest_setup(item): - if isinstance(item, (py.test.collect.Function)): - if isinstance(item.parent, py.test.collect.Generator): - gen = item.parent - if not hasattr(gen, '_nosegensetup'): - call_optional(gen.obj, 'setup') - if isinstance(gen.parent, py.test.collect.Instance): - call_optional(gen.parent.obj, 'setup') - gen._nosegensetup = True - if not call_optional(item.obj, 'setup'): - # call module level setup if there is no object level one - call_optional(item.parent.obj, 'setup') - -def pytest_runtest_teardown(item): - if isinstance(item, py.test.collect.Function): - if not call_optional(item.obj, 'teardown'): - call_optional(item.parent.obj, 'teardown') - #if hasattr(item.parent, '_nosegensetup'): - # #call_optional(item._nosegensetup, 'teardown') - # del item.parent._nosegensetup - -def pytest_make_collect_report(collector): - if isinstance(collector, py.test.collect.Generator): - call_optional(collector.obj, 'setup') - -def call_optional(obj, name): - method = getattr(obj, name, None) - if method: - ismethod = inspect.ismethod(method) - rawcode = py.code.getrawcode(method) - if not rawcode.co_varnames[ismethod:]: - method() - return True diff --git a/py/_plugin/pytest_pytester.py b/py/_plugin/pytest_pytester.py deleted file mode 100644 --- a/py/_plugin/pytest_pytester.py +++ /dev/null @@ -1,500 +0,0 @@ -""" -funcargs and support code for testing py.test's own functionality. 
-""" - -import py -import sys, os -import re -import inspect -import time -from py._test.config import Config as pytestConfig -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("pylib") - group.addoption('--tools-on-path', - action="store_true", dest="toolsonpath", default=False, - help=("discover tools on PATH instead of going through py.cmdline.") - ) - -pytest_plugins = '_pytest' - -def pytest_funcarg__linecomp(request): - return LineComp() - -def pytest_funcarg__LineMatcher(request): - return LineMatcher - -def pytest_funcarg__testdir(request): - tmptestdir = TmpTestdir(request) - return tmptestdir - -rex_outcome = re.compile("(\d+) (\w+)") -class RunResult: - def __init__(self, ret, outlines, errlines, duration): - self.ret = ret - self.outlines = outlines - self.errlines = errlines - self.stdout = LineMatcher(outlines) - self.stderr = LineMatcher(errlines) - self.duration = duration - - def parseoutcomes(self): - for line in reversed(self.outlines): - if 'seconds' in line: - outcomes = rex_outcome.findall(line) - if outcomes: - d = {} - for num, cat in outcomes: - d[cat] = int(num) - return d - -class TmpTestdir: - def __init__(self, request): - self.request = request - self._pytest = request.getfuncargvalue("_pytest") - # XXX remove duplication with tmpdir plugin - basetmp = request.config.ensuretemp("testdir") - name = request.function.__name__ - for i in range(100): - try: - tmpdir = basetmp.mkdir(name + str(i)) - except py.error.EEXIST: - continue - break - # we need to create another subdir - # because Directory.collect() currently loads - # conftest.py from sibling directories - self.tmpdir = tmpdir.mkdir(name) - self.plugins = [] - self._syspathremove = [] - self.chdir() # always chdir - self.request.addfinalizer(self.finalize) - - def __repr__(self): - return "" % (self.tmpdir,) - - def Config(self, topdir=None): - if topdir is None: - topdir = self.tmpdir.dirpath() - return pytestConfig(topdir=topdir) - - def 
finalize(self): - for p in self._syspathremove: - py.std.sys.path.remove(p) - if hasattr(self, '_olddir'): - self._olddir.chdir() - # delete modules that have been loaded from tmpdir - for name, mod in list(sys.modules.items()): - if mod: - fn = getattr(mod, '__file__', None) - if fn and fn.startswith(str(self.tmpdir)): - del sys.modules[name] - - def getreportrecorder(self, obj): - if hasattr(obj, 'config'): - obj = obj.config - if hasattr(obj, 'hook'): - obj = obj.hook - assert hasattr(obj, '_hookspecs'), obj - reprec = ReportRecorder(obj) - reprec.hookrecorder = self._pytest.gethookrecorder(obj) - reprec.hook = reprec.hookrecorder.hook - return reprec - - def chdir(self): - old = self.tmpdir.chdir() - if not hasattr(self, '_olddir'): - self._olddir = old - - def _makefile(self, ext, args, kwargs): - items = list(kwargs.items()) - if args: - source = "\n".join(map(str, args)) + "\n" - basename = self.request.function.__name__ - items.insert(0, (basename, source)) - ret = None - for name, value in items: - p = self.tmpdir.join(name).new(ext=ext) - source = str(py.code.Source(value)).lstrip() - p.write(source.encode("utf-8"), "wb") - if ret is None: - ret = p - return ret - - - def makefile(self, ext, *args, **kwargs): - return self._makefile(ext, args, kwargs) - - def makeconftest(self, source): - return self.makepyfile(conftest=source) - - def makepyfile(self, *args, **kwargs): - return self._makefile('.py', args, kwargs) - - def maketxtfile(self, *args, **kwargs): - return self._makefile('.txt', args, kwargs) - - def syspathinsert(self, path=None): - if path is None: - path = self.tmpdir - py.std.sys.path.insert(0, str(path)) - self._syspathremove.append(str(path)) - - def mkdir(self, name): - return self.tmpdir.mkdir(name) - - def mkpydir(self, name): - p = self.mkdir(name) - p.ensure("__init__.py") - return p - - def genitems(self, colitems): - return list(self.session.genitems(colitems)) - - def inline_genitems(self, *args): - #config = 
self.parseconfig(*args) - config = self.parseconfig(*args) - session = config.initsession() - rec = self.getreportrecorder(config) - colitems = [config.getnode(arg) for arg in config.args] - items = list(session.genitems(colitems)) - return items, rec - - def runitem(self, source): - # used from runner functional tests - item = self.getitem(source) - # the test class where we are called from wants to provide the runner - testclassinstance = py.builtin._getimself(self.request.function) - runner = testclassinstance.getrunner() - return runner(item) - - def inline_runsource(self, source, *cmdlineargs): - p = self.makepyfile(source) - l = list(cmdlineargs) + [p] - return self.inline_run(*l) - - def inline_runsource1(self, *args): - args = list(args) - source = args.pop() - p = self.makepyfile(source) - l = list(args) + [p] - reprec = self.inline_run(*l) - reports = reprec.getreports("pytest_runtest_logreport") - assert len(reports) == 1, reports - return reports[0] - - def inline_run(self, *args): - args = ("-s", ) + args # otherwise FD leakage - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - session = config.initsession() - reprec = self.getreportrecorder(config) - colitems = config.getinitialnodes() - session.main(colitems) - config.pluginmanager.do_unconfigure(config) - return reprec - - def config_preparse(self): - config = self.Config() - for plugin in self.plugins: - if isinstance(plugin, str): - config.pluginmanager.import_plugin(plugin) - else: - if isinstance(plugin, dict): - plugin = PseudoPlugin(plugin) - if not config.pluginmanager.isregistered(plugin): - config.pluginmanager.register(plugin) - return config - - def parseconfig(self, *args): - if not args: - args = (self.tmpdir,) - config = self.config_preparse() - args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')] - config.parse(args) - return config - - def reparseconfig(self, args=None): - """ this is used from tests that want to re-invoke parse(). 
""" - if not args: - args = [self.tmpdir] - from py._test import config - oldconfig = config.config_per_process # py.test.config - try: - c = config.config_per_process = py.test.config = pytestConfig() - c.basetemp = oldconfig.mktemp("reparse", numbered=True) - c.parse(args) - return c - finally: - config.config_per_process = py.test.config = oldconfig - - def parseconfigure(self, *args): - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - return config - - def getitem(self, source, funcname="test_func"): - modcol = self.getmodulecol(source) - moditems = modcol.collect() - for item in modcol.collect(): - if item.name == funcname: - return item - else: - assert 0, "%r item not found in module:\n%s" %(funcname, source) - - def getitems(self, source): - modcol = self.getmodulecol(source) - return list(modcol.config.initsession().genitems([modcol])) - #assert item is not None, "%r item not found in module:\n%s" %(funcname, source) - #return item - - def getfscol(self, path, configargs=()): - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - return self.config.getnode(path) - - def getmodulecol(self, source, configargs=(), withinit=False): - kw = {self.request.function.__name__: py.code.Source(source).strip()} - path = self.makepyfile(**kw) - if withinit: - self.makepyfile(__init__ = "#") - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - #self.config.pluginmanager.do_configure(config=self.config) - # XXX - self.config.pluginmanager.import_plugin("runner") - plugin = self.config.pluginmanager.getplugin("runner") - plugin.pytest_configure(config=self.config) - - return self.config.getnode(path) - - def popen(self, cmdargs, stdout, stderr, **kw): - if not hasattr(py.std, 'subprocess'): - py.test.skip("no subprocess module") - env = os.environ.copy() - env['PYTHONPATH'] = ":".join(filter(None, [ - str(os.getcwd()), env.get('PYTHONPATH', '')])) - kw['env'] 
= env - #print "env", env - return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) - - def run(self, *cmdargs): - return self._run(*cmdargs) - - def _run(self, *cmdargs): - cmdargs = [str(x) for x in cmdargs] - p1 = self.tmpdir.join("stdout") - p2 = self.tmpdir.join("stderr") - print_("running", cmdargs, "curdir=", py.path.local()) - f1 = p1.open("wb") - f2 = p2.open("wb") - now = time.time() - popen = self.popen(cmdargs, stdout=f1, stderr=f2, - close_fds=(sys.platform != "win32")) - ret = popen.wait() - f1.close() - f2.close() - out = p1.read("rb") - out = getdecoded(out).splitlines() - err = p2.read("rb") - err = getdecoded(err).splitlines() - def dump_lines(lines, fp): - try: - for line in lines: - py.builtin.print_(line, file=fp) - except UnicodeEncodeError: - print("couldn't print to %s because of encoding" % (fp,)) - dump_lines(out, sys.stdout) - dump_lines(err, sys.stderr) - return RunResult(ret, out, err, time.time()-now) - - def runpybin(self, scriptname, *args): - fullargs = self._getpybinargs(scriptname) + args - return self.run(*fullargs) - - def _getpybinargs(self, scriptname): - if self.request.config.getvalue("toolsonpath"): - script = py.path.local.sysfind(scriptname) - assert script, "script %r not found" % scriptname - return (script,) - else: - cmdlinename = scriptname.replace(".", "") - assert hasattr(py.cmdline, cmdlinename), cmdlinename - source = ("import sys;sys.path.insert(0,%r);" - "import py;py.cmdline.%s()" % - (str(py._pydir.dirpath()), cmdlinename)) - return (sys.executable, "-c", source,) - - def runpython(self, script): - s = self._getsysprepend() - if s: - script.write(s + "\n" + script.read()) - return self.run(sys.executable, script) - - def _getsysprepend(self): - if not self.request.config.getvalue("toolsonpath"): - s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath()) - else: - s = "" - return s - - def runpython_c(self, command): - command = self._getsysprepend() + command - return 
self.run(py.std.sys.executable, "-c", command) - - def runpytest(self, *args): - p = py.path.local.make_numbered_dir(prefix="runpytest-", - keep=None, rootdir=self.tmpdir) - args = ('--basetemp=%s' % p, ) + args - plugins = [x for x in self.plugins if isinstance(x, str)] - if plugins: - args = ('-p', plugins[0]) + args - return self.runpybin("py.test", *args) - - def spawn_pytest(self, string, expect_timeout=10.0): - pexpect = py.test.importorskip("pexpect", "2.4") - if not self.request.config.getvalue("toolsonpath"): - py.test.skip("need --tools-on-path to run py.test script") - basetemp = self.tmpdir.mkdir("pexpect") - invoke = self._getpybinargs("py.test")[0] - cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) - child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w")) - child.timeout = expect_timeout - return child - -def getdecoded(out): - try: - return out.decode("utf-8") - except UnicodeDecodeError: - return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( - py.io.saferepr(out),) - -class PseudoPlugin: - def __init__(self, vars): - self.__dict__.update(vars) - -class ReportRecorder(object): - def __init__(self, hook): - self.hook = hook - self.registry = hook._registry - self.registry.register(self) - - def getcall(self, name): - return self.hookrecorder.getcall(name) - - def popcall(self, name): - return self.hookrecorder.popcall(name) - - def getcalls(self, names): - """ return list of ParsedCall instances matching the given eventname. 
""" - return self.hookrecorder.getcalls(names) - - # functionality for test reports - - def getreports(self, names="pytest_runtest_logreport pytest_collectreport"): - return [x.report for x in self.getcalls(names)] - - def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"): - """ return a testreport whose dotted import path matches """ - l = [] - for rep in self.getreports(names=names): - colitem = rep.getnode() - if not inamepart or inamepart in colitem.listnames(): - l.append(rep) - if not l: - raise ValueError("could not find test report matching %r: no test reports at all!" % - (inamepart,)) - if len(l) > 1: - raise ValueError("found more than one testreport matching %r: %s" %( - inamepart, l)) - return l[0] - - def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'): - return [rep for rep in self.getreports(names) if rep.failed] - - def getfailedcollections(self): - return self.getfailures('pytest_collectreport') - - def listoutcomes(self): - passed = [] - skipped = [] - failed = [] - for rep in self.getreports("pytest_runtest_logreport"): - if rep.passed: - if rep.when == "call": - passed.append(rep) - elif rep.skipped: - skipped.append(rep) - elif rep.failed: - failed.append(rep) - return passed, skipped, failed - - def countoutcomes(self): - return [len(x) for x in self.listoutcomes()] - - def assertoutcome(self, passed=0, skipped=0, failed=0): - realpassed, realskipped, realfailed = self.listoutcomes() - assert passed == len(realpassed) - assert skipped == len(realskipped) - assert failed == len(realfailed) - - def clear(self): - self.hookrecorder.calls[:] = [] - - def unregister(self): - self.registry.unregister(self) - self.hookrecorder.finish_recording() - -class LineComp: - def __init__(self): - self.stringio = py.io.TextIO() - - def assert_contains_lines(self, lines2): - """ assert that lines2 are contained (linearly) in lines1. - return a list of extralines found. 
- """ - __tracebackhide__ = True - val = self.stringio.getvalue() - self.stringio.truncate(0) - self.stringio.seek(0) - lines1 = val.split("\n") - return LineMatcher(lines1).fnmatch_lines(lines2) - -class LineMatcher: - def __init__(self, lines): - self.lines = lines - - def str(self): - return "\n".join(self.lines) - - def fnmatch_lines(self, lines2): - if isinstance(lines2, str): - lines2 = py.code.Source(lines2) - if isinstance(lines2, py.code.Source): - lines2 = lines2.strip().lines - - from fnmatch import fnmatch - lines1 = self.lines[:] - nextline = None - extralines = [] - __tracebackhide__ = True - for line in lines2: - nomatchprinted = False - while lines1: - nextline = lines1.pop(0) - if line == nextline: - print_("exact match:", repr(line)) - break - elif fnmatch(nextline, line): - print_("fnmatch:", repr(line)) - print_(" with:", repr(nextline)) - break - else: - if not nomatchprinted: - print_("nomatch:", repr(line)) - nomatchprinted = True - print_(" and:", repr(nextline)) - extralines.append(nextline) - else: - assert line == nextline diff --git a/py/_plugin/pytest_monkeypatch.py b/py/_plugin/pytest_monkeypatch.py deleted file mode 100644 --- a/py/_plugin/pytest_monkeypatch.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -safely patch object attributes, dicts and environment variables. - -Usage ----------------- - -Use the `monkeypatch funcarg`_ to tweak your global test environment -for running a particular test. You can safely set/del an attribute, -dictionary item or environment variable by respective methods -on the monkeypatch funcarg. If you want e.g. to set an ENV1 variable -and have os.path.expanduser return a particular directory, you can -write it down like this: - -.. sourcecode:: python - - def test_mytest(monkeypatch): - monkeypatch.setenv('ENV1', 'myval') - monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz') - ... 
# your test code that uses those patched values implicitely - -After the test function finished all modifications will be undone, -because the ``monkeypatch.undo()`` method is registered as a finalizer. - -``monkeypatch.setattr/delattr/delitem/delenv()`` all -by default raise an Exception if the target does not exist. -Pass ``raising=False`` if you want to skip this check. - -prepending to PATH or other environment variables ---------------------------------------------------------- - -To prepend a value to an already existing environment parameter: - -.. sourcecode:: python - - def test_mypath_finding(monkeypatch): - monkeypatch.setenv('PATH', 'x/y', prepend=":") - # in bash language: export PATH=x/y:$PATH - -calling "undo" finalization explicitely ------------------------------------------ - -At the end of function execution py.test invokes -a teardown hook which undoes all monkeypatch changes. -If you do not want to wait that long you can call -finalization explicitely:: - - monkeypatch.undo() - -This will undo previous changes. This call consumes the -undo stack. Calling it a second time has no effect unless -you start monkeypatching after the undo call. - -.. _`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/ -""" - -import py, os, sys - -def pytest_funcarg__monkeypatch(request): - """The returned ``monkeypatch`` funcarg provides these - helper methods to modify objects, dictionaries or os.environ:: - - monkeypatch.setattr(obj, name, value, raising=True) - monkeypatch.delattr(obj, name, raising=True) - monkeypatch.setitem(mapping, name, value) - monkeypatch.delitem(obj, name, raising=True) - monkeypatch.setenv(name, value, prepend=False) - monkeypatch.delenv(name, value, raising=True) - monkeypatch.syspath_prepend(path) - - All modifications will be undone when the requesting - test function finished its execution. 
The ``raising`` - parameter determines if a KeyError or AttributeError - will be raised if the set/deletion operation has no target. - """ - monkeypatch = MonkeyPatch() - request.addfinalizer(monkeypatch.undo) - return monkeypatch - -notset = object() - -class MonkeyPatch: - def __init__(self): - self._setattr = [] - self._setitem = [] - - def setattr(self, obj, name, value, raising=True): - oldval = getattr(obj, name, notset) - if raising and oldval is notset: - raise AttributeError("%r has no attribute %r" %(obj, name)) - self._setattr.insert(0, (obj, name, oldval)) - setattr(obj, name, value) - - def delattr(self, obj, name, raising=True): - if not hasattr(obj, name): - if raising: - raise AttributeError(name) - else: - self._setattr.insert(0, (obj, name, getattr(obj, name, notset))) - delattr(obj, name) - - def setitem(self, dic, name, value): - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - dic[name] = value - - def delitem(self, dic, name, raising=True): - if name not in dic: - if raising: - raise KeyError(name) - else: - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - del dic[name] - - def setenv(self, name, value, prepend=None): - value = str(value) - if prepend and name in os.environ: - value = value + prepend + os.environ[name] - self.setitem(os.environ, name, value) - - def delenv(self, name, raising=True): - self.delitem(os.environ, name, raising=raising) - - def syspath_prepend(self, path): - if not hasattr(self, '_savesyspath'): - self._savesyspath = sys.path[:] - sys.path.insert(0, str(path)) - - def undo(self): - for obj, name, value in self._setattr: - if value is not notset: - setattr(obj, name, value) - else: - delattr(obj, name) - self._setattr[:] = [] - for dictionary, name, value in self._setitem: - if value is notset: - del dictionary[name] - else: - dictionary[name] = value - self._setitem[:] = [] - if hasattr(self, '_savesyspath'): - sys.path[:] = self._savesyspath diff --git a/py/_plugin/hookspec.py 
b/py/_plugin/hookspec.py deleted file mode 100644 --- a/py/_plugin/hookspec.py +++ /dev/null @@ -1,172 +0,0 @@ -""" -hook specifications for py.test plugins -""" - -# ------------------------------------------------------------------------- -# Command line and configuration -# ------------------------------------------------------------------------- - -def pytest_namespace(): - "return dict of name->object which will get stored at py.test. namespace" - -def pytest_addoption(parser): - "add optparse-style options via parser.addoption." - -def pytest_addhooks(pluginmanager): - "add hooks via pluginmanager.registerhooks(module)" - -def pytest_configure(config): - """ called after command line options have been parsed. - and all plugins and initial conftest files been loaded. - """ - -def pytest_unconfigure(config): - """ called before test process is exited. """ - -# ------------------------------------------------------------------------- -# collection hooks -# ------------------------------------------------------------------------- - -def pytest_ignore_collect(path, config): - """ return true value to prevent considering this path for collection. - This hook is consulted for all files and directories prior to considering - collection hooks. - """ -pytest_ignore_collect.firstresult = True - -def pytest_collect_directory(path, parent): - """ return Collection node or None for the given path. """ -pytest_collect_directory.firstresult = True - -def pytest_collect_file(path, parent): - """ return Collection node or None for the given path. """ - -def pytest_collectstart(collector): - """ collector starts collecting. """ - -def pytest_collectreport(report): - """ collector finished collecting. """ - -def pytest_deselected(items): - """ called for test items deselected by keyword. """ - -def pytest_make_collect_report(collector): - """ perform a collection and return a collection. """ -pytest_make_collect_report.firstresult = True - -# XXX rename to item_collected()? 
meaning in distribution context? -def pytest_itemstart(item, node=None): - """ test item gets collected. """ - -# ------------------------------------------------------------------------- -# Python test function related hooks -# ------------------------------------------------------------------------- - -def pytest_pycollect_makemodule(path, parent): - """ return a Module collector or None for the given path. - This hook will be called for each matching test module path. - The pytest_collect_file hook needs to be used if you want to - create test modules for files that do not match as a test module. - """ -pytest_pycollect_makemodule.firstresult = True - -def pytest_pycollect_makeitem(collector, name, obj): - """ return custom item/collector for a python object in a module, or None. """ -pytest_pycollect_makeitem.firstresult = True - -def pytest_pyfunc_call(pyfuncitem): - """ call underlying test function. """ -pytest_pyfunc_call.firstresult = True - -def pytest_generate_tests(metafunc): - """ generate (multiple) parametrized calls to a test function.""" - -# ------------------------------------------------------------------------- -# generic runtest related hooks -# ------------------------------------------------------------------------- - -def pytest_runtest_protocol(item): - """ implement fixture, run and report about the given test item. """ -pytest_runtest_protocol.firstresult = True - -def pytest_runtest_setup(item): - """ called before pytest_runtest_call(). """ - -def pytest_runtest_call(item): - """ execute test item. """ - -def pytest_runtest_teardown(item): - """ called after pytest_runtest_call(). """ - -def pytest_runtest_makereport(item, call): - """ make a test report for the given item and call outcome. """ -pytest_runtest_makereport.firstresult = True - -def pytest_runtest_logreport(report): - """ process item test report. 
""" - -# special handling for final teardown - somewhat internal for now -def pytest__teardown_final(session): - """ called before test session finishes. """ -pytest__teardown_final.firstresult = True - -def pytest__teardown_final_logerror(report): - """ called if runtest_teardown_final failed. """ - -# ------------------------------------------------------------------------- -# test session related hooks -# ------------------------------------------------------------------------- - -def pytest_sessionstart(session): - """ before session.main() is called. """ - -def pytest_sessionfinish(session, exitstatus): - """ whole test run finishes. """ - -# ------------------------------------------------------------------------- -# hooks for influencing reporting (invoked from pytest_terminal) -# ------------------------------------------------------------------------- - -def pytest_report_header(config): - """ return a string to be displayed as header info for terminal reporting.""" - -def pytest_report_teststatus(report): - """ return result-category, shortletter and verbose word for reporting.""" -pytest_report_teststatus.firstresult = True - -def pytest_terminal_summary(terminalreporter): - """ add additional section in terminal summary reporting. """ - -def pytest_report_iteminfo(item): - """ return (fspath, lineno, name) for the item. 
- the information is used for result display and to sort tests - """ -pytest_report_iteminfo.firstresult = True - -# ------------------------------------------------------------------------- -# doctest hooks -# ------------------------------------------------------------------------- - -def pytest_doctest_prepare_content(content): - """ return processed content for a given doctest""" -pytest_doctest_prepare_content.firstresult = True - - -# ------------------------------------------------------------------------- -# error handling and internal debugging hooks -# ------------------------------------------------------------------------- - -def pytest_plugin_registered(plugin, manager): - """ a new py lib plugin got registered. """ - -def pytest_plugin_unregistered(plugin): - """ a py lib plugin got unregistered. """ - -def pytest_internalerror(excrepr): - """ called for internal errors. """ - -def pytest_keyboard_interrupt(excinfo): - """ called for keyboard interrupt. """ - -def pytest_trace(category, msg): - """ called for debug info. """ diff --git a/py/bin/py.lookup b/py/bin/py.lookup deleted file mode 100755 --- a/py/bin/py.lookup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pylookup() \ No newline at end of file diff --git a/py/_cmdline/pysvnwcrevert.py b/py/_cmdline/pysvnwcrevert.py deleted file mode 100755 --- a/py/_cmdline/pysvnwcrevert.py +++ /dev/null @@ -1,55 +0,0 @@ -#! /usr/bin/env python -"""\ -py.svnwcrevert [options] WCPATH - -Running this script and then 'svn up' puts the working copy WCPATH in a state -as clean as a fresh check-out. - -WARNING: you'll loose all local changes, obviously! - -This script deletes all files that have been modified -or that svn doesn't explicitly know about, including svn:ignored files -(like .pyc files, hint hint). 
- -The goal of this script is to leave the working copy with some files and -directories possibly missing, but - most importantly - in a state where -the following 'svn up' won't just crash. -""" - -import sys, py - -def kill(p, root): - print('< %s' % (p.relto(root),)) - p.remove(rec=1) - -def svnwcrevert(path, root=None, precious=[]): - if root is None: - root = path - wcpath = py.path.svnwc(path) - try: - st = wcpath.status() - except ValueError: # typically, "bad char in wcpath" - kill(path, root) - return - for p in path.listdir(): - if p.basename == '.svn' or p.basename in precious: - continue - wcp = py.path.svnwc(p) - if wcp not in st.unchanged and wcp not in st.external: - kill(p, root) - elif p.check(dir=1): - svnwcrevert(p, root) - -# XXX add a functional test - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-p", "--precious", - action="append", dest="precious", default=[], - help="preserve files with this name") - -def main(): - opts, args = parser.parse_args() - if len(args) != 1: - parser.print_help() - sys.exit(2) - svnwcrevert(py.path.local(args[0]), precious=opts.precious) diff --git a/py/_plugin/__init__.py b/py/_plugin/__init__.py deleted file mode 100644 --- a/py/_plugin/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/_plugin/pytest_mark.py b/py/_plugin/pytest_mark.py deleted file mode 100644 --- a/py/_plugin/pytest_mark.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -generic mechanism for marking python functions. - -By using the ``py.test.mark`` helper you can instantiate -decorators that will set named meta data on test functions. - -Marking a single function ----------------------------------------------------- - -You can "mark" a test function with meta data like this:: - - @py.test.mark.webtest - def test_send_http(): - ... - -This will set a "Marker" instance as a function attribute named "webtest". 
-You can also specify parametrized meta data like this:: - - @py.test.mark.webtest(firefox=30) - def test_receive(): - ... - -The named marker can be accessed like this later:: - - test_receive.webtest.kwargs['firefox'] == 30 - -In addition to set key-value pairs you can also use positional arguments:: - - @py.test.mark.webtest("triangular") - def test_receive(): - ... - -and later access it with ``test_receive.webtest.args[0] == 'triangular``. - -.. _`scoped-marking`: - -Marking whole classes or modules ----------------------------------------------------- - -If you are programming with Python2.6 you may use ``py.test.mark`` decorators -with classes to apply markers to all its test methods:: - - @py.test.mark.webtest - class TestClass: - def test_startup(self): - ... - def test_startup_and_more(self): - ... - -This is equivalent to directly applying the decorator to the -two test functions. - -To remain compatible with Python2.5 you can also set a -``pytestmark`` attribute on a TestClass like this:: - - import py - - class TestClass: - pytestmark = py.test.mark.webtest - -or if you need to use multiple markers you can use a list:: - - import py - - class TestClass: - pytestmark = [py.test.mark.webtest, pytest.mark.slowtest] - -You can also set a module level marker:: - - import py - pytestmark = py.test.mark.webtest - -in which case it will be applied to all functions and -methods defined in the module. - -Using "-k MARKNAME" to select tests ----------------------------------------------------- - -You can use the ``-k`` command line option to select -tests:: - - py.test -k webtest # will only run tests marked as webtest - -""" -import py - -def pytest_namespace(): - return {'mark': MarkGenerator()} - -class MarkGenerator: - """ non-underscore attributes of this object can be used as decorators for - marking test functions. Example: @py.test.mark.slowtest in front of a - function will set the 'slowtest' marker object on it. 
""" - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - return MarkDecorator(name) - -class MarkDecorator: - """ decorator for setting function attributes. """ - def __init__(self, name): - self.markname = name - self.kwargs = {} - self.args = [] - - def __repr__(self): - d = self.__dict__.copy() - name = d.pop('markname') - return "" %(name, d) - - def __call__(self, *args, **kwargs): - """ if passed a single callable argument: decorate it with mark info. - otherwise add *args/**kwargs in-place to mark information. """ - if args: - func = args[0] - if len(args) == 1 and hasattr(func, '__call__') or \ - hasattr(func, '__bases__'): - if hasattr(func, '__bases__'): - if hasattr(func, 'pytestmark'): - l = func.pytestmark - if not isinstance(l, list): - func.pytestmark = [l, self] - else: - l.append(self) - else: - func.pytestmark = [self] - else: - holder = getattr(func, self.markname, None) - if holder is None: - holder = MarkInfo(self.markname, self.args, self.kwargs) - setattr(func, self.markname, holder) - else: - holder.kwargs.update(self.kwargs) - holder.args.extend(self.args) - return func - else: - self.args.extend(args) - self.kwargs.update(kwargs) - return self - -class MarkInfo: - def __init__(self, name, args, kwargs): - self._name = name - self.args = args - self.kwargs = kwargs - - def __getattr__(self, name): - if name[0] != '_' and name in self.kwargs: - py.log._apiwarn("1.1", "use .kwargs attribute to access key-values") - return self.kwargs[name] - raise AttributeError(name) - - def __repr__(self): - return "" % ( - self._name, self.args, self.kwargs) - - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - item = __multicall__.execute() - if isinstance(item, py.test.collect.Function): - cls = collector.getparent(py.test.collect.Class) - mod = collector.getparent(py.test.collect.Module) - func = item.obj - func = getattr(func, '__func__', func) # py3 - func = getattr(func, 'im_func', func) # py2 - for 
parent in [x for x in (mod, cls) if x]: - marker = getattr(parent.obj, 'pytestmark', None) - if marker is not None: - if not isinstance(marker, list): - marker = [marker] - for mark in marker: - if isinstance(mark, MarkDecorator): - mark(func) - return item diff --git a/py/bin/py.convert_unittest b/py/bin/py.convert_unittest deleted file mode 100755 --- a/py/bin/py.convert_unittest +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pyconvert_unittest() \ No newline at end of file diff --git a/py/_plugin/pytest_tmpdir.py b/py/_plugin/pytest_tmpdir.py deleted file mode 100644 --- a/py/_plugin/pytest_tmpdir.py +++ /dev/null @@ -1,22 +0,0 @@ -"""provide temporary directories to test functions. - -usage example:: - - def test_plugin(tmpdir): - tmpdir.join("hello").write("hello") - -.. _`py.path.local`: ../../path.html - -""" -import py - -def pytest_funcarg__tmpdir(request): - """return a temporary directory path object - unique to each test function invocation, - created as a sub directory of the base temporary - directory. The returned object is a `py.path.local`_ - path object. 
- """ - name = request.function.__name__ - x = request.config.mktemp(name, numbered=True) - return x.realpath() diff --git a/py/bin/win32/py.convert_unittest.cmd b/py/bin/win32/py.convert_unittest.cmd deleted file mode 100644 --- a/py/bin/win32/py.convert_unittest.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.convert_unittest" %* \ No newline at end of file diff --git a/py/_path/gateway/__init__.py b/py/_path/gateway/__init__.py deleted file mode 100644 --- a/py/_path/gateway/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/bin/win32/py.svnwcrevert.cmd b/py/bin/win32/py.svnwcrevert.cmd deleted file mode 100644 --- a/py/bin/win32/py.svnwcrevert.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.svnwcrevert" %* \ No newline at end of file diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py deleted file mode 100644 --- a/py/_plugin/pytest_restdoc.py +++ /dev/null @@ -1,433 +0,0 @@ -""" -perform ReST syntax, local and remote reference tests on .rst/.txt files. 
-""" -import py -import sys, os, re - -def pytest_addoption(parser): - group = parser.getgroup("ReST", "ReST documentation check options") - group.addoption('-R', '--urlcheck', - action="store_true", dest="urlcheck", default=False, - help="urlopen() remote links found in ReST text files.") - group.addoption('--urltimeout', action="store", metavar="secs", - type="int", dest="urlcheck_timeout", default=5, - help="timeout in seconds for remote urlchecks") - group.addoption('--forcegen', - action="store_true", dest="forcegen", default=False, - help="force generation of html files.") - -def pytest_collect_file(path, parent): - if path.ext in (".txt", ".rst"): - project = getproject(path) - if project is not None: - return ReSTFile(path, parent=parent, project=project) - -def getproject(path): - for parent in path.parts(reverse=True): - confrest = parent.join("confrest.py") - if confrest.check(): - Project = confrest.pyimport().Project - return Project(parent) - -class ReSTFile(py.test.collect.File): - def __init__(self, fspath, parent, project): - super(ReSTFile, self).__init__(fspath=fspath, parent=parent) - self.project = project - - def collect(self): - return [ - ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project), - LinkCheckerMaker("checklinks", parent=self), - DoctestText("doctest", parent=self), - ] - -def deindent(s, sep='\n'): - leastspaces = -1 - lines = s.split(sep) - for line in lines: - if not line.strip(): - continue - spaces = len(line) - len(line.lstrip()) - if leastspaces == -1 or spaces < leastspaces: - leastspaces = spaces - if leastspaces == -1: - return s - for i, line in enumerate(lines): - if not line.strip(): - lines[i] = '' - else: - lines[i] = line[leastspaces:] - return sep.join(lines) - -class ReSTSyntaxTest(py.test.collect.Item): - def __init__(self, name, parent, project): - super(ReSTSyntaxTest, self).__init__(name=name, parent=parent) - self.project = project - - def reportinfo(self): - return self.fspath, None, "syntax 
check" - - def runtest(self): - self.restcheck(py.path.svnwc(self.fspath)) - - def restcheck(self, path): - py.test.importorskip("docutils") - self.register_linkrole() - from docutils.utils import SystemMessage - try: - self._checkskip(path, self.project.get_htmloutputpath(path)) - self.project.process(path) - except KeyboardInterrupt: - raise - except SystemExit, error: - if error.message == "ERROR: dot not found": - py.test.skip("system doesn't have graphviz installed") - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") - - def register_linkrole(self): - #directive.register_linkrole('api', self.resolve_linkrole) - #directive.register_linkrole('source', self.resolve_linkrole) -# -# # XXX fake sphinx' "toctree" and refs -# directive.register_linkrole('ref', self.resolve_linkrole) - - from docutils.parsers.rst import directives - def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - toctree_directive.content = 1 - toctree_directive.options = {'maxdepth': int, 'glob': directives.flag, - 'hidden': directives.flag} - directives.register_directive('toctree', toctree_directive) - self.register_pygments() - - def register_pygments(self): - # taken from pygments-main/external/rst-directive.py - from docutils.parsers.rst import directives - try: - from pygments.formatters import HtmlFormatter - except ImportError: - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - pygments_directive.options = {} - else: - # The default formatter - DEFAULT = HtmlFormatter(noclasses=True) - # Add name -> formatter pairs for every variant you want to use - VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), - } - - from docutils import nodes - - from pygments import highlight - from pygments.lexers import 
get_lexer_by_name, TextLexer - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight('\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - - pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - directives.register_directive('sourcecode', pygments_directive) - - def resolve_linkrole(self, name, text, check=True): - apigen_relpath = self.project.apigen_relpath - - if name == 'api': - if text == 'py': - return ('py', apigen_relpath + 'api/index.html') - else: - assert text.startswith('py.'), ( - 'api link "%s" does not point to the py package') % (text,) - dotted_name = text - if dotted_name.find('(') > -1: - dotted_name = dotted_name[:text.find('(')] - # remove pkg root - path = dotted_name.split('.')[1:] - dotted_name = '.'.join(path) - obj = py - if check: - for chunk in path: - try: - obj = getattr(obj, chunk) - except AttributeError: - raise AssertionError( - 'problem with linkrole :api:`%s`: can not resolve ' - 'dotted name %s' % (text, dotted_name,)) - return (text, apigen_relpath + 'api/%s.html' % (dotted_name,)) - elif name == 'source': - assert text.startswith('py/'), ('source link "%s" does not point ' - 'to the py package') % (text,) - relpath = '/'.join(text.split('/')[1:]) - if check: - pkgroot = py._pydir - abspath = pkgroot.join(relpath) - assert pkgroot.join(relpath).check(), ( - 'problem with linkrole :source:`%s`: ' - 'path %s does not exist' % (text, relpath)) - if relpath.endswith('/') or not relpath: - relpath += 'index.html' - else: - relpath += 
'.html' - return (text, apigen_relpath + 'source/%s' % (relpath,)) - elif name == 'ref': - return ("", "") - - def _checkskip(self, lpath, htmlpath=None): - if not self.config.getvalue("forcegen"): - lpath = py.path.local(lpath) - if htmlpath is not None: - htmlpath = py.path.local(htmlpath) - if lpath.ext == '.txt': - htmlpath = htmlpath or lpath.new(ext='.html') - if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): - py.test.skip("html file is up to date, use --forcegen to regenerate") - #return [] # no need to rebuild - -class DoctestText(py.test.collect.Item): - def reportinfo(self): - return self.fspath, None, "doctest" - - def runtest(self): - content = self._normalize_linesep() - newcontent = self.config.hook.pytest_doctest_prepare_content(content=content) - if newcontent is not None: - content = newcontent - s = content - l = [] - prefix = '.. >>> ' - mod = py.std.types.ModuleType(self.fspath.purebasename) - skipchunk = False - for line in deindent(s).split('\n'): - stripped = line.strip() - if skipchunk and line.startswith(skipchunk): - py.builtin.print_("skipping", line) - continue - skipchunk = False - if stripped.startswith(prefix): - try: - py.builtin.exec_(py.code.Source( - stripped[len(prefix):]).compile(), mod.__dict__) - except ValueError: - e = sys.exc_info()[1] - if e.args and e.args[0] == "skipchunk": - skipchunk = " " * (len(line) - len(line.lstrip())) - else: - raise - else: - l.append(line) - docstring = "\n".join(l) - mod.__doc__ = docstring - failed, tot = py.std.doctest.testmod(mod, verbose=1) - if failed: - py.test.fail("doctest %s: %s failed out of %s" %( - self.fspath, failed, tot)) - - def _normalize_linesep(self): - # XXX quite nasty... 
but it works (fixes win32 issues) - s = self.fspath.read() - linesep = '\n' - if '\r' in s: - if '\n' not in s: - linesep = '\r' - else: - linesep = '\r\n' - s = s.replace(linesep, '\n') - return s - -class LinkCheckerMaker(py.test.collect.Collector): - def collect(self): - return list(self.genlinkchecks()) - - def genlinkchecks(self): - path = self.fspath - # generating functions + args as single tests - timeout = self.config.getvalue("urlcheck_timeout") - for lineno, line in enumerate(path.readlines()): - line = line.strip() - if line.startswith('.. _'): - if line.startswith('.. _`'): - delim = '`:' - else: - delim = ':' - l = line.split(delim, 1) - if len(l) != 2: - continue - tryfn = l[1].strip() - name = "%s:%d" %(tryfn, lineno) - if tryfn.startswith('http:') or tryfn.startswith('https'): - if self.config.getvalue("urlcheck"): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno, timeout), checkfunc=urlcheck) - elif tryfn.startswith('webcal:'): - continue - else: - i = tryfn.find('#') - if i != -1: - checkfn = tryfn[:i] - else: - checkfn = tryfn - if checkfn.strip() and (1 or checkfn.endswith('.html')): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno), checkfunc=localrefcheck) - -class CheckLink(py.test.collect.Item): - def __init__(self, name, parent, args, checkfunc): - super(CheckLink, self).__init__(name, parent) - self.args = args - self.checkfunc = checkfunc - - def runtest(self): - return self.checkfunc(*self.args) - - def reportinfo(self, basedir=None): - return (self.fspath, self.args[2], "checklink: %s" % self.args[0]) - -def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): - old = py.std.socket.getdefaulttimeout() - py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN) - try: - try: - py.builtin.print_("trying remote", tryfn) - py.std.urllib2.urlopen(tryfn) - finally: - py.std.socket.setdefaulttimeout(old) - except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): - e = sys.exc_info()[1] - if getattr(e, 'code', None) in 
(401, 403): # authorization required, forbidden - py.test.skip("%s: %s" %(tryfn, str(e))) - else: - py.test.fail("remote reference error %r in %s:%d\n%s" %( - tryfn, path.basename, lineno+1, e)) - -def localrefcheck(tryfn, path, lineno): - # assume it should be a file - i = tryfn.find('#') - if tryfn.startswith('javascript:'): - return # don't check JS refs - if i != -1: - anchor = tryfn[i+1:] - tryfn = tryfn[:i] - else: - anchor = '' - fn = path.dirpath(tryfn) - ishtml = fn.ext == '.html' - fn = ishtml and fn.new(ext='.txt') or fn - py.builtin.print_("filename is", fn) - if not fn.check(): # not ishtml or not fn.check(): - if not py.path.local(tryfn).check(): # the html could be there - py.test.fail("reference error %r in %s:%d" %( - tryfn, path.basename, lineno+1)) - if anchor: - source = unicode(fn.read(), 'latin1') - source = source.lower().replace('-', ' ') # aehem - - anchor = anchor.replace('-', ' ') - match2 = ".. _`%s`:" % anchor - match3 = ".. _%s:" % anchor - candidates = (anchor, match2, match3) - py.builtin.print_("candidates", repr(candidates)) - for line in source.split('\n'): - line = line.strip() - if line in candidates: - break - else: - py.test.fail("anchor reference error %s#%s in %s:%d" %( - tryfn, anchor, path.basename, lineno+1)) - -if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()): - def log(msg): - print(msg) -else: - def log(msg): - pass - -def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'): - """ return html latin1-encoded document for the given input. 
- source a ReST-string - sourcepath where to look for includes (basically) - stylesheet path (to be used if any) - """ - from docutils.core import publish_string - kwargs = { - 'stylesheet' : stylesheet, - 'stylesheet_path': None, - 'traceback' : 1, - 'embed_stylesheet': 0, - 'output_encoding' : encoding, - #'halt' : 0, # 'info', - 'halt_level' : 2, - } - # docutils uses os.getcwd() :-( - source_path = os.path.abspath(str(source_path)) - prevdir = os.getcwd() - try: - #os.chdir(os.path.dirname(source_path)) - return publish_string(source, source_path, writer_name='html', - settings_overrides=kwargs) - finally: - os.chdir(prevdir) - -def process(txtpath, encoding='latin1'): - """ process a textfile """ - log("processing %s" % txtpath) - assert txtpath.check(ext='.txt') - if isinstance(txtpath, py.path.svnwc): - txtpath = txtpath.localpath - htmlpath = txtpath.new(ext='.html') - #svninfopath = txtpath.localpath.new(ext='.svninfo') - - style = txtpath.dirpath('style.css') - if style.check(): - stylesheet = style.basename - else: - stylesheet = None - content = unicode(txtpath.read(), encoding) - doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding) - htmlpath.open('wb').write(doc) - #log("wrote %r" % htmlpath) - #if txtpath.check(svnwc=1, versioned=1): - # info = txtpath.info() - # svninfopath.dump(info) - -if sys.version_info > (3, 0): - def _uni(s): return s -else: - def _uni(s): - return unicode(s) - -rex1 = re.compile(r'.*(.*).*', re.MULTILINE | re.DOTALL) -rex2 = re.compile(r'.*
(.*)
.*', re.MULTILINE | re.DOTALL) - -def strip_html_header(string, encoding='utf8'): - """ return the content of the body-tag """ - uni = unicode(string, encoding) - for rex in rex1,rex2: - match = rex.search(uni) - if not match: - break - uni = match.group(1) - return uni - -class Project: # used for confrest.py files - def __init__(self, sourcepath): - self.sourcepath = sourcepath - def process(self, path): - return process(path) - def get_htmloutputpath(self, path): - return path.new(ext='html') diff --git a/py/_code/oldmagic.py b/py/_code/oldmagic.py deleted file mode 100644 --- a/py/_code/oldmagic.py +++ /dev/null @@ -1,62 +0,0 @@ -""" deprecated module for turning on/off some features. """ - -import py - -from py.builtin import builtins as cpy_builtin - -def invoke(assertion=False, compile=False): - """ (deprecated) invoke magic, currently you can specify: - - assertion patches the builtin AssertionError to try to give - more meaningful AssertionErrors, which by means - of deploying a mini-interpreter constructs - a useful error message. - """ - py.log._apiwarn("1.1", - "py.magic.invoke() is deprecated, use py.code.patch_builtins()", - stacklevel=2, - ) - py.code.patch_builtins(assertion=assertion, compile=compile) - -def revoke(assertion=False, compile=False): - """ (deprecated) revoke previously invoked magic (see invoke()).""" - py.log._apiwarn("1.1", - "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()", - stacklevel=2, - ) - py.code.unpatch_builtins(assertion=assertion, compile=compile) - -patched = {} - -def patch(namespace, name, value): - """ (deprecated) rebind the 'name' on the 'namespace' to the 'value', - possibly and remember the original value. Multiple - invocations to the same namespace/name pair will - remember a list of old values. 
- """ - py.log._apiwarn("1.1", - "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - orig = getattr(namespace, name) - patched.setdefault(nref, []).append(orig) - setattr(namespace, name, value) - return orig - -def revert(namespace, name): - """ (deprecated) revert to the orginal value the last patch modified. - Raise ValueError if no such original value exists. - """ - py.log._apiwarn("1.1", - "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - if nref not in patched or not patched[nref]: - raise ValueError("No original value stored for %s.%s" % nref) - current = getattr(namespace, name) - orig = patched[nref].pop() - setattr(namespace, name, orig) - return current - diff --git a/py/_plugin/pytest_default.py b/py/_plugin/pytest_default.py deleted file mode 100644 --- a/py/_plugin/pytest_default.py +++ /dev/null @@ -1,131 +0,0 @@ -""" default hooks and general py.test options. 
""" - -import sys -import py - -def pytest_pyfunc_call(__multicall__, pyfuncitem): - if not __multicall__.execute(): - testfunction = pyfuncitem.obj - if pyfuncitem._isyieldedfunction(): - testfunction(*pyfuncitem._args) - else: - funcargs = pyfuncitem.funcargs - testfunction(**funcargs) - -def pytest_collect_file(path, parent): - ext = path.ext - pb = path.purebasename - if pb.startswith("test_") or pb.endswith("_test") or \ - path in parent.config._argfspaths: - if ext == ".py": - return parent.ihook.pytest_pycollect_makemodule( - path=path, parent=parent) - -def pytest_pycollect_makemodule(path, parent): - return parent.Module(path, parent) - -def pytest_funcarg__pytestconfig(request): - """ the pytest config object with access to command line opts.""" - return request.config - -def pytest_ignore_collect(path, config): - ignore_paths = config.getconftest_pathlist("collect_ignore", path=path) - ignore_paths = ignore_paths or [] - excludeopt = config.getvalue("ignore") - if excludeopt: - ignore_paths.extend([py.path.local(x) for x in excludeopt]) - return path in ignore_paths - # XXX more refined would be: - if ignore_paths: - for p in ignore_paths: - if path == p or path.relto(p): - return True - - -def pytest_collect_directory(path, parent): - # XXX reconsider the following comment - # not use parent.Directory here as we generally - # want dir/conftest.py to be able to - # define Directory(dir) already - if not parent.recfilter(path): # by default special ".cvs", ... 
- # check if cmdline specified this dir or a subdir directly - for arg in parent.config._argfspaths: - if path == arg or arg.relto(path): - break - else: - return - Directory = parent.config._getcollectclass('Directory', path) - return Directory(path, parent=parent) - -def pytest_report_iteminfo(item): - return item.reportinfo() - -def pytest_addoption(parser): - group = parser.getgroup("general", "running and selection options") - group._addoption('-x', '--exitfirst', action="store_true", default=False, - dest="exitfirst", - help="exit instantly on first error or failed test."), - group._addoption('--maxfail', metavar="num", - action="store", type="int", dest="maxfail", default=0, - help="exit after first num failures or errors.") - group._addoption('-k', - action="store", dest="keyword", default='', - help="only run test items matching the given " - "space separated keywords. precede a keyword with '-' to negate. " - "Terminate the expression with ':' to treat a match as a signal " - "to run all subsequent tests. 
") - - group = parser.getgroup("collect", "collection") - group.addoption('--collectonly', - action="store_true", dest="collectonly", - help="only collect tests, don't execute them."), - group.addoption("--ignore", action="append", metavar="path", - help="ignore path during collection (multi-allowed).") - group.addoption('--confcutdir', dest="confcutdir", default=None, - metavar="dir", - help="only load conftest.py's relative to specified dir.") - - group = parser.getgroup("debugconfig", - "test process debugging and configuration") - group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", - help="base temporary directory for this test run.") - -def pytest_configure(config): - setsession(config) - # compat - if config.getvalue("exitfirst"): - config.option.maxfail = 1 - -def setsession(config): - val = config.getvalue - if val("collectonly"): - from py._test.session import Session - config.setsessionclass(Session) - -# pycollect related hooks and code, should move to pytest_pycollect.py - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - res = __multicall__.execute() - if res is not None: - return res - if collector._istestclasscandidate(name, obj): - res = collector._deprecated_join(name) - if res is not None: - return res - return collector.Class(name, parent=collector) - elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): - res = collector._deprecated_join(name) - if res is not None: - return res - if is_generator(obj): - # XXX deprecation warning - return collector.Generator(name, parent=collector) - else: - return collector._genfunctions(name, obj) - -def is_generator(func): - try: - return py.code.getrawcode(func).co_flags & 32 # generator function - except AttributeError: # builtin functions have no bytecode - # assume them to not be generators - return False diff --git a/py/_plugin/pytest_hooklog.py b/py/_plugin/pytest_hooklog.py deleted file mode 100644 --- a/py/_plugin/pytest_hooklog.py +++ /dev/null 
@@ -1,33 +0,0 @@ -""" log invocations of extension hooks to a file. """ -import py - -def pytest_addoption(parser): - parser.addoption("--hooklog", dest="hooklog", default=None, - help="write hook calls to the given file.") - -def pytest_configure(config): - hooklog = config.getvalue("hooklog") - if hooklog: - config._hooklogfile = open(hooklog, 'w') - config._hooklog_oldperformcall = config.hook._performcall - config.hook._performcall = (lambda name, multicall: - logged_call(name=name, multicall=multicall, config=config)) - -def logged_call(name, multicall, config): - f = config._hooklogfile - f.write("%s(**%s)\n" % (name, multicall.kwargs)) - try: - res = config._hooklog_oldperformcall(name=name, multicall=multicall) - except: - f.write("-> exception") - raise - f.write("-> %r" % (res,)) - return res - -def pytest_unconfigure(config): - try: - del config.hook.__dict__['_performcall'] - except KeyError: - pass - else: - config._hooklogfile.close() diff --git a/py/_cmdline/pycleanup.py b/py/_cmdline/pycleanup.py deleted file mode 100755 --- a/py/_cmdline/pycleanup.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.cleanup [PATH] ... - -Delete typical python development related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot. Optionally remove setup.py related files and empty -directories. - -""" -import py -import sys, subprocess - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - parser.add_option("-e", metavar="ENDING", - dest="endings", default=[".pyc", "$py.class"], action="append", - help=("(multi) recursively remove files with the given ending." 
- " '.pyc' and '$py.class' are in the default list.")) - parser.add_option("-d", action="store_true", dest="removedir", - help="remove empty directories.") - parser.add_option("-s", action="store_true", dest="setup", - help="remove 'build' and 'dist' directories next to setup.py files") - parser.add_option("-a", action="store_true", dest="all", - help="synonym for '-S -d -e pip-log.txt'") - parser.add_option("-n", "--dryrun", dest="dryrun", default=False, - action="store_true", - help="don't actually delete but display would-be-removed filenames.") - (options, args) = parser.parse_args() - - Cleanup(options, args).main() - -class Cleanup: - def __init__(self, options, args): - if not args: - args = ["."] - self.options = options - self.args = [py.path.local(x) for x in args] - if options.all: - options.setup = True - options.removedir = True - options.endings.append("pip-log.txt") - - def main(self): - if self.options.setup: - for arg in self.args: - self.setupclean(arg) - - for path in self.args: - py.builtin.print_("cleaning path", path, - "of extensions", self.options.endings) - for x in path.visit(self.shouldremove, self.recursedir): - self.remove(x) - if self.options.removedir: - for x in path.visit(lambda x: x.check(dir=1), self.recursedir): - if not x.listdir(): - self.remove(x) - - def shouldremove(self, p): - for ending in self.options.endings: - if p.basename.endswith(ending): - return True - - def recursedir(self, path): - return path.check(dotfile=0, link=0) - - def remove(self, path): - if not path.check(): - return - if self.options.dryrun: - py.builtin.print_("would remove", path) - else: - py.builtin.print_("removing", path) - path.remove() - - def XXXcallsetup(self, setup, *args): - old = setup.dirpath().chdir() - try: - subprocess.call([sys.executable, str(setup)] + list(args)) - finally: - old.chdir() - - def setupclean(self, path): - for x in path.visit("setup.py", self.recursedir): - basepath = x.dirpath() - self.remove(basepath / "build") - 
self.remove(basepath / "dist") diff --git a/py/bin/py.which b/py/bin/py.which deleted file mode 100755 --- a/py/bin/py.which +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pywhich() \ No newline at end of file diff --git a/py/_plugin/pytest_junitxml.py b/py/_plugin/pytest_junitxml.py deleted file mode 100644 --- a/py/_plugin/pytest_junitxml.py +++ /dev/null @@ -1,171 +0,0 @@ -""" - logging of test results in JUnit-XML format, for use with Hudson - and build integration servers. Based on initial code from Ross Lawley. -""" - -import py -import time - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group.addoption('--junitxml', action="store", dest="xmlpath", - metavar="path", default=None, - help="create junit-xml style report file at given path.") - -def pytest_configure(config): - xmlpath = config.option.xmlpath - if xmlpath: - config._xml = LogXML(xmlpath) - config.pluginmanager.register(config._xml) - -def pytest_unconfigure(config): - xml = getattr(config, '_xml', None) - if xml: - del config._xml - config.pluginmanager.unregister(xml) - -class LogXML(object): - def __init__(self, logfile): - self.logfile = logfile - self.test_logs = [] - self.passed = self.skipped = 0 - self.failed = self.errors = 0 - self._durations = {} - - def _opentestcase(self, report): - node = report.item - d = {'time': self._durations.pop(report.item, "0")} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def _closetestcase(self): - self.test_logs.append("") - - def appendlog(self, fmt, *args): - args = tuple([py.xml.escape(arg) for arg in args]) - self.test_logs.append(fmt % args) - - def append_pass(self, report): - self.passed += 1 - self._opentestcase(report) - self._closetestcase() - - def append_failure(self, 
report): - self._opentestcase(report) - #msg = str(report.longrepr.reprtraceback.extraline) - if "xfail" in report.keywords: - self.appendlog( - '') - self.skipped += 1 - else: - self.appendlog('%s', - report.longrepr) - self.failed += 1 - self._closetestcase() - - def _opentestcase_collectfailure(self, report): - node = report.collector - d = {'time': '???'} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def append_collect_failure(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_collect_skipped(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.skipped += 1 - - def append_error(self, report): - self._opentestcase(report) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_skipped(self, report): - self._opentestcase(report) - if "xfail" in report.keywords: - self.appendlog( - '%s', - report.keywords['xfail']) - else: - self.appendlog("") - self._closetestcase() - self.skipped += 1 - - def pytest_runtest_logreport(self, report): - if report.passed: - self.append_pass(report) - elif report.failed: - if report.when != "call": - self.append_error(report) - else: - self.append_failure(report) - elif report.skipped: - self.append_skipped(report) - - def pytest_runtest_call(self, item, __multicall__): - start = time.time() - try: - return __multicall__.execute() - finally: - self._durations[item] = time.time() - start - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - 
self.append_collect_failure(report) - else: - self.append_collect_skipped(report) - - def pytest_internalerror(self, excrepr): - self.errors += 1 - data = py.xml.escape(excrepr) - self.test_logs.append( - '\n' - ' ' - '%s' % data) - - def pytest_sessionstart(self, session): - self.suite_start_time = time.time() - - def pytest_sessionfinish(self, session, exitstatus, __multicall__): - if py.std.sys.version_info[0] < 3: - logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8') - else: - logfile = open(self.logfile, 'w', encoding='utf-8') - - suite_stop_time = time.time() - suite_time_delta = suite_stop_time - self.suite_start_time - numtests = self.passed + self.failed - logfile.write('') - logfile.write('') - logfile.writelines(self.test_logs) - logfile.write('') - logfile.close() - tw = session.config.pluginmanager.getplugin("terminalreporter")._tw - tw.line() - tw.sep("-", "generated xml file: %s" %(self.logfile)) diff --git a/py/_cmdline/pywhich.py b/py/_cmdline/pywhich.py deleted file mode 100755 --- a/py/_cmdline/pywhich.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.which [name] - -print the location of the given python module or package name -""" - -import sys - -def main(): - name = sys.argv[1] - try: - mod = __import__(name) - except ImportError: - sys.stderr.write("could not import: " + name + "\n") - else: - try: - location = mod.__file__ - except AttributeError: - sys.stderr.write("module (has no __file__): " + str(mod)) - else: - print(location) diff --git a/py/_path/gateway/remotepath.py b/py/_path/gateway/remotepath.py deleted file mode 100644 --- a/py/_path/gateway/remotepath.py +++ /dev/null @@ -1,47 +0,0 @@ -import py, itertools -from py._path import common - -COUNTER = itertools.count() - -class RemotePath(common.PathBase): - sep = '/' - - def __init__(self, channel, id, basename=None): - self._channel = channel - self._id = id - self._basename = basename - self._specs = {} - - def __del__(self): - 
self._channel.send(('DEL', self._id)) - - def __repr__(self): - return 'RemotePath(%s)' % self.basename - - def listdir(self, *args): - self._channel.send(('LIST', self._id) + args) - return [RemotePath(self._channel, id, basename) - for (id, basename) in self._channel.receive()] - - def dirpath(self): - id = ~COUNTER.next() - self._channel.send(('DIRPATH', self._id, id)) - return RemotePath(self._channel, id) - - def join(self, *args): - id = ~COUNTER.next() - self._channel.send(('JOIN', self._id, id) + args) - return RemotePath(self._channel, id) - - def _getbyspec(self, spec): - parts = spec.split(',') - ask = [x for x in parts if x not in self._specs] - if ask: - self._channel.send(('GET', self._id, ",".join(ask))) - for part, value in zip(ask, self._channel.receive()): - self._specs[part] = value - return [self._specs[x] for x in parts] - - def read(self): - self._channel.send(('READ', self._id)) - return self._channel.receive() diff --git a/py/_plugin/pytest_doctest.py b/py/_plugin/pytest_doctest.py deleted file mode 100644 --- a/py/_plugin/pytest_doctest.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -collect and execute doctests from modules and test files. - -Usage -------------- - -By default all files matching the ``test*.txt`` pattern will -be run through the python standard ``doctest`` module. Issue:: - - py.test --doctest-glob='*.rst' - -to change the pattern. 
Additionally you can trigger running of -tests in all python modules (including regular python test modules):: - - py.test --doctest-modules - -You can also make these changes permanent in your project by -putting them into a conftest.py file like this:: - - # content of conftest.py - option_doctestmodules = True - option_doctestglob = "*.rst" -""" - -import py -from py._code.code import TerminalRepr, ReprFileLocation -import doctest - -def pytest_addoption(parser): - group = parser.getgroup("collect") - group.addoption("--doctest-modules", - action="store_true", default=False, - help="run doctests in all .py modules", - dest="doctestmodules") - group.addoption("--doctest-glob", - action="store", default="test*.txt", metavar="pat", - help="doctests file matching pattern, default: test*.txt", - dest="doctestglob") - -def pytest_collect_file(path, parent): - config = parent.config - if path.ext == ".py": - if config.getvalue("doctestmodules"): - return DoctestModule(path, parent) - elif path.check(fnmatch=config.getvalue("doctestglob")): - return DoctestTextfile(path, parent) - -class ReprFailDoctest(TerminalRepr): - def __init__(self, reprlocation, lines): - self.reprlocation = reprlocation - self.lines = lines - def toterminal(self, tw): - for line in self.lines: - tw.line(line) - self.reprlocation.toterminal(tw) - -class DoctestItem(py.test.collect.Item): - def __init__(self, path, parent): - name = self.__class__.__name__ + ":" + path.basename - super(DoctestItem, self).__init__(name=name, parent=parent) - self.fspath = path - - def repr_failure(self, excinfo): - if excinfo.errisinstance(doctest.DocTestFailure): - doctestfailure = excinfo.value - example = doctestfailure.example - test = doctestfailure.test - filename = test.filename - lineno = test.lineno + example.lineno + 1 - message = excinfo.type.__name__ - reprlocation = ReprFileLocation(filename, lineno, message) - checker = doctest.OutputChecker() - REPORT_UDIFF = doctest.REPORT_UDIFF - filelines = 
py.path.local(filename).readlines(cr=0) - i = max(test.lineno, max(0, lineno - 10)) # XXX? - lines = [] - for line in filelines[i:lineno]: - lines.append("%03d %s" % (i+1, line)) - i += 1 - lines += checker.output_difference(example, - doctestfailure.got, REPORT_UDIFF).split("\n") - return ReprFailDoctest(reprlocation, lines) - elif excinfo.errisinstance(doctest.UnexpectedException): - excinfo = py.code.ExceptionInfo(excinfo.value.exc_info) - return super(DoctestItem, self).repr_failure(excinfo) - else: - return super(DoctestItem, self).repr_failure(excinfo) - -class DoctestTextfile(DoctestItem): - def runtest(self): - if not self._deprecated_testexecution(): - failed, tot = doctest.testfile( - str(self.fspath), module_relative=False, - raise_on_error=True, verbose=0) - -class DoctestModule(DoctestItem): - def runtest(self): - module = self.fspath.pyimport() - failed, tot = doctest.testmod( - module, raise_on_error=True, verbose=0) diff --git a/py/bin/win32/py.test.cmd b/py/bin/win32/py.test.cmd deleted file mode 100644 --- a/py/bin/win32/py.test.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.test" %* \ No newline at end of file diff --git a/py/_test/config.py b/py/_test/config.py deleted file mode 100644 --- a/py/_test/config.py +++ /dev/null @@ -1,291 +0,0 @@ -import py, os -from py._test.conftesthandle import Conftest -from py._test.pluginmanager import PluginManager -from py._test import parseopt -from py._test.collect import RootCollector - -def ensuretemp(string, dir=1): - """ (deprecated) return temporary directory path with - the given string as the trailing part. It is usually - better to use the 'tmpdir' function argument which will - take care to provide empty unique directories for each - test call even if the test is called multiple times. 
- """ - #py.log._apiwarn(">1.1", "use tmpdir function argument") - return py.test.config.ensuretemp(string, dir=dir) - -class CmdOptions(object): - """ holds cmdline options as attributes.""" - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - def __repr__(self): - return "" %(self.__dict__,) - -class Error(Exception): - """ Test Configuration Error. """ - -class Config(object): - """ access to config values, pluginmanager and plugin hooks. """ - Option = py.std.optparse.Option - Error = Error - basetemp = None - _sessionclass = None - - def __init__(self, topdir=None, option=None): - self.option = option or CmdOptions() - self.topdir = topdir - self._parser = parseopt.Parser( - usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]", - processopt=self._processopt, - ) - self.pluginmanager = PluginManager() - self._conftest = Conftest(onimport=self._onimportconftest) - self.hook = self.pluginmanager.hook - - def _onimportconftest(self, conftestmodule): - self.trace("loaded conftestmodule %r" %(conftestmodule,)) - self.pluginmanager.consider_conftest(conftestmodule) - - def _getmatchingplugins(self, fspath): - allconftests = self._conftest._conftestpath2mod.values() - plugins = [x for x in self.pluginmanager.getplugins() - if x not in allconftests] - plugins += self._conftest.getconftestmodules(fspath) - return plugins - - def trace(self, msg): - if getattr(self.option, 'traceconfig', None): - self.hook.pytest_trace(category="config", msg=msg) - - def _processopt(self, opt): - if hasattr(opt, 'default') and opt.dest: - val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None) - if val is not None: - if opt.type == "int": - val = int(val) - elif opt.type == "long": - val = long(val) - elif opt.type == "float": - val = float(val) - elif not opt.type and opt.action in ("store_true", "store_false"): - val = eval(val) - opt.default = val - else: - name = "option_" + opt.dest - try: - opt.default = self._conftest.rget(name) - except 
(ValueError, KeyError): - pass - if not hasattr(self.option, opt.dest): - setattr(self.option, opt.dest, opt.default) - - def _preparse(self, args): - self.pluginmanager.consider_setuptools_entrypoints() - self.pluginmanager.consider_env() - self.pluginmanager.consider_preparse(args) - self._conftest.setinitial(args) - self.pluginmanager.do_addoption(self._parser) - - def parse(self, args): - """ parse cmdline arguments into this config object. - Note that this can only be called once per testing process. - """ - assert not hasattr(self, 'args'), ( - "can only parse cmdline args at most once per Config object") - self._preparse(args) - self._parser.hints.extend(self.pluginmanager._hints) - args = self._parser.parse_setoption(args, self.option) - if not args: - args.append(py.std.os.getcwd()) - self.topdir = gettopdir(args) - self._rootcol = RootCollector(config=self) - self._setargs(args) - - def _setargs(self, args): - self.args = list(args) - self._argfspaths = [py.path.local(decodearg(x)[0]) for x in args] - - # config objects are usually pickled across system - # barriers but they contain filesystem paths. - # upon getstate/setstate we take care to do everything - # relative to "topdir". 
- def __getstate__(self): - l = [] - for path in self.args: - path = py.path.local(path) - l.append(path.relto(self.topdir)) - return l, self.option.__dict__ - - def __setstate__(self, repr): - # we have to set py.test.config because loading - # of conftest files may use it (deprecated) - # mainly by py.test.config.addoptions() - global config_per_process - py.test.config = config_per_process = self - args, cmdlineopts = repr - cmdlineopts = CmdOptions(**cmdlineopts) - # next line will registers default plugins - self.__init__(topdir=py.path.local(), option=cmdlineopts) - self._rootcol = RootCollector(config=self) - args = [str(self.topdir.join(x)) for x in args] - self._preparse(args) - self._setargs(args) - - def ensuretemp(self, string, dir=True): - return self.getbasetemp().ensure(string, dir=dir) - - def getbasetemp(self): - if self.basetemp is None: - basetemp = self.option.basetemp - if basetemp: - basetemp = py.path.local(basetemp) - if not basetemp.check(dir=1): - basetemp.mkdir() - else: - basetemp = py.path.local.make_numbered_dir(prefix='pytest-') - self.basetemp = basetemp - return self.basetemp - - def mktemp(self, basename, numbered=False): - basetemp = self.getbasetemp() - if not numbered: - return basetemp.mkdir(basename) - else: - return py.path.local.make_numbered_dir(prefix=basename, - keep=0, rootdir=basetemp, lock_timeout=None) - - def getinitialnodes(self): - return [self.getnode(arg) for arg in self.args] - - def getnode(self, arg): - parts = decodearg(arg) - path = py.path.local(parts.pop(0)) - if not path.check(): - raise self.Error("file not found: %s" %(path,)) - topdir = self.topdir - if path != topdir and not path.relto(topdir): - raise self.Error("path %r is not relative to %r" % - (str(path), str(topdir))) - # assumtion: pytest's fs-collector tree follows the filesystem tree - names = list(filter(None, path.relto(topdir).split(path.sep))) - names += parts - try: - return self._rootcol.getbynames(names) - except ValueError: - e = 
py.std.sys.exc_info()[1] - raise self.Error("can't collect: %s\n%s" % (arg, e.args[0])) - - def _getcollectclass(self, name, path): - try: - cls = self._conftest.rget(name, path) - except KeyError: - return getattr(py.test.collect, name) - else: - py.log._apiwarn(">1.1", "%r was found in a conftest.py file, " - "use pytest_collect hooks instead." % (cls,)) - return cls - - def getconftest_pathlist(self, name, path=None): - """ return a matching value, which needs to be sequence - of filenames that will be returned as a list of Path - objects (they can be relative to the location - where they were found). - """ - try: - mod, relroots = self._conftest.rget_with_confmod(name, path) - except KeyError: - return None - modpath = py.path.local(mod.__file__).dirpath() - l = [] - for relroot in relroots: - if not isinstance(relroot, py.path.local): - relroot = relroot.replace("/", py.path.local.sep) - relroot = modpath.join(relroot, abs=True) - l.append(relroot) - return l - - def addoptions(self, groupname, *specs): - """ add a named group of options to the current testing session. - This function gets invoked during testing session initialization. - """ - py.log._apiwarn("1.0", "define pytest_addoptions(parser) to add options", stacklevel=2) - group = self._parser.getgroup(groupname) - for opt in specs: - group._addoption_instance(opt) - return self.option - - def addoption(self, *optnames, **attrs): - return self._parser.addoption(*optnames, **attrs) - - def getvalueorskip(self, name, path=None): - """ return getvalue() or call py.test.skip if no value exists. """ - try: - val = self.getvalue(name, path) - if val is None: - raise KeyError(name) - return val - except KeyError: - py.test.skip("no %r value found" %(name,)) - - def getvalue(self, name, path=None): - """ return 'name' value looked up from the 'options' - and then from the first conftest file found up - the path (including the path itself). 
- if path is None, lookup the value in the initial - conftest modules found during command line parsing. - """ - try: - return getattr(self.option, name) - except AttributeError: - return self._conftest.rget(name, path) - - def setsessionclass(self, cls): - if self._sessionclass is not None: - raise ValueError("sessionclass already set to: %r" %( - self._sessionclass)) - self._sessionclass = cls - - def initsession(self): - """ return an initialized session object. """ - cls = self._sessionclass - if cls is None: - from py._test.session import Session - cls = Session - session = cls(self) - self.trace("instantiated session %r" % session) - return session - -# -# helpers -# - -def gettopdir(args): - """ return the top directory for the given paths. - if the common base dir resides in a python package - parent directory of the root package is returned. - """ - fsargs = [py.path.local(decodearg(arg)[0]) for arg in args] - p = fsargs and fsargs[0] or None - for x in fsargs[1:]: - p = p.common(x) - assert p, "cannot determine common basedir of %s" %(fsargs,) - pkgdir = p.pypkgpath() - if pkgdir is None: - if p.check(file=1): - p = p.dirpath() - return p - else: - return pkgdir.dirpath() - -def decodearg(arg): - arg = str(arg) - return arg.split("::") - -def onpytestaccess(): - # it's enough to have our containing module loaded as - # it initializes a per-process config instance - # which loads default plugins which add to py.test.* - pass - -# a default per-process instance of py.test configuration -config_per_process = Config() diff --git a/py/_plugin/pytest_assertion.py b/py/_plugin/pytest_assertion.py deleted file mode 100644 --- a/py/_plugin/pytest_assertion.py +++ /dev/null @@ -1,28 +0,0 @@ -import py -import sys - -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group._addoption('--no-assert', action="store_true", default=False, - dest="noassert", - help="disable python assert expression reinterpretation."), - -def 
pytest_configure(config): - if not config.getvalue("noassert") and not config.getvalue("nomagic"): - warn_about_missing_assertion() - config._oldassertion = py.builtin.builtins.AssertionError - py.builtin.builtins.AssertionError = py.code._AssertionError - -def pytest_unconfigure(config): - if hasattr(config, '_oldassertion'): - py.builtin.builtins.AssertionError = config._oldassertion - del config._oldassertion - -def warn_about_missing_assertion(): - try: - assert False - except AssertionError: - pass - else: - py.std.warnings.warn("Assertions are turned off!" - " (are you using python -O?)") diff --git a/py/bin/py.countloc b/py/bin/py.countloc deleted file mode 100755 --- a/py/bin/py.countloc +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycountloc() \ No newline at end of file diff --git a/py/bin/_findpy.py b/py/bin/_findpy.py --- a/py/bin/_findpy.py +++ b/py/bin/_findpy.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python +#!/usr/bin/env python # # find and import a version of 'py' @@ -29,9 +29,10 @@ if not searchpy(abspath(os.curdir)): if not searchpy(opd(abspath(sys.argv[0]))): if not searchpy(opd(__file__)): - pass # let's hope it is just on sys.path + pass # let's hope it is just on sys.path import py +import pytest -if __name__ == '__main__': +if __name__ == '__main__': print ("py lib is at %s" % py.__file__) diff --git a/py/_plugin/pytest__pytest.py b/py/_plugin/pytest__pytest.py deleted file mode 100644 --- a/py/_plugin/pytest__pytest.py +++ /dev/null @@ -1,101 +0,0 @@ -import py - -from py._test.pluginmanager import HookRelay - -def pytest_funcarg___pytest(request): - return PytestArg(request) - -class PytestArg: - def __init__(self, request): - self.request = request - - def gethookrecorder(self, hook): - hookrecorder = HookRecorder(hook._registry) - hookrecorder.start_recording(hook._hookspecs) - self.request.addfinalizer(hookrecorder.finish_recording) - return hookrecorder - -class ParsedCall: - def __init__(self, 
name, locals): - assert '_name' not in locals - self.__dict__.update(locals) - self.__dict__.pop('self') - self._name = name - - def __repr__(self): - d = self.__dict__.copy() - del d['_name'] - return "" %(self._name, d) - -class HookRecorder: - def __init__(self, registry): - self._registry = registry - self.calls = [] - self._recorders = {} - - def start_recording(self, hookspecs): - if not isinstance(hookspecs, (list, tuple)): - hookspecs = [hookspecs] - for hookspec in hookspecs: - assert hookspec not in self._recorders - class RecordCalls: - _recorder = self - for name, method in vars(hookspec).items(): - if name[0] != "_": - setattr(RecordCalls, name, self._makecallparser(method)) - recorder = RecordCalls() - self._recorders[hookspec] = recorder - self._registry.register(recorder) - self.hook = HookRelay(hookspecs, registry=self._registry, - prefix="pytest_") - - def finish_recording(self): - for recorder in self._recorders.values(): - self._registry.unregister(recorder) - self._recorders.clear() - - def _makecallparser(self, method): - name = method.__name__ - args, varargs, varkw, default = py.std.inspect.getargspec(method) - if not args or args[0] != "self": - args.insert(0, 'self') - fspec = py.std.inspect.formatargspec(args, varargs, varkw, default) - # we use exec because we want to have early type - # errors on wrong input arguments, using - # *args/**kwargs delays this and gives errors - # elsewhere - exec (py.code.compile(""" - def %(name)s%(fspec)s: - self._recorder.calls.append( - ParsedCall(%(name)r, locals())) - """ % locals())) - return locals()[name] - - def getcalls(self, names): - if isinstance(names, str): - names = names.split() - for name in names: - for cls in self._recorders: - if name in vars(cls): - break - else: - raise ValueError("callname %r not found in %r" %( - name, self._recorders.keys())) - l = [] - for call in self.calls: - if call._name in names: - l.append(call) - return l - - def popcall(self, name): - for i, call in 
enumerate(self.calls): - if call._name == name: - del self.calls[i] - return call - raise ValueError("could not find call %r" %(name, )) - - def getcall(self, name): - l = self.getcalls(name) - assert len(l) == 1, (name, l) - return l[0] - diff --git a/py/_plugin/pytest_helpconfig.py b/py/_plugin/pytest_helpconfig.py deleted file mode 100644 --- a/py/_plugin/pytest_helpconfig.py +++ /dev/null @@ -1,164 +0,0 @@ -""" provide version info, conftest/environment config names. -""" -import py -import inspect, sys - -def pytest_addoption(parser): - group = parser.getgroup('debugconfig') - group.addoption('--version', action="store_true", - help="display py lib version and import information.") - group._addoption('-p', action="append", dest="plugins", default = [], - metavar="name", - help="early-load given plugin (multi-allowed).") - group.addoption('--traceconfig', - action="store_true", dest="traceconfig", default=False, - help="trace considerations of conftest.py files."), - group._addoption('--nomagic', - action="store_true", dest="nomagic", default=False, - help="don't reinterpret asserts, no traceback cutting. 
") - group.addoption('--debug', - action="store_true", dest="debug", default=False, - help="generate and show internal debugging information.") - group.addoption("--help-config", action="store_true", dest="helpconfig", - help="show available conftest.py and ENV-variable names.") - - -def pytest_configure(__multicall__, config): - if config.option.version: - p = py.path.local(py.__file__).dirpath() - sys.stderr.write("This is py.test version %s, imported from %s\n" % - (py.__version__, p)) - sys.exit(0) - if not config.option.helpconfig: - return - __multicall__.execute() - options = [] - for group in config._parser._groups: - options.extend(group.options) - widths = [0] * 10 - tw = py.io.TerminalWriter() - tw.sep("-") - tw.line("%-13s | %-18s | %-25s | %s" %( - "cmdline name", "conftest.py name", "ENV-variable name", "help")) - tw.sep("-") - - options = [opt for opt in options if opt._long_opts] - options.sort(key=lambda x: x._long_opts) - for opt in options: - if not opt._long_opts or not opt.dest: - continue - optstrings = list(opt._long_opts) # + list(opt._short_opts) - optstrings = filter(None, optstrings) - optstring = "|".join(optstrings) - line = "%-13s | %-18s | %-25s | %s" %( - optstring, - "option_%s" % opt.dest, - "PYTEST_OPTION_%s" % opt.dest.upper(), - opt.help and opt.help or "", - ) - tw.line(line[:tw.fullwidth]) - for name, help in conftest_options: - line = "%-13s | %-18s | %-25s | %s" %( - "", - name, - "", - help, - ) - tw.line(line[:tw.fullwidth]) - - tw.sep("-") - sys.exit(0) - -conftest_options = ( - ('pytest_plugins', 'list of plugin names to load'), - ('collect_ignore', '(relative) paths ignored during collection'), - ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), -) - -def pytest_report_header(config): - lines = [] - if config.option.debug or config.option.traceconfig: - lines.append("using py lib: %s" % (py.path.local(py.__file__).dirpath())) - if config.option.traceconfig: - lines.append("active plugins:") - plugins = [] - 
items = config.pluginmanager._name2plugin.items() - for name, plugin in items: - lines.append(" %-20s: %s" %(name, repr(plugin))) - return lines - - -# ===================================================== -# validate plugin syntax and hooks -# ===================================================== - -def pytest_plugin_registered(manager, plugin): - methods = collectattr(plugin) - hooks = {} - for hookspec in manager.hook._hookspecs: - hooks.update(collectattr(hookspec)) - - stringio = py.io.TextIO() - def Print(*args): - if args: - stringio.write(" ".join(map(str, args))) - stringio.write("\n") - - fail = False - while methods: - name, method = methods.popitem() - #print "checking", name - if isgenerichook(name): - continue - if name not in hooks: - if not getattr(method, 'optionalhook', False): - Print("found unknown hook:", name) - fail = True - else: - #print "checking", method - method_args = getargs(method) - #print "method_args", method_args - if '__multicall__' in method_args: - method_args.remove('__multicall__') - hook = hooks[name] - hookargs = getargs(hook) - for arg in method_args: - if arg not in hookargs: - Print("argument %r not available" %(arg, )) - Print("actual definition: %s" %(formatdef(method))) - Print("available hook arguments: %s" % - ", ".join(hookargs)) - fail = True - break - #if not fail: - # print "matching hook:", formatdef(method) - if fail: - name = getattr(plugin, '__name__', plugin) - raise PluginValidationError("%s:\n%s" %(name, stringio.getvalue())) - -class PluginValidationError(Exception): - """ plugin failed validation. 
""" - -def isgenerichook(name): - return name == "pytest_plugins" or \ - name.startswith("pytest_funcarg__") - -def getargs(func): - args = inspect.getargs(py.code.getrawcode(func))[0] - startindex = inspect.ismethod(func) and 1 or 0 - return args[startindex:] - -def collectattr(obj, prefixes=("pytest_",)): - methods = {} - for apiname in dir(obj): - for prefix in prefixes: - if apiname.startswith(prefix): - methods[apiname] = getattr(obj, apiname) - return methods - -def formatdef(func): - return "%s%s" %( - func.__name__, - inspect.formatargspec(*inspect.getargspec(func)) - ) - diff --git a/py/_plugin/pytest_genscript.py b/py/_plugin/pytest_genscript.py deleted file mode 100755 --- a/py/_plugin/pytest_genscript.py +++ /dev/null @@ -1,69 +0,0 @@ -#! /usr/bin/env python -""" -generate standalone test script to be distributed along with an application. -""" - -import os -import sys -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group.addoption("--genscript", action="store", default=None, - dest="genscript", metavar="path", - help="create standalone py.test script at given target path.") - -def pytest_configure(config): - genscript = config.getvalue("genscript") - if genscript: - import py - mydir = py.path.local(__file__).dirpath() - infile = mydir.join("standalonetemplate.py") - pybasedir = py.path.local(py.__file__).dirpath().dirpath() - genscript = py.path.local(genscript) - main(pybasedir, outfile=genscript, infile=infile) - raise SystemExit(0) - -def main(pybasedir, outfile, infile): - import base64 - import zlib - try: - import pickle - except Importerror: - import cPickle as pickle - - outfile = str(outfile) - infile = str(infile) - assert os.path.isabs(outfile) - os.chdir(str(pybasedir)) - files = [] - for dirpath, dirnames, filenames in os.walk("py"): - for f in filenames: - if not f.endswith(".py"): - continue - - fn = os.path.join(dirpath, f) - files.append(fn) - - name2src = {} - for f in files: - k = f.replace(os.sep, 
".")[:-3] - name2src[k] = open(f, "r").read() - - data = pickle.dumps(name2src, 2) - data = zlib.compress(data, 9) - data = base64.encodestring(data) - data = data.decode("ascii") - - exe = open(infile, "r").read() - exe = exe.replace("@SOURCES@", data) - - open(outfile, "w").write(exe) - os.chmod(outfile, 493) # 0755 - sys.stdout.write("generated standalone py.test at %r, have fun!\n" % outfile) - -if __name__=="__main__": - dn = os.path.dirname - here = os.path.abspath(dn(__file__)) # py/plugin/ - pybasedir = dn(dn(here)) - outfile = os.path.join(os.getcwd(), "py.test-standalone") - infile = os.path.join(here, 'standalonetemplate.py') - main(pybasedir, outfile, infile) diff --git a/py/_plugin/pytest_resultlog.py b/py/_plugin/pytest_resultlog.py deleted file mode 100644 --- a/py/_plugin/pytest_resultlog.py +++ /dev/null @@ -1,98 +0,0 @@ -"""non-xml machine-readable logging of test results. - Useful for buildbot integration code. See the `PyPy-test`_ - web page for post-processing. - -.. 
_`PyPy-test`: http://codespeak.net:8099/summary - -""" - -import py -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("resultlog", "resultlog plugin options") - group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None, - help="path for machine-readable result log.") - -def pytest_configure(config): - resultlog = config.option.resultlog - if resultlog: - logfile = open(resultlog, 'w', 1) # line buffered - config._resultlog = ResultLog(config, logfile) - config.pluginmanager.register(config._resultlog) - -def pytest_unconfigure(config): - resultlog = getattr(config, '_resultlog', None) - if resultlog: - resultlog.logfile.close() - del config._resultlog - config.pluginmanager.unregister(resultlog) - -def generic_path(item): - chain = item.listchain() - gpath = [chain[0].name] - fspath = chain[0].fspath - fspart = False - for node in chain[1:]: - newfspath = node.fspath - if newfspath == fspath: - if fspart: - gpath.append(':') - fspart = False - else: - gpath.append('.') - else: - gpath.append('/') - fspart = True - name = node.name - if name[0] in '([': - gpath.pop() - gpath.append(name) - fspath = newfspath - return ''.join(gpath) - -class ResultLog(object): - def __init__(self, config, logfile): - self.config = config - self.logfile = logfile # preferably line buffered - - def write_log_entry(self, testpath, shortrepr, longrepr): - print_("%s %s" % (shortrepr, testpath), file=self.logfile) - for line in longrepr.splitlines(): - print_(" %s" % line, file=self.logfile) - - def log_outcome(self, node, shortrepr, longrepr): - testpath = generic_path(node) - self.write_log_entry(testpath, shortrepr, longrepr) - - def pytest_runtest_logreport(self, report): - res = self.config.hook.pytest_report_teststatus(report=report) - if res is not None: - code = res[1] - else: - code = report.shortrepr - if code == 'x': - longrepr = str(report.longrepr) - elif code == 'X': - longrepr = '' - elif 
report.passed: - longrepr = "" - elif report.failed: - longrepr = str(report.longrepr) - elif report.skipped: - longrepr = str(report.longrepr.reprcrash.message) - self.log_outcome(report.item, code, longrepr) - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - code = "F" - else: - assert report.skipped - code = "S" - longrepr = str(report.longrepr.reprcrash) - self.log_outcome(report.collector, code, longrepr) - - def pytest_internalerror(self, excrepr): - path = excrepr.reprcrash.path - self.write_log_entry(path, '!', str(excrepr)) diff --git a/py/bin/win32/py.countloc.cmd b/py/bin/win32/py.countloc.cmd deleted file mode 100644 --- a/py/bin/win32/py.countloc.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.countloc" %* \ No newline at end of file diff --git a/py/bin/env.cmd b/py/bin/env.cmd deleted file mode 100644 --- a/py/bin/env.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i diff --git a/py/bin/py.cleanup b/py/bin/py.cleanup deleted file mode 100755 --- a/py/bin/py.cleanup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycleanup() \ No newline at end of file diff --git a/py/_test/pluginmanager.py b/py/_test/pluginmanager.py deleted file mode 100644 --- a/py/_test/pluginmanager.py +++ /dev/null @@ -1,353 +0,0 @@ -""" -managing loading and interacting with pytest plugins. 
-""" -import py -import inspect -from py._plugin import hookspec - -default_plugins = ( - "default runner capture mark terminal skipping tmpdir monkeypatch " - "recwarn pdb pastebin unittest helpconfig nose assertion genscript " - "junitxml doctest").split() - -def check_old_use(mod, modname): - clsname = modname[len('pytest_'):].capitalize() + "Plugin" - assert not hasattr(mod, clsname), (mod, clsname) - -class PluginManager(object): - def __init__(self): - self.registry = Registry() - self._name2plugin = {} - self._hints = [] - self.hook = HookRelay([hookspec], registry=self.registry) - self.register(self) - for spec in default_plugins: - self.import_plugin(spec) - - def _getpluginname(self, plugin, name): - if name is None: - if hasattr(plugin, '__name__'): - name = plugin.__name__.split(".")[-1] - else: - name = id(plugin) - return name - - def register(self, plugin, name=None): - assert not self.isregistered(plugin), plugin - assert not self.registry.isregistered(plugin), plugin - name = self._getpluginname(plugin, name) - if name in self._name2plugin: - return False - self._name2plugin[name] = plugin - self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self}) - self.hook.pytest_plugin_registered(manager=self, plugin=plugin) - self.registry.register(plugin) - return True - - def unregister(self, plugin): - self.hook.pytest_plugin_unregistered(plugin=plugin) - self.registry.unregister(plugin) - for name, value in list(self._name2plugin.items()): - if value == plugin: - del self._name2plugin[name] - - def isregistered(self, plugin, name=None): - if self._getpluginname(plugin, name) in self._name2plugin: - return True - for val in self._name2plugin.values(): - if plugin == val: - return True - - def addhooks(self, spec): - self.hook._addhooks(spec, prefix="pytest_") - - def getplugins(self): - return list(self.registry) - - def skipifmissing(self, name): - if not self.hasplugin(name): - py.test.skip("plugin %r is missing" % name) - - def 
hasplugin(self, name): - try: - self.getplugin(name) - except KeyError: - return False - else: - return True - - def getplugin(self, name): - try: - return self._name2plugin[name] - except KeyError: - impname = canonical_importname(name) - return self._name2plugin[impname] - - # API for bootstrapping - # - def _envlist(self, varname): - val = py.std.os.environ.get(varname, None) - if val is not None: - return val.split(',') - return () - - def consider_env(self): - for spec in self._envlist("PYTEST_PLUGINS"): - self.import_plugin(spec) - - def consider_setuptools_entrypoints(self): - try: - from pkg_resources import iter_entry_points - except ImportError: - return # XXX issue a warning - for ep in iter_entry_points('pytest11'): - name = canonical_importname(ep.name) - if name in self._name2plugin: - continue - plugin = ep.load() - self.register(plugin, name=name) - - def consider_preparse(self, args): - for opt1,opt2 in zip(args, args[1:]): - if opt1 == "-p": - self.import_plugin(opt2) - - def consider_conftest(self, conftestmodule): - cls = getattr(conftestmodule, 'ConftestPlugin', None) - if cls is not None: - raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, " - "were removed in 1.0.0b2" % (cls,)) - if self.register(conftestmodule, name=conftestmodule.__file__): - self.consider_module(conftestmodule) - - def consider_module(self, mod): - attr = getattr(mod, "pytest_plugins", ()) - if attr: - if not isinstance(attr, (list, tuple)): - attr = (attr,) - for spec in attr: - self.import_plugin(spec) - - def import_plugin(self, spec): - assert isinstance(spec, str) - modname = canonical_importname(spec) - if modname in self._name2plugin: - return - try: - mod = importplugin(modname) - except KeyboardInterrupt: - raise - except py.test.skip.Exception: - e = py.std.sys.exc_info()[1] - self._hints.append("skipped plugin %r: %s" %((modname, e.msg))) - else: - check_old_use(mod, modname) - self.register(mod) - self.consider_module(mod) - - def 
pytest_terminal_summary(self, terminalreporter): - tw = terminalreporter._tw - if terminalreporter.config.option.traceconfig: - for hint in self._hints: - tw.line("hint: %s" % hint) - - # - # - # API for interacting with registered and instantiated plugin objects - # - # - def listattr(self, attrname, plugins=None): - return self.registry.listattr(attrname, plugins=plugins) - - def notify_exception(self, excinfo=None): - if excinfo is None: - excinfo = py.code.ExceptionInfo() - excrepr = excinfo.getrepr(funcargs=True, showlocals=True) - return self.hook.pytest_internalerror(excrepr=excrepr) - - def do_addoption(self, parser): - mname = "pytest_addoption" - methods = self.registry.listattr(mname, reverse=True) - mc = MultiCall(methods, {'parser': parser}) - mc.execute() - - def pytest_plugin_registered(self, plugin): - dic = self.call_plugin(plugin, "pytest_namespace", {}) or {} - for name, value in dic.items(): - setattr(py.test, name, value) - py.test.__all__.append(name) - if hasattr(self, '_config'): - self.call_plugin(plugin, "pytest_addoption", - {'parser': self._config._parser}) - self.call_plugin(plugin, "pytest_configure", - {'config': self._config}) - - def call_plugin(self, plugin, methname, kwargs): - return MultiCall( - methods=self.listattr(methname, plugins=[plugin]), - kwargs=kwargs, firstresult=True).execute() - - def do_configure(self, config): - assert not hasattr(self, '_config') - self._config = config - config.hook.pytest_configure(config=self._config) - - def do_unconfigure(self, config): - config = self._config - del self._config - config.hook.pytest_unconfigure(config=config) - config.pluginmanager.unregister(self) - -def canonical_importname(name): - name = name.lower() - modprefix = "pytest_" - if not name.startswith(modprefix): - name = modprefix + name - return name - -def importplugin(importspec): - try: - return __import__(importspec) - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - 
try: - return __import__("py._plugin.%s" %(importspec), - None, None, '__doc__') - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - # show the original exception, not the failing internal one - return __import__(importspec) - - -class MultiCall: - """ execute a call into multiple python functions/methods. """ - - def __init__(self, methods, kwargs, firstresult=False): - self.methods = methods[:] - self.kwargs = kwargs.copy() - self.kwargs['__multicall__'] = self - self.results = [] - self.firstresult = firstresult - - def __repr__(self): - status = "%d results, %d meths" % (len(self.results), len(self.methods)) - return "" %(status, self.kwargs) - - def execute(self): - while self.methods: - method = self.methods.pop() - kwargs = self.getkwargs(method) - res = method(**kwargs) - if res is not None: - self.results.append(res) - if self.firstresult: - return res - if not self.firstresult: - return self.results - - def getkwargs(self, method): - kwargs = {} - for argname in varnames(method): - try: - kwargs[argname] = self.kwargs[argname] - except KeyError: - pass # might be optional param - return kwargs - -def varnames(func): - ismethod = inspect.ismethod(func) - rawcode = py.code.getrawcode(func) - try: - return rawcode.co_varnames[ismethod:] - except AttributeError: - return () - -class Registry: - """ - Manage Plugins: register/unregister call calls to plugins. 
- """ - def __init__(self, plugins=None): - if plugins is None: - plugins = [] - self._plugins = plugins - - def register(self, plugin): - assert not isinstance(plugin, str) - assert not plugin in self._plugins - self._plugins.append(plugin) - - def unregister(self, plugin): - self._plugins.remove(plugin) - - def isregistered(self, plugin): - return plugin in self._plugins - - def __iter__(self): - return iter(self._plugins) - - def listattr(self, attrname, plugins=None, reverse=False): - l = [] - if plugins is None: - plugins = self._plugins - for plugin in plugins: - try: - l.append(getattr(plugin, attrname)) - except AttributeError: - continue - if reverse: - l.reverse() - return l - -class HookRelay: - def __init__(self, hookspecs, registry, prefix="pytest_"): - if not isinstance(hookspecs, list): - hookspecs = [hookspecs] - self._hookspecs = [] - self._registry = registry - for hookspec in hookspecs: - self._addhooks(hookspec, prefix) - - def _addhooks(self, hookspecs, prefix): - self._hookspecs.append(hookspecs) - added = False - for name, method in vars(hookspecs).items(): - if name.startswith(prefix): - if not method.__doc__: - raise ValueError("docstring required for hook %r, in %r" - % (method, hookspecs)) - firstresult = getattr(method, 'firstresult', False) - hc = HookCaller(self, name, firstresult=firstresult) - setattr(self, name, hc) - added = True - #print ("setting new hook", name) - if not added: - raise ValueError("did not find new %r hooks in %r" %( - prefix, hookspecs,)) - - - def _performcall(self, name, multicall): - return multicall.execute() - -class HookCaller: - def __init__(self, hookrelay, name, firstresult): - self.hookrelay = hookrelay - self.name = name - self.firstresult = firstresult - - def __repr__(self): - return "" %(self.name,) - - def __call__(self, **kwargs): - methods = self.hookrelay._registry.listattr(self.name) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, 
mc) - - def pcall(self, plugins, **kwargs): - methods = self.hookrelay._registry.listattr(self.name, plugins=plugins) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, mc) - diff --git a/py/_cmdline/pytest.py b/py/_cmdline/pytest.py deleted file mode 100755 --- a/py/_cmdline/pytest.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -import py - -def main(args=None): - raise SystemExit(py.test.cmdline.main(args)) diff --git a/py/_plugin/standalonetemplate.py b/py/_plugin/standalonetemplate.py deleted file mode 100755 --- a/py/_plugin/standalonetemplate.py +++ /dev/null @@ -1,63 +0,0 @@ -#! /usr/bin/env python - -sources = """ - at SOURCES@""" - -import sys -import base64 -import zlib -import imp - -class DictImporter(object): - def __init__(self, sources): - self.sources = sources - - def find_module(self, fullname, path=None): - if fullname in self.sources: - return self - if fullname+'.__init__' in self.sources: - return self - return None - - def load_module(self, fullname): - # print "load_module:", fullname - from types import ModuleType - try: - s = self.sources[fullname] - is_pkg = False - except KeyError: - s = self.sources[fullname+'.__init__'] - is_pkg = True - - co = compile(s, fullname, 'exec') - module = sys.modules.setdefault(fullname, ModuleType(fullname)) - module.__file__ = "%s/%s" % (__file__, fullname) - module.__loader__ = self - if is_pkg: - module.__path__ = [fullname] - - do_exec(co, module.__dict__) - return sys.modules[fullname] - - def get_source(self, name): - res = self.sources.get(name) - if res is None: - res = self.sources.get(name+'.__init__') - return res - -if __name__ == "__main__": - if sys.version_info >= (3,0): - exec("def do_exec(co, loc): exec(co, loc)\n") - import pickle - sources = sources.encode("ascii") # ensure bytes - sources = pickle.loads(zlib.decompress(base64.decodebytes(sources))) - else: - import cPickle as pickle - exec("def do_exec(co, loc): exec 
co in loc\n") - sources = pickle.loads(zlib.decompress(base64.decodestring(sources))) - - importer = DictImporter(sources) - sys.meta_path.append(importer) - - import py - py.cmdline.pytest() diff --git a/py/bin/win32/py.lookup.cmd b/py/bin/win32/py.lookup.cmd deleted file mode 100644 --- a/py/bin/win32/py.lookup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.lookup" %* \ No newline at end of file diff --git a/py/bin/win32/py.which.cmd b/py/bin/win32/py.which.cmd deleted file mode 100644 --- a/py/bin/win32/py.which.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.which" %* \ No newline at end of file diff --git a/py/_compat/dep_optparse.py b/py/_compat/dep_optparse.py deleted file mode 100644 --- a/py/_compat/dep_optparse.py +++ /dev/null @@ -1,4 +0,0 @@ -import py -py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg") - -optparse = py.std.optparse diff --git a/py/_plugin/pytest_terminal.py b/py/_plugin/pytest_terminal.py deleted file mode 100644 --- a/py/_plugin/pytest_terminal.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -Implements terminal reporting of the full testing process. - -This is a good source for looking at the various reporting hooks. 
-""" -import py -import sys - -optionalhook = py.test.mark.optionalhook - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting", "reporting", after="general") - group._addoption('-v', '--verbose', action="count", - dest="verbose", default=0, help="increase verbosity."), - group._addoption('-r', - action="store", dest="reportchars", default=None, metavar="chars", - help="show extra test summary info as specified by chars (f)ailed, " - "(s)skipped, (x)failed, (X)passed.") - group._addoption('-l', '--showlocals', - action="store_true", dest="showlocals", default=False, - help="show locals in tracebacks (disabled by default).") - group._addoption('--report', - action="store", dest="report", default=None, metavar="opts", - help="(deprecated, use -r)") - group._addoption('--tb', metavar="style", - action="store", dest="tbstyle", default='long', - type="choice", choices=['long', 'short', 'no', 'line'], - help="traceback print mode (long/short/line/no).") - group._addoption('--fulltrace', - action="store_true", dest="fulltrace", default=False, - help="don't cut any tracebacks (default is to cut).") - group._addoption('--funcargs', - action="store_true", dest="showfuncargs", default=False, - help="show available function arguments, sorted by plugin") - -def pytest_configure(config): - if config.option.collectonly: - reporter = CollectonlyReporter(config) - elif config.option.showfuncargs: - config.setsessionclass(ShowFuncargSession) - reporter = None - else: - reporter = TerminalReporter(config) - if reporter: - # XXX see remote.py's XXX - for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth': - if hasattr(config, attr): - #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr) - name = attr.split("_")[-1] - assert hasattr(self.reporter._tw, name), name - setattr(reporter._tw, name, getattr(config, attr)) - config.pluginmanager.register(reporter, 'terminalreporter') - -def getreportopt(config): - reportopts = "" - optvalue = 
config.getvalue("report") - if optvalue: - py.builtin.print_("DEPRECATED: use -r instead of --report option.", - file=py.std.sys.stderr) - if optvalue: - for setting in optvalue.split(","): - setting = setting.strip() - if setting == "skipped": - reportopts += "s" - elif setting == "xfailed": - reportopts += "x" - reportchars = config.getvalue("reportchars") - if reportchars: - for char in reportchars: - if char not in reportopts: - reportopts += char - return reportopts - -class TerminalReporter: - def __init__(self, config, file=None): - self.config = config - self.stats = {} - self.curdir = py.path.local() - if file is None: - file = py.std.sys.stdout - self._tw = py.io.TerminalWriter(file) - self.currentfspath = None - self.gateway2info = {} - self.reportchars = getreportopt(config) - - def hasopt(self, char): - char = {'xfailed': 'x', 'skipped': 's'}.get(char,char) - return char in self.reportchars - - def write_fspath_result(self, fspath, res): - fspath = self.curdir.bestrelpath(fspath) - if fspath != self.currentfspath: - self._tw.line() - relpath = self.curdir.bestrelpath(fspath) - self._tw.write(relpath + " ") - self.currentfspath = fspath - self._tw.write(res) - - def write_ensure_prefix(self, prefix, extra="", **kwargs): - if self.currentfspath != prefix: - self._tw.line() - self.currentfspath = prefix - self._tw.write(prefix) - if extra: - self._tw.write(extra, **kwargs) - self.currentfspath = -2 - - def ensure_newline(self): - if self.currentfspath: - self._tw.line() - self.currentfspath = None - - def write_line(self, line, **markup): - line = str(line) - self.ensure_newline() - self._tw.line(line, **markup) - - def write_sep(self, sep, title=None, **markup): - self.ensure_newline() - self._tw.sep(sep, title, **markup) - - def getcategoryletterword(self, rep): - res = self.config.hook.pytest_report_teststatus(report=rep) - if res: - return res - for cat in 'skipped failed passed ???'.split(): - if getattr(rep, cat, None): - break - return cat, 
self.getoutcomeletter(rep), self.getoutcomeword(rep) - - def getoutcomeletter(self, rep): - return rep.shortrepr - - def getoutcomeword(self, rep): - if rep.passed: - return "PASS", dict(green=True) - elif rep.failed: - return "FAIL", dict(red=True) - elif rep.skipped: - return "SKIP" - else: - return "???", dict(red=True) - - def gettestid(self, item, relative=True): - fspath = item.fspath - chain = [x for x in item.listchain() if x.fspath == fspath] - chain = chain[1:] - names = [x.name for x in chain if x.name != "()"] - path = item.fspath - if relative: - relpath = path.relto(self.curdir) - if relpath: - path = relpath - names.insert(0, str(path)) - return "::".join(names) - - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.write_line("INTERNALERROR> " + line) - - def pytest_plugin_registered(self, plugin): - if self.config.option.traceconfig: - msg = "PLUGIN registered: %s" %(plugin,) - # XXX this event may happen during setup/teardown time - # which unfortunately captures our output here - # which garbles our output if we use self.write_line - self.write_line(msg) - - @optionalhook - def pytest_gwmanage_newgateway(self, gateway, platinfo): - #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec)) - d = {} - d['version'] = repr_pythonversion(platinfo.version_info) - d['id'] = gateway.id - d['spec'] = gateway.spec._spec - d['platform'] = platinfo.platform - if self.config.option.verbose: - d['extra'] = "- " + platinfo.executable - else: - d['extra'] = "" - d['cwd'] = platinfo.cwd - infoline = ("[%(id)s] %(spec)s -- platform %(platform)s, " - "Python %(version)s " - "cwd: %(cwd)s" - "%(extra)s" % d) - self.write_line(infoline) - self.gateway2info[gateway] = infoline - - @optionalhook - def pytest_testnodeready(self, node): - self.write_line("[%s] txnode ready to receive tests" %(node.gateway.id,)) - - @optionalhook - def pytest_testnodedown(self, node, error): - if error: - 
self.write_line("[%s] node down, error: %s" %(node.gateway.id, error)) - - @optionalhook - def pytest_rescheduleitems(self, items): - if self.config.option.debug: - self.write_sep("!", "RESCHEDULING %s " %(items,)) - - @optionalhook - def pytest_looponfailinfo(self, failreports, rootdirs): - if failreports: - self.write_sep("#", "LOOPONFAILING", red=True) - for report in failreports: - loc = self._getcrashline(report) - self.write_line(loc, red=True) - self.write_sep("#", "waiting for changes") - for rootdir in rootdirs: - self.write_line("### Watching: %s" %(rootdir,), bold=True) - - - def pytest_trace(self, category, msg): - if self.config.option.debug or \ - self.config.option.traceconfig and category.find("config") != -1: - self.write_line("[%s] %s" %(category, msg)) - - def pytest_deselected(self, items): - self.stats.setdefault('deselected', []).append(items) - - def pytest_itemstart(self, item, node=None): - if getattr(self.config.option, 'dist', 'no') != "no": - # for dist-testing situations itemstart means we - # queued the item for sending, not interesting (unless debugging) - if self.config.option.debug: - line = self._reportinfoline(item) - extra = "" - if node: - extra = "-> [%s]" % node.gateway.id - self.write_ensure_prefix(line, extra) - else: - if self.config.option.verbose: - line = self._reportinfoline(item) - self.write_ensure_prefix(line, "") - else: - # ensure that the path is printed before the - # 1st test of a module starts running - - self.write_fspath_result(self._getfspath(item), "") - - def pytest__teardown_final_logerror(self, report): - self.stats.setdefault("error", []).append(report) - - def pytest_runtest_logreport(self, report): - rep = report - cat, letter, word = self.getcategoryletterword(rep) - if not letter and not word: - # probably passed setup/teardown - return - if isinstance(word, tuple): - word, markup = word - else: - markup = {} - self.stats.setdefault(cat, []).append(rep) - if not self.config.option.verbose: - 
self.write_fspath_result(self._getfspath(rep.item), letter) - else: - line = self._reportinfoline(rep.item) - if not hasattr(rep, 'node'): - self.write_ensure_prefix(line, word, **markup) - else: - self.ensure_newline() - if hasattr(rep, 'node'): - self._tw.write("[%s] " % rep.node.gateway.id) - self._tw.write(word, **markup) - self._tw.write(" " + line) - self.currentfspath = -2 - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.stats.setdefault("error", []).append(report) - msg = report.longrepr.reprcrash.message - self.write_fspath_result(report.collector.fspath, "E") - elif report.skipped: - self.stats.setdefault("skipped", []).append(report) - self.write_fspath_result(report.collector.fspath, "S") - - def pytest_sessionstart(self, session): - self.write_sep("=", "test session starts", bold=True) - self._sessionstarttime = py.std.time.time() - - verinfo = ".".join(map(str, sys.version_info[:3])) - msg = "platform %s -- Python %s" % (sys.platform, verinfo) - msg += " -- pytest-%s" % (py.__version__) - if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None): - msg += " -- " + str(sys.executable) - self.write_line(msg) - lines = self.config.hook.pytest_report_header(config=self.config) - lines.reverse() - for line in flatten(lines): - self.write_line(line) - for i, testarg in enumerate(self.config.args): - self.write_line("test object %d: %s" %(i+1, testarg)) - - def pytest_sessionfinish(self, exitstatus, __multicall__): - __multicall__.execute() - self._tw.line("") - if exitstatus in (0, 1, 2): - self.summary_errors() - self.summary_failures() - self.config.hook.pytest_terminal_summary(terminalreporter=self) - if exitstatus == 2: - self._report_keyboardinterrupt() - self.summary_deselected() - self.summary_stats() - - def pytest_keyboard_interrupt(self, excinfo): - self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) - - def _report_keyboardinterrupt(self): - 
excrepr = self._keyboardinterrupt_memo - msg = excrepr.reprcrash.message - self.write_sep("!", msg) - if "KeyboardInterrupt" in msg: - if self.config.getvalue("fulltrace"): - excrepr.toterminal(self._tw) - else: - excrepr.reprcrash.toterminal(self._tw) - - def _getcrashline(self, report): - try: - return report.longrepr.reprcrash - except AttributeError: - return str(report.longrepr)[:50] - - def _reportinfoline(self, item): - collect_fspath = self._getfspath(item) - fspath, lineno, msg = self._getreportinfo(item) - if fspath and fspath != collect_fspath: - fspath = "%s <- %s" % ( - self.curdir.bestrelpath(collect_fspath), - self.curdir.bestrelpath(fspath)) - elif fspath: - fspath = self.curdir.bestrelpath(fspath) - if lineno is not None: - lineno += 1 - if fspath and lineno and msg: - line = "%(fspath)s:%(lineno)s: %(msg)s" - elif fspath and msg: - line = "%(fspath)s: %(msg)s" - elif fspath and lineno: - line = "%(fspath)s:%(lineno)s %(extrapath)s" - else: - line = "[noreportinfo]" - return line % locals() + " " - - def _getfailureheadline(self, rep): - if hasattr(rep, "collector"): - return str(rep.collector.fspath) - elif hasattr(rep, 'item'): - fspath, lineno, msg = self._getreportinfo(rep.item) - return msg - else: - return "test session" - - def _getreportinfo(self, item): - try: - return item.__reportinfo - except AttributeError: - pass - reportinfo = item.config.hook.pytest_report_iteminfo(item=item) - # cache on item - item.__reportinfo = reportinfo - return reportinfo - - def _getfspath(self, item): - try: - return item.fspath - except AttributeError: - fspath, lineno, msg = self._getreportinfo(item) - return fspath - - # - # summaries for sessionfinish - # - - def summary_failures(self): - tbstyle = self.config.getvalue("tbstyle") - if 'failed' in self.stats and tbstyle != "no": - self.write_sep("=", "FAILURES") - for rep in self.stats['failed']: - if tbstyle == "line": - line = self._getcrashline(rep) - self.write_line(line) - else: - msg = 
self._getfailureheadline(rep) - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def summary_errors(self): - if 'error' in self.stats and self.config.option.tbstyle != "no": - self.write_sep("=", "ERRORS") - for rep in self.stats['error']: - msg = self._getfailureheadline(rep) - if not hasattr(rep, 'when'): - # collect - msg = "ERROR during collection " + msg - elif rep.when == "setup": - msg = "ERROR at setup of " + msg - elif rep.when == "teardown": - msg = "ERROR at teardown of " + msg - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def write_platinfo(self, rep): - if hasattr(rep, 'node'): - self.write_line(self.gateway2info.get( - rep.node.gateway, - "node %r (platinfo not found? strange)") - [:self._tw.fullwidth-1]) - - def summary_stats(self): - session_duration = py.std.time.time() - self._sessionstarttime - - keys = "failed passed skipped deselected".split() - for key in self.stats.keys(): - if key not in keys: - keys.append(key) - parts = [] - for key in keys: - val = self.stats.get(key, None) - if val: - parts.append("%d %s" %(len(val), key)) - line = ", ".join(parts) - # XXX coloring - self.write_sep("=", "%s in %.2f seconds" %(line, session_duration)) - - def summary_deselected(self): - if 'deselected' in self.stats: - self.write_sep("=", "%d tests deselected by %r" %( - len(self.stats['deselected']), self.config.option.keyword), bold=True) - - -class CollectonlyReporter: - INDENT = " " - - def __init__(self, config, out=None): - self.config = config - if out is None: - out = py.std.sys.stdout - self.out = py.io.TerminalWriter(out) - self.indent = "" - self._failed = [] - - def outindent(self, line): - self.out.line(self.indent + str(line)) - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.out.line("INTERNALERROR> " + line) - - def pytest_collectstart(self, collector): - self.outindent(collector) - self.indent += self.INDENT - - def 
pytest_itemstart(self, item, node=None): - self.outindent(item) - - def pytest_collectreport(self, report): - if not report.passed: - self.outindent("!!! %s !!!" % report.longrepr.reprcrash.message) - self._failed.append(report) - self.indent = self.indent[:-len(self.INDENT)] - - def pytest_sessionfinish(self, session, exitstatus): - if self._failed: - self.out.sep("!", "collection failures") - for rep in self._failed: - rep.toterminal(self.out) - - -def repr_pythonversion(v=None): - if v is None: - v = sys.version_info - try: - return "%s.%s.%s-%s-%s" % v - except (TypeError, ValueError): - return str(v) - -def flatten(l): - for x in l: - if isinstance(x, (list, tuple)): - for y in flatten(x): - yield y - else: - yield x - -from py._test.session import Session -class ShowFuncargSession(Session): - def main(self, colitems): - self.fspath = py.path.local() - self.sessionstarts() - try: - self.showargs(colitems[0]) - finally: - self.sessionfinishes(exitstatus=1) - - def showargs(self, colitem): - tw = py.io.TerminalWriter() - from py._test.funcargs import getplugins - from py._test.funcargs import FuncargRequest - plugins = getplugins(colitem, withpy=True) - verbose = self.config.getvalue("verbose") - for plugin in plugins: - available = [] - for name, factory in vars(plugin).items(): - if name.startswith(FuncargRequest._argprefix): - name = name[len(FuncargRequest._argprefix):] - if name not in available: - available.append([name, factory]) - if available: - pluginname = plugin.__name__ - for name, factory in available: - loc = self.getlocation(factory) - if verbose: - funcargspec = "%s -- %s" %(name, loc,) - else: - funcargspec = name - tw.line(funcargspec, green=True) - doc = factory.__doc__ or "" - if doc: - for line in doc.split("\n"): - tw.line(" " + line.strip()) - else: - tw.line(" %s: no docstring available" %(loc,), - red=True) - - def getlocation(self, function): - import inspect - fn = py.path.local(inspect.getfile(function)) - lineno = 
py.builtin._getcode(function).co_firstlineno - if fn.relto(self.fspath): - fn = fn.relto(self.fspath) - return "%s:%d" %(fn, lineno+1) diff --git a/py/apipkg.py b/py/apipkg.py deleted file mode 100644 --- a/py/apipkg.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -apipkg: control the exported namespace of a python package. - -see http://pypi.python.org/pypi/apipkg - -(c) holger krekel, 2009 - MIT license -""" -import sys -from types import ModuleType - -__version__ = "1.0b6" - -def initpkg(pkgname, exportdefs): - """ initialize given package from the export definitions. """ - mod = ApiModule(pkgname, exportdefs, implprefix=pkgname) - oldmod = sys.modules[pkgname] - mod.__file__ = getattr(oldmod, '__file__', None) - mod.__version__ = getattr(oldmod, '__version__', '0') - for name in ('__path__', '__loader__'): - if hasattr(oldmod, name): - setattr(mod, name, getattr(oldmod, name)) - sys.modules[pkgname] = mod - -def importobj(modpath, attrname): - module = __import__(modpath, None, None, ['__doc__']) - return getattr(module, attrname) - -class ApiModule(ModuleType): - def __init__(self, name, importspec, implprefix=None): - self.__name__ = name - self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] - self.__map__ = {} - self.__implprefix__ = implprefix or name - for name, importspec in importspec.items(): - if isinstance(importspec, dict): - subname = '%s.%s'%(self.__name__, name) - apimod = ApiModule(subname, importspec, implprefix) - sys.modules[subname] = apimod - setattr(self, name, apimod) - else: - modpath, attrname = importspec.split(':') - if modpath[0] == '.': - modpath = implprefix + modpath - if name == '__doc__': - self.__doc__ = importobj(modpath, attrname) - else: - self.__map__[name] = (modpath, attrname) - - def __repr__(self): - l = [] - if hasattr(self, '__version__'): - l.append("version=" + repr(self.__version__)) - if hasattr(self, '__file__'): - l.append('from ' + repr(self.__file__)) - if l: - return '' % (self.__name__, " ".join(l)) - 
return '' % (self.__name__,) - - def __makeattr(self, name): - """lazily compute value for name or raise AttributeError if unknown.""" - target = None - if '__onfirstaccess__' in self.__map__: - target = self.__map__.pop('__onfirstaccess__') - importobj(*target)() - try: - modpath, attrname = self.__map__[name] - except KeyError: - if target is not None and name != '__onfirstaccess__': - # retry, onfirstaccess might have set attrs - return getattr(self, name) - raise AttributeError(name) - else: - result = importobj(modpath, attrname) - setattr(self, name, result) - try: - del self.__map__[name] - except KeyError: - pass # in a recursive-import situation a double-del can happen - return result - - __getattr__ = __makeattr - - def __dict__(self): - # force all the content of the module to be loaded when __dict__ is read - dictdescr = ModuleType.__dict__['__dict__'] - dict = dictdescr.__get__(self) - if dict is not None: - hasattr(self, 'some') - for name in self.__all__: - try: - self.__makeattr(name) - except AttributeError: - pass - return dict - __dict__ = property(__dict__) diff --git a/pypy/tool/test/test_conftest1.py b/pypy/tool/test/test_conftest1.py deleted file mode 100644 --- a/pypy/tool/test/test_conftest1.py +++ /dev/null @@ -1,32 +0,0 @@ - -import py - -innertest = py.path.local(__file__).dirpath('conftest1_innertest.py') -pytest_plugins = "pytest_pytester" - -class TestPyPyTests: - def test_select_interplevel(self, testdir): - sorter = testdir.inline_run("-k", "interplevel", innertest) - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - assert not skipped and not failed - for repevent in passed: - assert repevent.item.name in ('test_something', 'test_method') - - def test_select_applevel(self, testdir): - sorter = testdir.inline_run("-k", "applevel", innertest) - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - assert not skipped and not failed - for repevent in passed: - assert repevent.item.name in 
('app_test_something', 'test_method_app') - - def test_appdirect(self, testdir): - sorter = testdir.inline_run(innertest, '-k', 'applevel', '--runappdirect') - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - print passed - names = [x.item.name for x in passed] - assert 'app_test_something' in names - assert 'test_method_app' in names - diff --git a/py/bin/win32/py.cleanup.cmd b/py/bin/win32/py.cleanup.cmd deleted file mode 100644 --- a/py/bin/win32/py.cleanup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.cleanup" %* \ No newline at end of file diff --git a/py/_plugin/pytest_recwarn.py b/py/_plugin/pytest_recwarn.py deleted file mode 100644 --- a/py/_plugin/pytest_recwarn.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -helpers for asserting deprecation and other warnings. - -Example usage ---------------------- - -You can use the ``recwarn`` funcarg to track -warnings within a test function: - -.. sourcecode:: python - - def test_hello(recwarn): - from warnings import warn - warn("hello", DeprecationWarning) - w = recwarn.pop(DeprecationWarning) - assert issubclass(w.category, DeprecationWarning) - assert 'hello' in str(w.message) - assert w.filename - assert w.lineno - -You can also call a global helper for checking -taht a certain function call yields a Deprecation -warning: - -.. sourcecode:: python - - import py - - def test_global(): - py.test.deprecated_call(myfunction, 17) - - -""" - -import py -import os - -def pytest_funcarg__recwarn(request): - """Return a WarningsRecorder instance that provides these methods: - - * ``pop(category=None)``: return last warning matching the category. 
- * ``clear()``: clear list of warnings - """ - warnings = WarningsRecorder() - request.addfinalizer(warnings.finalize) - return warnings - -def pytest_namespace(): - return {'deprecated_call': deprecated_call} - -def deprecated_call(func, *args, **kwargs): - """ assert that calling func(*args, **kwargs) - triggers a DeprecationWarning. - """ - warningmodule = py.std.warnings - l = [] - oldwarn_explicit = getattr(warningmodule, 'warn_explicit') - def warn_explicit(*args, **kwargs): - l.append(args) - oldwarn_explicit(*args, **kwargs) - oldwarn = getattr(warningmodule, 'warn') - def warn(*args, **kwargs): - l.append(args) - oldwarn(*args, **kwargs) - - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - try: - ret = func(*args, **kwargs) - finally: - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - if not l: - #print warningmodule - __tracebackhide__ = True - raise AssertionError("%r did not produce DeprecationWarning" %(func,)) - return ret - - -class RecordedWarning: - def __init__(self, message, category, filename, lineno, line): - self.message = message - self.category = category - self.filename = filename - self.lineno = lineno - self.line = line - -class WarningsRecorder: - def __init__(self): - warningmodule = py.std.warnings - self.list = [] - def showwarning(message, category, filename, lineno, line=0): - self.list.append(RecordedWarning( - message, category, filename, lineno, line)) - try: - self.old_showwarning(message, category, - filename, lineno, line=line) - except TypeError: - # < python2.6 - self.old_showwarning(message, category, filename, lineno) - self.old_showwarning = warningmodule.showwarning - warningmodule.showwarning = showwarning - - def pop(self, cls=Warning): - """ pop the first recorded warning, raise exception if not exists.""" - for i, w in enumerate(self.list): - if issubclass(w.category, cls): - return self.list.pop(i) - __tracebackhide__ = True - assert 0, "%r not found in %r" %(cls, 
self.list) - - #def resetregistry(self): - # import warnings - # warnings.onceregistry.clear() - # warnings.__warningregistry__.clear() - - def clear(self): - self.list[:] = [] - - def finalize(self): - py.std.warnings.showwarning = self.old_showwarning diff --git a/py/_test/cmdline.py b/py/_test/cmdline.py deleted file mode 100644 --- a/py/_test/cmdline.py +++ /dev/null @@ -1,24 +0,0 @@ -import py -import sys - -# -# main entry point -# - -def main(args=None): - if args is None: - args = sys.argv[1:] - config = py.test.config - try: - config.parse(args) - config.pluginmanager.do_configure(config) - session = config.initsession() - colitems = config.getinitialnodes() - exitstatus = session.main(colitems) - config.pluginmanager.do_unconfigure(config) - except config.Error: - e = sys.exc_info()[1] - sys.stderr.write("ERROR: %s\n" %(e.args[0],)) - exitstatus = 3 - py.test.config = py.test.config.__class__() - return exitstatus diff --git a/py/_cmdline/pylookup.py b/py/_cmdline/pylookup.py deleted file mode 100755 --- a/py/_cmdline/pylookup.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.lookup [search_directory] SEARCH_STRING [options] - -Looks recursively at Python files for a SEARCH_STRING, starting from the -present working directory. 
Prints the line, with the filename and line-number -prepended.""" - -import sys, os -import py -from py.io import ansi_print, get_terminal_width -import re - -def rec(p): - return p.check(dotfile=0) - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase", - help="ignore case distinctions") -parser.add_option("-C", "--context", action="store", type="int", dest="context", - default=0, help="How many lines of output to show") - -terminal_width = get_terminal_width() - -def find_indexes(search_line, string): - indexes = [] - before = 0 - while 1: - i = search_line.find(string, before) - if i == -1: - break - indexes.append(i) - before = i + len(string) - return indexes - -def main(): - (options, args) = parser.parse_args() - if len(args) == 2: - search_dir, string = args - search_dir = py.path.local(search_dir) - else: - search_dir = py.path.local() - string = args[0] - if options.ignorecase: - string = string.lower() - for x in search_dir.visit('*.py', rec): - # match filename directly - s = x.relto(search_dir) - if options.ignorecase: - s = s.lower() - if s.find(string) != -1: - sys.stdout.write("%s: filename matches %r" %(x, string) + "\n") - - try: - s = x.read() - except py.error.ENOENT: - pass # whatever, probably broken link (ie emacs lock) - searchs = s - if options.ignorecase: - searchs = s.lower() - if s.find(string) != -1: - lines = s.splitlines() - if options.ignorecase: - searchlines = s.lower().splitlines() - else: - searchlines = lines - for i, (line, searchline) in enumerate(zip(lines, searchlines)): - indexes = find_indexes(searchline, string) - if not indexes: - continue - if not options.context: - sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1)) - last_index = 0 - for index in indexes: - sys.stdout.write(line[last_index: index]) - ansi_print(line[index: index+len(string)], - file=sys.stdout, esc=31, newline=False) - last_index = index + len(string) - 
sys.stdout.write(line[last_index:] + "\n") - else: - context = (options.context)/2 - for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)): - print("%s:%d: %s" %(x.relto(search_dir), count+1, lines[count].rstrip())) - print("-" * terminal_width) diff --git a/py/bin/py.test b/py/bin/py.test --- a/py/bin/py.test +++ b/py/bin/py.test @@ -1,10 +1,3 @@ #!/usr/bin/env python - -# somewhat PYPY specific hack: -# let's make sure setuptools does show a warning when our inlined 'py' -# version shadows a properly installed one. -import warnings -warnings.filterwarnings("ignore", - "Module py was already imported", category=UserWarning) -from _findpy import py -py.cmdline.pytest() +from _findpy import pytest +pytest.main() diff --git a/py/_path/gateway/channeltest2.py b/py/_path/gateway/channeltest2.py deleted file mode 100644 --- a/py/_path/gateway/channeltest2.py +++ /dev/null @@ -1,21 +0,0 @@ -import py -from remotepath import RemotePath - - -SRC = open('channeltest.py', 'r').read() - -SRC += ''' -import py -srv = PathServer(channel.receive()) -channel.send(srv.p2c(py.path.local("/tmp"))) -''' - - -#gw = execnet.SshGateway('codespeak.net') -gw = execnet.PopenGateway() -gw.remote_init_threads(5) -c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr) -subchannel = gw._channelfactory.new() -c.send(subchannel) - -p = RemotePath(subchannel, c.receive()) diff --git a/py/_test/session.py b/py/_test/session.py deleted file mode 100644 --- a/py/_test/session.py +++ /dev/null @@ -1,135 +0,0 @@ -""" basic test session implementation. 
- -* drives collection of tests -* triggers executions of tests -* produces events used by reporting -""" - -import py - -# exitcodes for the command line -EXIT_OK = 0 -EXIT_TESTSFAILED = 1 -EXIT_INTERRUPTED = 2 -EXIT_INTERNALERROR = 3 -EXIT_NOHOSTS = 4 - -# imports used for genitems() -Item = py.test.collect.Item -Collector = py.test.collect.Collector - -class Session(object): - nodeid = "" - class Interrupted(KeyboardInterrupt): - """ signals an interrupted test run. """ - __module__ = 'builtins' # for py3 - - def __init__(self, config): - self.config = config - self.pluginmanager = config.pluginmanager # shortcut - self.pluginmanager.register(self) - self._testsfailed = 0 - self._nomatch = False - self.shouldstop = False - - def genitems(self, colitems, keywordexpr=None): - """ yield Items from iterating over the given colitems. """ - if colitems: - colitems = list(colitems) - while colitems: - next = colitems.pop(0) - if isinstance(next, (tuple, list)): - colitems[:] = list(next) + colitems - continue - assert self.pluginmanager is next.config.pluginmanager - if isinstance(next, Item): - remaining = self.filteritems([next]) - if remaining: - self.config.hook.pytest_itemstart(item=next) - yield next - else: - assert isinstance(next, Collector) - self.config.hook.pytest_collectstart(collector=next) - rep = self.config.hook.pytest_make_collect_report(collector=next) - if rep.passed: - for x in self.genitems(rep.result, keywordexpr): - yield x - self.config.hook.pytest_collectreport(report=rep) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - - def filteritems(self, colitems): - """ return items to process (some may be deselected)""" - keywordexpr = self.config.option.keyword - if not keywordexpr or self._nomatch: - return colitems - if keywordexpr[-1] == ":": - keywordexpr = keywordexpr[:-1] - remaining = [] - deselected = [] - for colitem in colitems: - if isinstance(colitem, Item): - if colitem._skipbykeyword(keywordexpr): - 
deselected.append(colitem) - continue - remaining.append(colitem) - if deselected: - self.config.hook.pytest_deselected(items=deselected) - if self.config.option.keyword.endswith(":"): - self._nomatch = True - return remaining - - def collect(self, colitems): - keyword = self.config.option.keyword - for x in self.genitems(colitems, keyword): - yield x - - def sessionstarts(self): - """ setup any neccessary resources ahead of the test run. """ - self.config.hook.pytest_sessionstart(session=self) - - def pytest_runtest_logreport(self, report): - if report.failed: - self._testsfailed += 1 - maxfail = self.config.getvalue("maxfail") - if maxfail and self._testsfailed >= maxfail: - self.shouldstop = "stopping after %d failures" % ( - self._testsfailed) - pytest_collectreport = pytest_runtest_logreport - - def sessionfinishes(self, exitstatus): - """ teardown any resources after a test run. """ - self.config.hook.pytest_sessionfinish( - session=self, - exitstatus=exitstatus, - ) - - def main(self, colitems): - """ main loop for running tests. 
""" - self.shouldstop = False - self.sessionstarts() - exitstatus = EXIT_OK - try: - self._mainloop(colitems) - if self._testsfailed: - exitstatus = EXIT_TESTSFAILED - self.sessionfinishes(exitstatus=exitstatus) - except KeyboardInterrupt: - excinfo = py.code.ExceptionInfo() - self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo) - exitstatus = EXIT_INTERRUPTED - except: - excinfo = py.code.ExceptionInfo() - self.config.pluginmanager.notify_exception(excinfo) - exitstatus = EXIT_INTERNALERROR - if exitstatus in (EXIT_INTERNALERROR, EXIT_INTERRUPTED): - self.sessionfinishes(exitstatus=exitstatus) - return exitstatus - - def _mainloop(self, colitems): - for item in self.collect(colitems): - if not self.config.option.collectonly: - item.config.hook.pytest_runtest_protocol(item=item) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - diff --git a/py/_code/oldmagic2.py b/py/_code/oldmagic2.py deleted file mode 100644 --- a/py/_code/oldmagic2.py +++ /dev/null @@ -1,6 +0,0 @@ - -import py - -py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2) - -from py.code import _AssertionError as AssertionError diff --git a/py/bin/py.svnwcrevert b/py/bin/py.svnwcrevert deleted file mode 100755 --- a/py/bin/py.svnwcrevert +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pysvnwcrevert() \ No newline at end of file From commits-noreply at bitbucket.org Tue Mar 8 13:05:40 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 13:05:40 +0100 (CET) Subject: [pypy-svn] pypy default: substitute now unsupported "disabled = True" Message-ID: <20110308120540.0A81936C20A@codespeak.net> Author: holger krekel Branch: Changeset: r42473:804b4ef0947d Date: 2011-03-08 13:05 +0100 http://bitbucket.org/pypy/pypy/changeset/804b4ef0947d/ Log: substitute now unsupported "disabled = True" diff --git a/lib_pypy/distributed/test/test_distributed.py 
b/lib_pypy/distributed/test/test_distributed.py --- a/lib_pypy/distributed/test/test_distributed.py +++ b/lib_pypy/distributed/test/test_distributed.py @@ -4,9 +4,11 @@ from pypy.conftest import gettestobjspace import sys +import pytest class AppTestNoProxy(object): - disabled = True + + @pytest.mark.xfail(run=False) def test_init(self): raises(ImportError, "import distributed") From commits-noreply at bitbucket.org Tue Mar 8 13:56:39 2011 From: commits-noreply at bitbucket.org (tav) Date: Tue, 8 Mar 2011 13:56:39 +0100 (CET) Subject: [pypy-svn] pypy default: Reapplied fix for failing tests on systems w/o graphviz. Message-ID: <20110308125639.BF9F636C20A@codespeak.net> Author: tav Branch: Changeset: r42474:5ee2d7993eb1 Date: 2011-03-08 12:56 +0000 http://bitbucket.org/pypy/pypy/changeset/5ee2d7993eb1/ Log: Reapplied fix for failing tests on systems w/o graphviz. diff --git a/pypy/doc/pytest_restdoc.py b/pypy/doc/pytest_restdoc.py --- a/pypy/doc/pytest_restdoc.py +++ b/pypy/doc/pytest_restdoc.py @@ -74,14 +74,19 @@ py.test.importorskip("docutils") self.register_linkrole() from docutils.utils import SystemMessage - try: + try: self._checkskip(path, self.project.get_htmloutputpath(path)) self.project.process(path) - except KeyboardInterrupt: - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") + except KeyboardInterrupt: + raise + except SystemExit, error: + if error.message == "ERROR: dot not found": + py.test.skip("system doesn't have graphviz installed") + return + raise + except SystemMessage: + # we assume docutils printed info on stdout + py.test.fail("docutils processing failed, see captured stderr") def register_linkrole(self): #directive.register_linkrole('api', self.resolve_linkrole) From commits-noreply at bitbucket.org Tue Mar 8 14:04:15 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 14:04:15 +0100 (CET) Subject: [pypy-svn] pypy default: 
port a cleanup and some windows related fixes from _pytest trunk (thanks amaury) Message-ID: <20110308130415.343E136C209@codespeak.net> Author: holger krekel Branch: Changeset: r42475:84e4dcf5099f Date: 2011-03-08 14:03 +0100 http://bitbucket.org/pypy/pypy/changeset/84e4dcf5099f/ Log: port a cleanup and some windows related fixes from _pytest trunk (thanks amaury) diff --git a/_pytest/terminal.py b/_pytest/terminal.py --- a/_pytest/terminal.py +++ b/_pytest/terminal.py @@ -283,7 +283,7 @@ return #for i, testarg in enumerate(self.config.args): # self.write_line("test path %d: %s" %(i+1, testarg)) - + def _printcollecteditems(self, items): # to print out items and their parent collectors # we take care to leave out Instances aka () @@ -335,19 +335,19 @@ excrepr.reprcrash.toterminal(self._tw) def _locationline(self, collect_fspath, fspath, lineno, domain): - if fspath and fspath != collect_fspath: + # collect_fspath comes from testid which has a "/"-normalized path + if fspath and fspath.replace("\\", "/") != collect_fspath: fspath = "%s <- %s" % (collect_fspath, fspath) - if lineno is not None: - lineno += 1 - if fspath and lineno and domain: - line = "%(fspath)s:%(lineno)s: %(domain)s" - elif fspath and domain: - line = "%(fspath)s: %(domain)s" - elif fspath and lineno: - line = "%(fspath)s:%(lineno)s %(extrapath)s" + if fspath: + line = str(fspath) + if lineno is not None: + lineno += 1 + line += ":" + str(lineno) + if domain: + line += ": " + str(domain) else: - line = "[nolocation]" - return line % locals() + " " + line = "[location]" + return line + " " def _getfailureheadline(self, rep): if hasattr(rep, 'location'): From commits-noreply at bitbucket.org Tue Mar 8 14:05:06 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 14:05:06 +0100 (CET) Subject: [pypy-svn] pypy pytest2: pytest2 was merged Message-ID: <20110308130506.EE73E36C209@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42476:2e2e345e47de Date: 2011-03-08 14:04 
+0100 http://bitbucket.org/pypy/pypy/changeset/2e2e345e47de/ Log: pytest2 was merged From commits-noreply at bitbucket.org Tue Mar 8 17:10:43 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 17:10:43 +0100 (CET) Subject: [pypy-svn] pypy pytest2: small bug fixes due to pytest2 doing a full collection before running of tests Message-ID: <20110308161043.EAE2E36C209@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42477:c9d6fc564547 Date: 2011-03-08 17:10 +0100 http://bitbucket.org/pypy/pypy/changeset/c9d6fc564547/ Log: small bug fixes due to pytest2 doing a full collection before running of tests diff --git a/lib_pypy/distributed/test/test_distributed.py b/lib_pypy/distributed/test/test_distributed.py --- a/lib_pypy/distributed/test/test_distributed.py +++ b/lib_pypy/distributed/test/test_distributed.py @@ -4,9 +4,11 @@ from pypy.conftest import gettestobjspace import sys +import pytest class AppTestNoProxy(object): - disabled = True + + @pytest.mark.xfail(run=False) def test_init(self): raises(ImportError, "import distributed") diff --git a/lib_pypy/ctypes_config_cache/test/test_cache.py b/lib_pypy/ctypes_config_cache/test/test_cache.py --- a/lib_pypy/ctypes_config_cache/test/test_cache.py +++ b/lib_pypy/ctypes_config_cache/test/test_cache.py @@ -11,6 +11,7 @@ dir=True) tmpdir.join('dumpcache.py').write(dirpath.join('dumpcache.py').read()) path = sys.path[:] + sys.modules.pop('dumpcache', None) try: sys.path.insert(0, str(tmpdir)) execfile(str(filepath), {}) From commits-noreply at bitbucket.org Tue Mar 8 17:13:16 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 17:13:16 +0100 (CET) Subject: [pypy-svn] pypy default: doing the fix in the correct branch Message-ID: <20110308161316.A08C736C209@codespeak.net> Author: holger krekel Branch: Changeset: r42478:54b0eba04d66 Date: 2011-03-08 17:12 +0100 http://bitbucket.org/pypy/pypy/changeset/54b0eba04d66/ Log: doing the fix in the correct branch diff --git 
a/lib_pypy/ctypes_config_cache/test/test_cache.py b/lib_pypy/ctypes_config_cache/test/test_cache.py --- a/lib_pypy/ctypes_config_cache/test/test_cache.py +++ b/lib_pypy/ctypes_config_cache/test/test_cache.py @@ -11,6 +11,7 @@ dir=True) tmpdir.join('dumpcache.py').write(dirpath.join('dumpcache.py').read()) path = sys.path[:] + sys.modules.pop('dumpcache', None) try: sys.path.insert(0, str(tmpdir)) execfile(str(filepath), {}) From commits-noreply at bitbucket.org Tue Mar 8 17:13:16 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Tue, 8 Mar 2011 17:13:16 +0100 (CET) Subject: [pypy-svn] pypy pytest2: closing the branch Message-ID: <20110308161316.ECB69282B90@codespeak.net> Author: holger krekel Branch: pytest2 Changeset: r42479:379a90934b8a Date: 2011-03-08 17:12 +0100 http://bitbucket.org/pypy/pypy/changeset/379a90934b8a/ Log: closing the branch From commits-noreply at bitbucket.org Tue Mar 8 17:28:27 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 8 Mar 2011 17:28:27 +0100 (CET) Subject: [pypy-svn] pypy default: Remove this test, makes no sense whatsoever Message-ID: <20110308162827.623AD36C20B@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42480:0c4fd6c50446 Date: 2011-03-08 08:28 -0800 http://bitbucket.org/pypy/pypy/changeset/0c4fd6c50446/ Log: Remove this test, makes no sense whatsoever diff --git a/lib_pypy/distributed/test/test_distributed.py b/lib_pypy/distributed/test/test_distributed.py --- a/lib_pypy/distributed/test/test_distributed.py +++ b/lib_pypy/distributed/test/test_distributed.py @@ -6,12 +6,6 @@ import sys import pytest -class AppTestNoProxy(object): - - @pytest.mark.xfail(run=False) - def test_init(self): - raises(ImportError, "import distributed") - class AppTestDistributed(object): def setup_class(cls): cls.space = gettestobjspace(**{"objspace.std.withtproxy": True, From commits-noreply at bitbucket.org Tue Mar 8 20:14:56 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 8 Mar 2011 
20:14:56 +0100 (CET) Subject: [pypy-svn] pypy default: (armin, alex, fijal): Don't escape the frame when re-raising an exception. Message-ID: <20110308191456.5030B282BE9@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42481:eb44d135f334 Date: 2011-03-08 11:14 -0800 http://bitbucket.org/pypy/pypy/changeset/eb44d135f334/ Log: (armin, alex, fijal): Don't escape the frame when re-raising an exception. diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -538,11 +538,17 @@ unroller = SContinueLoop(startofloop) return self.unrollstack_and_jump(unroller) + @jit.unroll_safe def RAISE_VARARGS(self, nbargs, next_instr): space = self.space if nbargs == 0: - operror = space.getexecutioncontext().sys_exc_info() - if operror is None: + frame = self + while frame: + if frame.last_exception is not None: + operror = frame.last_exception + break + frame = frame.f_backref() + else: raise OperationError(space.w_TypeError, space.wrap("raise: no active exception to re-raise")) # re-raise, no new traceback obj will be attached diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -123,3 +123,20 @@ loop, = log.loops_by_id("except") ops = list(loop.ops_by_id("except", opcode="COMPARE_OP")) assert ops == [] + + def test_reraise(self): + def f(n): + i = 0 + while i < n: + try: + try: + raise KeyError + except KeyError: + raise + except KeyError: + i += 1 + return i + + log = self.run(f, [100000]) + assert log.result == 100000 + loop, = log.loops_by_filename(self.filepath) From commits-noreply at bitbucket.org Wed Mar 9 11:56:23 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Wed, 9 Mar 2011 11:56:23 +0100 (CET) Subject: [pypy-svn] buildbot default: fix regression showing wrong columns for pytest2-run tests 
Message-ID: <20110309105623.77E3C2A202B@codespeak.net> Author: holger krekel Branch: Changeset: r446:4cd435c048d2 Date: 2011-03-09 11:56 +0100 http://bitbucket.org/pypy/buildbot/changeset/4cd435c048d2/ Log: fix regression showing wrong columns for pytest2-run tests diff --git a/bot2/pypybuildbot/summary.py b/bot2/pypybuildbot/summary.py --- a/bot2/pypybuildbot/summary.py +++ b/bot2/pypybuildbot/summary.py @@ -67,12 +67,18 @@ def populate_one(self, name, shortrepr, longrepr=None): if shortrepr == '!': namekey = [name, ''] - else: - namekey = name.split(':', 1) + else: + # pytest2 and pytest1 use different separators/test id + # syntax support both here for now + if '.py::' in name: + namekey = name.split('::', 1) + else: + namekey = name.split(':', 1) if namekey[0].endswith('.py'): namekey[0] = namekey[0][:-3].replace('/', '.') if len(namekey) == 1: namekey.append('') + namekey[1] = namekey[1].replace("::", ".") namekey = tuple(namekey) self._outcomes[namekey] = shortrepr @@ -106,7 +112,7 @@ kind = None def add_one(): if kind is not None: - self.populate_one(name, kind, ''.join(longrepr)) + self.populate_one(name, kind, ''.join(longrepr)) for line in log.readlines(): first = line[0] if first == ' ': @@ -570,7 +576,7 @@ mod, testname = self.get_namekey(request) if mod is None: return "no such test" - return "%s %s" % (mod, testname) + return "%s %s" % (mod, testname) def body(self, request): t0 = time.time() @@ -660,7 +666,7 @@ request.site.buildbot_service.head_elements = old_head_elements def getTitle(self, request): - status = self.getStatus(request) + status = self.getStatus(request) return "%s: summaries of last %d revisions" % (status.getProjectName(), N) From commits-noreply at bitbucket.org Wed Mar 9 14:58:05 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 9 Mar 2011 14:58:05 +0100 (CET) Subject: [pypy-svn] pypy default: kill unused parameter Message-ID: <20110309135805.8C17C2A202C@codespeak.net> Author: Antonio Cuni Branch: Changeset: 
r42482:2b91e7d3bc35 Date: 2011-03-07 13:58 +0100 http://bitbucket.org/pypy/pypy/changeset/2b91e7d3bc35/ Log: kill unused parameter diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -38,7 +38,7 @@ class Log(object): - def __init__(self, func, rawtraces): + def __init__(self, rawtraces): storage = LoopStorage() traces = [SimpleParser.parse_from_input(rawtrace) for rawtrace in rawtraces] traces = storage.reconnect_loops(traces) diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -47,7 +47,7 @@ # parse the JIT log rawlog = logparser.parse_log_file(str(logfile)) rawtraces = logparser.extract_category(rawlog, 'jit-log-opt-') - log = Log(func, rawtraces) + log = Log(rawtraces) log.result = eval(stdout) return log From commits-noreply at bitbucket.org Wed Mar 9 14:58:06 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 9 Mar 2011 14:58:06 +0100 (CET) Subject: [pypy-svn] pypy default: add the possibility to pass an entire source file as string Message-ID: <20110309135806.2836A2A202C@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42483:3ccc4ac27684 Date: 2011-03-07 14:02 +0100 http://bitbucket.org/pypy/pypy/changeset/3ccc4ac27684/ Log: add the possibility to pass an entire source file as string diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -1,4 +1,5 @@ import sys +import types import subprocess import py from lib_pypy import disassembler @@ -19,12 +20,17 @@ def setup_method(self, meth): self.filepath = self.tmpdir.join(meth.im_func.func_name + '.py') - def run(self, func, args=[], 
**jitopts): + def run(self, func_or_src, args=[], **jitopts): + src = py.code.Source(func_or_src) + if isinstance(func_or_src, types.FunctionType): + funcname = func_or_src.func_name + else: + funcname = 'main' # write the snippet arglist = ', '.join(map(repr, args)) with self.filepath.open("w") as f: - f.write(str(py.code.Source(func)) + "\n") - f.write("print %s(%s)\n" % (func.func_name, arglist)) + f.write(str(src) + "\n") + f.write("print %s(%s)\n" % (funcname, arglist)) # # run a child pypy-c with logging enabled logfile = self.filepath.new(ext='.log') @@ -222,6 +228,16 @@ log = self.run(f, [30, 12]) assert log.result == 42 + def test_run_src(self): + src = """ + def f(a, b): + return a+b + def main(a, b): + return f(a, b) + """ + log = self.run(src, [30, 12]) + assert log.result == 42 + def test_parse_jitlog(self): def f(): i = 0 From commits-noreply at bitbucket.org Wed Mar 9 14:58:06 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 9 Mar 2011 14:58:06 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110309135806.6A5472A202D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42484:ea739f0d92e2 Date: 2011-03-09 14:52 +0100 http://bitbucket.org/pypy/pypy/changeset/ea739f0d92e2/ Log: merge heads From commits-noreply at bitbucket.org Wed Mar 9 14:58:07 2011 From: commits-noreply at bitbucket.org (Greg Price) Date: Wed, 9 Mar 2011 14:58:07 +0100 (CET) Subject: [pypy-svn] pypy default: fix segmentation fault on parsing some invalid Python Message-ID: <20110309135807.4D9B52A202C@codespeak.net> Author: Greg Price Branch: Changeset: r42485:0db4ac049ea2 Date: 2011-03-09 03:26 -0800 http://bitbucket.org/pypy/pypy/changeset/0db4ac049ea2/ Log: fix segmentation fault on parsing some invalid Python diff --git a/pypy/interpreter/astcompiler/test/test_compiler.py b/pypy/interpreter/astcompiler/test/test_compiler.py --- a/pypy/interpreter/astcompiler/test/test_compiler.py +++ b/pypy/interpreter/astcompiler/test/test_compiler.py @@ 
-70,6 +70,9 @@ st = simple_test + def error_test(self, source, exc_type): + py.test.raises(exc_type, self.simple_test, source, None, None) + def test_long_jump(self): func = """def f(x): y = 0 @@ -98,11 +101,13 @@ self.simple_test(stmt, "type(x)", int) def test_tuple_assign(self): + yield self.error_test, "() = 1", SyntaxError yield self.simple_test, "x,= 1,", "x", 1 yield self.simple_test, "x,y = 1,2", "x,y", (1, 2) yield self.simple_test, "x,y,z = 1,2,3", "x,y,z", (1, 2, 3) yield self.simple_test, "x,y,z,t = 1,2,3,4", "x,y,z,t", (1, 2, 3, 4) yield self.simple_test, "x,y,x,t = 1,2,3,4", "x,y,t", (3, 2, 4) + yield self.simple_test, "[] = []", "1", 1 yield self.simple_test, "[x]= 1,", "x", 1 yield self.simple_test, "[x,y] = [1,2]", "x,y", (1, 2) yield self.simple_test, "[x,y,z] = 1,2,3", "x,y,z", (1, 2, 3) diff --git a/pypy/interpreter/astcompiler/asthelpers.py b/pypy/interpreter/astcompiler/asthelpers.py --- a/pypy/interpreter/astcompiler/asthelpers.py +++ b/pypy/interpreter/astcompiler/asthelpers.py @@ -40,9 +40,10 @@ return self.elts def set_context(self, ctx): - for elt in self.elts: - elt.set_context(ctx) - self.ctx = ctx + if self.elts: + for elt in self.elts: + elt.set_context(ctx) + self.ctx = ctx class __extend__(ast.Attribute): From commits-noreply at bitbucket.org Wed Mar 9 14:58:07 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 9 Mar 2011 14:58:07 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110309135807.874C62A202D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42486:0fd0eb0022f5 Date: 2011-03-09 14:53 +0100 http://bitbucket.org/pypy/pypy/changeset/0fd0eb0022f5/ Log: merge heads From commits-noreply at bitbucket.org Wed Mar 9 16:13:25 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 9 Mar 2011 16:13:25 +0100 (CET) Subject: [pypy-svn] pypy default: (fijal, alex): fixed flow objspace/annotation of reraising exceptions Message-ID: <20110309151325.B33EA2A202B@codespeak.net> Author: 
Alex Gaynor Branch: Changeset: r42487:45053e4a78a0 Date: 2011-03-09 10:04 -0500 http://bitbucket.org/pypy/pypy/changeset/45053e4a78a0/ Log: (fijal, alex): fixed flow objspace/annotation of reraising exceptions diff --git a/pypy/interpreter/pyopcode.py b/pypy/interpreter/pyopcode.py --- a/pypy/interpreter/pyopcode.py +++ b/pypy/interpreter/pyopcode.py @@ -543,9 +543,10 @@ space = self.space if nbargs == 0: frame = self + ec = self.space.getexecutioncontext() while frame: if frame.last_exception is not None: - operror = frame.last_exception + operror = ec._convert_exc(frame.last_exception) break frame = frame.f_backref() else: diff --git a/pypy/interpreter/executioncontext.py b/pypy/interpreter/executioncontext.py --- a/pypy/interpreter/executioncontext.py +++ b/pypy/interpreter/executioncontext.py @@ -92,7 +92,7 @@ self.topframe = ec.gettopframe() self.w_tracefunc = ec.w_tracefunc self.profilefunc = ec.profilefunc - self.w_profilefuncarg = ec.w_profilefuncarg + self.w_profilefuncarg = ec.w_profilefuncarg self.is_tracing = ec.is_tracing def clear_framestack(self): @@ -195,13 +195,16 @@ self._trace(frame, 'exception', None, operationerr) #operationerr.print_detailed_traceback(self.space) + def _convert_exc(self, operr): + return operr + def sys_exc_info(self): # attn: the result is not the wrapped sys.exc_info() !!! """Implements sys.exc_info(). 
Return an OperationError instance or None.""" frame = self.gettopframe_nohidden() while frame: if frame.last_exception is not None: - return frame.last_exception + return self._convert_exc(frame.last_exception) frame = self.getnextframe_nohidden(frame) return None @@ -262,7 +265,7 @@ return True space = self.space - + # Tracing cases if event == 'call': w_callback = self.w_tracefunc @@ -303,7 +306,7 @@ if event == 'leaveframe': event = 'return' - assert self.is_tracing == 0 + assert self.is_tracing == 0 self.is_tracing += 1 try: try: @@ -506,7 +509,7 @@ for i in range(len(pending_w)): w_ref = pending_w[i] w_ref.activate_callback() - + class FrameTraceAction(AsyncAction): """An action that calls the local trace functions (w_f_trace).""" diff --git a/pypy/objspace/flow/flowcontext.py b/pypy/objspace/flow/flowcontext.py --- a/pypy/objspace/flow/flowcontext.py +++ b/pypy/objspace/flow/flowcontext.py @@ -148,14 +148,14 @@ class Replayer(Recorder): - + def __init__(self, block, booloutcome, nextreplayer): self.crnt_block = block self.listtoreplay = block.operations self.booloutcome = booloutcome self.nextreplayer = nextreplayer self.index = 0 - + def append(self, operation): operation.result = self.listtoreplay[self.index].result assert operation == self.listtoreplay[self.index], ( @@ -188,9 +188,9 @@ name=None): ExecutionContext.__init__(self, space) self.code = code - + self.w_globals = w_globals = space.wrap(globals) - + self.crnt_offset = -1 self.crnt_frame = None if closure is None: @@ -373,8 +373,7 @@ candidates.insert(0, newblock) self.pendingblocks.append(newblock) - def sys_exc_info(self): - operr = ExecutionContext.sys_exc_info(self) + def _convert_exc(self, operr): if isinstance(operr, operation.ImplicitOperationError): # re-raising an implicit operation makes it an explicit one w_value = operr.get_w_value(self.space) From commits-noreply at bitbucket.org Wed Mar 9 16:39:24 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 9 Mar 2011 16:39:24 
+0100 (CET) Subject: [pypy-svn] buildbot default: make sure to run test_pypy_c_new in the nightly builds Message-ID: <20110309153924.DED042A202B@codespeak.net> Author: Antonio Cuni Branch: Changeset: r447:341e3beaa587 Date: 2011-03-09 16:39 +0100 http://bitbucket.org/pypy/buildbot/changeset/341e3beaa587/ Log: make sure to run test_pypy_c_new in the nightly builds diff --git a/bot2/pypybuildbot/builds.py b/bot2/pypybuildbot/builds.py --- a/bot2/pypybuildbot/builds.py +++ b/bot2/pypybuildbot/builds.py @@ -202,7 +202,9 @@ logfiles={'pytestLog': 'cpython.log'})) if pypyjit: - # upload nightly build, if we're running jit tests + # kill this step when the transition to test_pypy_c_new has been + # completed + # "old" test_pypy_c self.addStep(PytestCmd( description="pypyjit tests", command=["python", "pypy/test_all.py", @@ -210,6 +212,15 @@ "--resultlog=pypyjit.log", "pypy/module/pypyjit/test"], logfiles={'pytestLog': 'pypyjit.log'})) + # + # "new" test_pypy_c + self.addStep(PytestCmd( + description="pypyjit tests", + command=["pypy/translator/goal/pypy-c", "pypy/test_all.py", + "--resultlog=pypyjit_new.log", + "pypy/module/pypyjit/test_pypy_c"], + logfiles={'pytestLog': 'pypyjit_new.log'})) + if pypyjit: kind = 'jit' else: From commits-noreply at bitbucket.org Thu Mar 10 11:09:06 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 10 Mar 2011 11:09:06 +0100 (CET) Subject: [pypy-svn] pypy default: simplify the JIT fast path for max Message-ID: <20110310100906.9911E282B9E@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42489:f52161d466dd Date: 2011-03-10 11:04 +0100 http://bitbucket.org/pypy/pypy/changeset/f52161d466dd/ Log: simplify the JIT fast path for max diff --git a/pypy/module/__builtin__/functional.py b/pypy/module/__builtin__/functional.py --- a/pypy/module/__builtin__/functional.py +++ b/pypy/module/__builtin__/functional.py @@ -11,7 +11,6 @@ from pypy.rlib.rarithmetic import r_uint, intmask from pypy.rlib.objectmodel import specialize 
from inspect import getsource, getfile -from pypy.rlib.jit import unroll_safe from pypy.rlib.rbigint import rbigint @@ -135,7 +134,6 @@ return space.newlist(res_w) - at unroll_safe @specialize.arg(2) def min_max(space, args, implementation_of): if implementation_of == "max": @@ -145,13 +143,12 @@ args_w = args.arguments_w if len(args_w) == 2 and not args.keywords: - # Unrollable case - w_max_item = None - for w_item in args_w: - if w_max_item is None or \ - space.is_true(compare(w_item, w_max_item)): - w_max_item = w_item - return w_max_item + # simple case, suitable for the JIT + w_arg0, w_arg1 = args_w + if space.is_true(compare(w_arg0, w_arg1)): + return w_arg0 + else: + return w_arg1 else: return min_max_loop(space, args, implementation_of) From commits-noreply at bitbucket.org Thu Mar 10 11:09:07 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 10 Mar 2011 11:09:07 +0100 (CET) Subject: [pypy-svn] pypy default: kill unneeded import Message-ID: <20110310100907.25B73282B9E@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42490:ecca7a8f6db6 Date: 2011-03-10 11:08 +0100 http://bitbucket.org/pypy/pypy/changeset/ecca7a8f6db6/ Log: kill unneeded import diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -10,7 +10,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib.objectmodel import current_object_addr_as_int, compute_hash from pypy.rlib.jit import hint, purefunction_promote, we_are_jitted -from pypy.rlib.jit import dont_look_inside, purefunction +from pypy.rlib.jit import purefunction from pypy.rlib.rarithmetic import intmask, r_uint from copy_reg import _HEAPTYPE From commits-noreply at bitbucket.org Thu Mar 10 14:04:14 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 10 Mar 2011 14:04:14 +0100 (CET) Subject: [pypy-svn] pypy default: using longs leads to different traces on 32 and 64 bits. 
Instead, we use Message-ID: <20110310130414.B90FA282BA1@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42491:2fcffa92e796 Date: 2011-03-08 12:15 +0100 http://bitbucket.org/pypy/pypy/changeset/2fcffa92e796/ Log: using longs leads to different traces on 32 and 64 bits. Instead, we use another function which is not seen by the JIT for this test diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -133,11 +133,11 @@ for op in self._ops_for_chunk(chunk, include_debug_merge_points): yield op - def print_ops(self, id=None): + def print_ops(self, id=None, **kwds): if id is None: ops = self.allops() else: - ops = self.ops_by_id(id) + ops = self.ops_by_id(id, **kwds) print '\n'.join(map(str, ops)) def ops_by_id(self, id, include_debug_merge_points=False, opcode=None): diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -124,6 +124,22 @@ ops = list(loop.ops_by_id("except", opcode="COMPARE_OP")) assert ops == [] + def test_simple_call(self): + src = """ + OFFSET = 0 + def f(i): + return i + 1 + OFFSET # ID: add + def main(n): + i = 0 + while i < n+OFFSET: + i = f(f(i)) # ID: call + return i + """ + log = self.run(src, [1000], threshold=400) + assert log.result == 1000 + entry_bridge, = log.loops_by_id('call', is_entry_bridge=True) + import pdb;pdb.set_trace() + def test_reraise(self): def f(n): i = 0 diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -400,20 +400,22 @@ def test_match_constants(self): def f(): - i = 0L # force it to long, so that we get calls to rbigint + from socket import 
ntohs + i = 0 while i < 1003: - i += 1L # ID: increment + i += 1 + j = ntohs(1) # ID: ntohs a = 0 return i log = self.run(f) - loop, = log.loops_by_id('increment') - assert loop.match_by_id('increment', """ - p12 = call(ConstClass(rbigint.add), p4, ConstPtr(ptr11), descr=...) + loop, = log.loops_by_id('ntohs') + assert loop.match_by_id('ntohs', """ + p12 = call(ConstClass(ntohs), 1, descr=...) guard_no_exception(descr=...) """) # - assert not loop.match_by_id('increment', """ - p12 = call(ConstClass(rbigint.SUB), p4, ConstPtr(ptr11), descr=...) + assert not loop.match_by_id('ntohs', """ + p12 = call(ConstClass(foobar), 1, descr=...) guard_no_exception(descr=...) """) From commits-noreply at bitbucket.org Thu Mar 10 15:05:59 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:05:59 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Add a pic Message-ID: <20110310140559.CC15D36C20D@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3362:38fa4c1edf8a Date: 2011-03-10 09:03 -0500 http://bitbucket.org/pypy/extradoc/changeset/38fa4c1edf8a/ Log: Add a pic diff --git a/talk/pycon2011/whyslow/carl_tests.jpg b/talk/pycon2011/whyslow/carl_tests.jpg new file mode 100644 index 0000000000000000000000000000000000000000..85fd1e6a5dc466e9ba985b44a3c1410e4ee50d0b GIT binary patch [cut] diff --git a/talk/ustour2011/author.latex b/talk/ustour2011/author.latex --- a/talk/ustour2011/author.latex +++ b/talk/ustour2011/author.latex @@ -4,5 +4,5 @@ \author[fijal, agaynor, arigato] {Maciej Fijałkowski \\ Alex Gaynor \\ Armin Rigo} -\institute{Google} +\institute{Mozilla} \date{7 March 2011} diff --git a/talk/ustour2011/Makefile b/talk/ustour2011/Makefile --- a/talk/ustour2011/Makefile +++ b/talk/ustour2011/Makefile @@ -6,5 +6,11 @@ sed 's/\\maketitle/\\input{title.latex}/' -i google-talk.latex || exit pdflatex google-talk.latex || exit +mozilla-talk.pdf: mozilla-talk.txt author.latex title.latex stylesheet.latex + rst2beamer 
--input-encoding=utf-8 --output-encoding=utf-8 --stylesheet=stylesheet.latex --documentoptions=14pt --theme=Warsaw --overlaybullets=False mozilla-talk.txt mozilla-talk.latex || exit + sed 's/\\date{}/\\input{author.latex}/' -i mozilla-talk.latex || exit + sed 's/\\maketitle/\\input{title.latex}/' -i mozilla-talk.latex || exit + pdflatex mozilla-talk.latex || exit + view: google-talk.pdf evince google-talk.pdf & \ No newline at end of file From commits-noreply at bitbucket.org Thu Mar 10 15:06:00 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:06:00 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: merge Message-ID: <20110310140600.1514C282BA1@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3363:6eccfee80cf4 Date: 2011-03-10 09:05 -0500 http://bitbucket.org/pypy/extradoc/changeset/6eccfee80cf4/ Log: merge From commits-noreply at bitbucket.org Thu Mar 10 15:07:12 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:07:12 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Add an attribute acces demo Message-ID: <20110310140712.C21F136C20D@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3364:c3a31b63583c Date: 2011-03-10 09:07 -0500 http://bitbucket.org/pypy/extradoc/changeset/c3a31b63583c/ Log: Add an attribute acces demo diff --git a/talk/pycon2011/whyslow/examples/attr.py b/talk/pycon2011/whyslow/examples/attr.py new file mode 100644 --- /dev/null +++ b/talk/pycon2011/whyslow/examples/attr.py @@ -0,0 +1,13 @@ + +class A(object): + def __init__(self, x): + self.x = x + +def f(): + a = A(1) + i = 0 + while i < 2000: + i += a.x + +if __name__ == '__main__': + f() From commits-noreply at bitbucket.org Thu Mar 10 15:08:02 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:08:02 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Work on talk. 
Message-ID: <20110310140802.6F42E36C20D@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3365:e94a05cec2b9 Date: 2011-03-10 09:07 -0500 http://bitbucket.org/pypy/extradoc/changeset/e94a05cec2b9/ Log: Work on talk. diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -5,6 +5,8 @@ What's this talk about? ----------------------- +* very short intro to the PyPy project + * short introduction to JITting * how does a tracing JIT work @@ -12,6 +14,23 @@ * semantics that make Python slow/hard to optimize XXX cross slow +What is PyPy +------------ + +* Open source (MIT license) + +* 8 years old + +* Stable Python interpreter with 99.99% compatibility + +Well tested +----------- + +* 150 KLOC of tests + +.. image:: carl_tests.jpg + + Short introduction to JITting ----------------------------- @@ -39,8 +58,6 @@ * we have cool tools! -XXX pic - Part 2 - python semantics -------------------------- @@ -64,12 +81,12 @@ * ``a + b`` can call integer addition, string concatenation or custom ``__add__`` method +* That's simplified, you also have ``__radd__``, and special cases for objects implemented in C, and for performance, and ... + * not much to talk about, tracing JIT deals with this without extra effort -* it can get fairly complex (XXX http://hg.python.org/cpython/file/6910af7df354/Objects/abstract.c#l761) - -* all of this logic is constant folded (XXX trace) +* all of this logic is constant folded Boxing ------ @@ -84,8 +101,6 @@ * frames get in the way (they escape locals and valuestack) -XXX more traces - Frame introspection ------------------- From commits-noreply at bitbucket.org Thu Mar 10 15:08:02 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:08:02 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Merged upstream. 
Message-ID: <20110310140802.A7911282BA1@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3366:2c7f1e0d17ce Date: 2011-03-10 09:07 -0500 http://bitbucket.org/pypy/extradoc/changeset/2c7f1e0d17ce/ Log: Merged upstream. From commits-noreply at bitbucket.org Thu Mar 10 15:12:06 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:12:06 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: __dict__ images. Message-ID: <20110310141206.4F65636C208@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3367:bac64d383e33 Date: 2011-03-10 09:11 -0500 http://bitbucket.org/pypy/extradoc/changeset/bac64d383e33/ Log: __dict__ images. diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -135,6 +135,8 @@ * 3 dict lookups +* Super simplified, full version doesn't fit on this slide. + Map dicts ------------- @@ -156,7 +158,15 @@ * those dictionary lookups are constant-folded away at the time of JIT compilation -XXX cool pics +CPython ``__dict__`` +-------------------- + +.. image:: cpython-instance.png + +Map dicts (2) +------------- + +.. image:: dictinstancemap.png Dynamic method lookup --------------------- diff --git a/talk/pycon2011/whyslow/cpython-instance.png b/talk/pycon2011/whyslow/cpython-instance.png new file mode 100644 index 0000000000000000000000000000000000000000..648836897be0f3470f59ec5b4a70fe9fc3f7d157 GIT binary patch [cut] diff --git a/talk/pycon2011/whyslow/dictinstancemap.png b/talk/pycon2011/whyslow/dictinstancemap.png new file mode 100644 index 0000000000000000000000000000000000000000..23d4780bbeec5d64ddbf14581f5ddd8d5e1bccb5 GIT binary patch [cut] From commits-noreply at bitbucket.org Thu Mar 10 15:13:27 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:13:27 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: added fast slide. 
Message-ID: <20110310141327.D49A136C209@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3368:94ebe959de53 Date: 2011-03-10 09:13 -0500 http://bitbucket.org/pypy/extradoc/changeset/94ebe959de53/ Log: added fast slide. diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -30,6 +30,13 @@ .. image:: carl_tests.jpg +Fast +---- + +* http://speed.pypy.org + +* How do we do it? + Short introduction to JITting ----------------------------- From commits-noreply at bitbucket.org Thu Mar 10 15:15:10 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:15:10 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Remove XXX Message-ID: <20110310141510.A547A36C209@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3369:928f39e82b23 Date: 2011-03-10 09:15 -0500 http://bitbucket.org/pypy/extradoc/changeset/928f39e82b23/ Log: Remove XXX diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -12,7 +12,6 @@ * how does a tracing JIT work * semantics that make Python slow/hard to optimize -XXX cross slow What is PyPy ------------ From commits-noreply at bitbucket.org Thu Mar 10 15:15:50 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:15:50 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Add make and stuff Message-ID: <20110310141550.BBC5D36C209@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3370:ea1c214c19c5 Date: 2011-03-10 09:15 -0500 http://bitbucket.org/pypy/extradoc/changeset/ea1c214c19c5/ Log: Add make and stuff diff --git a/talk/pycon2011/whyslow/title.latex b/talk/pycon2011/whyslow/title.latex new file mode 100644 --- /dev/null +++ b/talk/pycon2011/whyslow/title.latex @@ -0,0 +1,5 @@ +\begin{titlepage} +\begin{figure}[h] 
+\scalebox{0.8}{\includegraphics[width=80px]{../../img/py-web.png}} +\end{figure} +\end{titlepage} diff --git a/talk/pycon2011/whyslow/Makefile b/talk/pycon2011/whyslow/Makefile new file mode 100644 --- /dev/null +++ b/talk/pycon2011/whyslow/Makefile @@ -0,0 +1,10 @@ + + +whyslow-talk.pdf: talk.rst author.latex title.latex stylesheet.latex + rst2beamer --input-encoding=utf-8 --output-encoding=utf-8 --stylesheet=stylesheet.latex --documentoptions=14pt --theme=Warsaw --overlaybullets=False talk.rst whyslow-talk.latex || exit + sed 's/\\date{}/\\input{author.latex}/' -i whyslow-talk.latex || exit + sed 's/\\maketitle/\\input{title.latex}/' -i whyslow-talk.latex || exit + pdflatex whyslow-talk.latex || exit + +view: whyslow-talk.pdf + evince whyslow-talk.pdf & \ No newline at end of file diff --git a/talk/pycon2011/whyslow/stylesheet.latex b/talk/pycon2011/whyslow/stylesheet.latex new file mode 100644 --- /dev/null +++ b/talk/pycon2011/whyslow/stylesheet.latex @@ -0,0 +1,10 @@ +\usetheme{Warsaw} +\setbeamercovered{transparent} +\setbeamertemplate{navigation symbols}{} + +\definecolor{darkgreen}{rgb}{0, 0.5, 0.0} +\newcommand{\docutilsrolegreen}[1]{\color{darkgreen}#1\normalcolor} +\newcommand{\docutilsrolered}[1]{\color{red}#1\normalcolor} + +\newcommand{\green}[1]{\color{darkgreen}#1\normalcolor} +\newcommand{\red}[1]{\color{red}#1\normalcolor} diff --git a/talk/pycon2011/whyslow/author.latex b/talk/pycon2011/whyslow/author.latex new file mode 100644 --- /dev/null +++ b/talk/pycon2011/whyslow/author.latex @@ -0,0 +1,8 @@ +\definecolor{rrblitbackground}{rgb}{0.0, 0.0, 0.0} + +\title[PyPy]{Why is Python slow and how PyPy can help} +\author[fijal, agaynor, arigato] +{Maciej Fijałkowski, Alex Gaynor, Armin Rigo} + +\institute{PyCon 2011} +\date{11 March 2011} From commits-noreply at bitbucket.org Thu Mar 10 15:15:51 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:15:51 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: merge Message-ID: 
<20110310141551.0657E36C20D@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3371:fe888b9a2e2e Date: 2011-03-10 09:15 -0500 http://bitbucket.org/pypy/extradoc/changeset/fe888b9a2e2e/ Log: merge From commits-noreply at bitbucket.org Thu Mar 10 15:19:19 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 15:19:19 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: update Message-ID: <20110310141919.54EE236C209@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3372:4c410a1661fc Date: 2011-03-10 09:19 -0500 http://bitbucket.org/pypy/extradoc/changeset/4c410a1661fc/ Log: update diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -28,6 +28,8 @@ * 150 KLOC of tests .. image:: carl_tests.jpg + :scale: 12% + :align: center Fast ---- @@ -168,11 +170,15 @@ -------------------- .. image:: cpython-instance.png + :scale: 50% + :align: center Map dicts (2) ------------- .. image:: dictinstancemap.png + :scale: 30% + :align: center Dynamic method lookup --------------------- From commits-noreply at bitbucket.org Thu Mar 10 15:23:33 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 10 Mar 2011 15:23:33 +0100 (CET) Subject: [pypy-svn] pypy default: Our sys.stdout has a different implementation and is fully buffered when redirected. Message-ID: <20110310142333.B535236C20D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42492:e6717b868a95 Date: 2011-03-10 15:23 +0100 http://bitbucket.org/pypy/pypy/changeset/e6717b868a95/ Log: Our sys.stdout has a different implementation and is fully buffered when redirected. Fortunately, CPython3 has the exact same fix for the same reason. 
diff --git a/lib-python/modified-2.7.0/test/test_threading.py b/lib-python/modified-2.7.0/test/test_threading.py --- a/lib-python/modified-2.7.0/test/test_threading.py +++ b/lib-python/modified-2.7.0/test/test_threading.py @@ -429,6 +429,9 @@ def joiningfunc(mainthread): mainthread.join() print 'end of thread' + # stdout is fully buffered because not a tty, we have to flush + # before exit. + sys.stdout.flush() \n""" + script import subprocess From commits-noreply at bitbucket.org Thu Mar 10 15:24:36 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:24:36 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: work work Message-ID: <20110310142436.C2CE636C20D@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3373:b05dbf74e36f Date: 2011-03-10 09:24 -0500 http://bitbucket.org/pypy/extradoc/changeset/b05dbf74e36f/ Log: work work diff --git a/talk/pycon2011/whyslow/talk.rst b/talk/pycon2011/whyslow/talk.rst --- a/talk/pycon2011/whyslow/talk.rst +++ b/talk/pycon2011/whyslow/talk.rst @@ -193,15 +193,30 @@ * call the bound method +Version tags and inlining +------------------------- + +* For every type give it a version, any time it changes, increment the version (e.g. modifying MRO or assigning attribute) + +* CPython does the trick too + +* But it's more powerful with a JIT + +* When looking up a method, check the version tag and create a guard + +* Then inline the method and eliminate frame + Linking it all together ----------------------- -* array example +* ``translate.py``: a large Python application (200 KLOC) + +* 1 hour on CPython, 25 minutes on PyPy Things we did not talk about ---------------------------- -* regular expressions +* regular expressions (JIT'd) * generators @@ -216,9 +231,15 @@ * numpy +* Python 3 + Thank you ----------- * http://pypy.org * http://morepypy.blogspot.com/ + +* http://speed.pypy.org/ + +* Questions? 
From commits-noreply at bitbucket.org Thu Mar 10 15:35:53 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:35:53 +0100 (CET) Subject: [pypy-svn] pypy default: Fix unicode % unicode_subclass Message-ID: <20110310143553.159EC36C20D@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42493:29c9daa92726 Date: 2011-03-10 09:35 -0500 http://bitbucket.org/pypy/pypy/changeset/29c9daa92726/ Log: Fix unicode % unicode_subclass diff --git a/pypy/objspace/std/test/test_unicodeobject.py b/pypy/objspace/std/test/test_unicodeobject.py --- a/pypy/objspace/std/test/test_unicodeobject.py +++ b/pypy/objspace/std/test/test_unicodeobject.py @@ -833,3 +833,5 @@ b = unicode(a) assert type(b) is unicode assert b == u'hello \u1234' + + assert u'%s' % S(u'mar\xe7') == u'mar\xe7' \ No newline at end of file diff --git a/pypy/objspace/std/unicodetype.py b/pypy/objspace/std/unicodetype.py --- a/pypy/objspace/std/unicodetype.py +++ b/pypy/objspace/std/unicodetype.py @@ -302,6 +302,8 @@ # obscure workaround: for the next two lines see # test_unicode_conversion_with__str__ if w_unicode_method is None: + if space.isinstance_w(w_obj, space.w_unicode): + return space.wrap(space.unicode_w(w_obj)) w_unicode_method = space.lookup(w_obj, "__str__") if w_unicode_method is not None: w_res = space.get_and_call_function(w_unicode_method, w_obj) From commits-noreply at bitbucket.org Thu Mar 10 15:35:53 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 15:35:53 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. Message-ID: <20110310143553.5553836C20F@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42494:e7c3f9edd1d0 Date: 2011-03-10 09:35 -0500 http://bitbucket.org/pypy/pypy/changeset/e7c3f9edd1d0/ Log: Merged upstream. 
From commits-noreply at bitbucket.org Thu Mar 10 19:59:34 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 10 Mar 2011 19:59:34 +0100 (CET) Subject: [pypy-svn] pypy default: Don't skip this test on other platforms. Message-ID: <20110310185934.72111282B8B@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42495:be1aee12578c Date: 2011-03-10 13:59 -0500 http://bitbucket.org/pypy/pypy/changeset/be1aee12578c/ Log: Don't skip this test on other platforms. diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -337,7 +337,7 @@ RegrTest('test_peepholer.py'), RegrTest('test_pep247.py'), RegrTest('test_pep263.py'), - RegrTest('test_pep277.py', skip=only_win32), + RegrTest('test_pep277.py'), RegrTest('test_pep292.py'), RegrTest('test_pickle.py', core=True), RegrTest('test_pickletools.py', core=False), From commits-noreply at bitbucket.org Thu Mar 10 20:43:16 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 10 Mar 2011 20:43:16 +0100 (CET) Subject: [pypy-svn] pypy default: make sure that we can use ID: also for loop conditions Message-ID: <20110310194316.B7CE936C20E@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42496:c8bc558fe8d7 Date: 2011-03-10 17:50 +0100 http://bitbucket.org/pypy/pypy/changeset/c8bc558fe8d7/ Log: make sure that we can use ID: also for loop conditions diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -99,12 +99,11 @@ # 1. compute the ids of self, i.e. 
the outer function id2opcodes = find_ids(self.code) all_my_opcodes = self.get_set_of_opcodes() - # XXX: for now, we just look for the first opcode in the id range for id, opcodes in id2opcodes.iteritems(): if not opcodes: continue - target_opcode = opcodes[0] - if target_opcode in all_my_opcodes: + target_opcodes = set(opcodes) + if all_my_opcodes.intersection(target_opcodes): ids[id] = opcodes # # 2. compute the ids of all the inlined functions diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -86,6 +86,7 @@ opcodes_names = [opcode.__class__.__name__ for opcode in myline] assert opcodes_names == ['LOAD_FAST', 'LOAD_CONST', 'BINARY_ADD', 'STORE_FAST'] + class TestOpMatcher(object): def match(self, src1, src2): @@ -283,7 +284,7 @@ def test_ops_by_id(self): def f(): i = 0 - while i < 1003: + while i < 1003: # ID: cond i += 1 # ID: increment a = 0 # to make sure that JUMP_ABSOLUTE is not part of the ID return i @@ -293,6 +294,12 @@ # ops = loop.ops_by_id('increment') assert log.opnames(ops) == ['int_add'] + # + ops = loop.ops_by_id('cond') + # the 'jump' at the end is because the last opcode in the loop + # coincides with the first, and so it thinks that 'jump' belongs to + # the id + assert log.opnames(ops) == ['int_lt', 'guard_true', 'jump'] def test_ops_by_id_and_opcode(self): def f(): From commits-noreply at bitbucket.org Thu Mar 10 20:43:17 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 10 Mar 2011 20:43:17 +0100 (CET) Subject: [pypy-svn] pypy default: port test_simple_call from test_pypy_c Message-ID: <20110310194317.BB46B36C20E@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42497:346cff045b30 Date: 2011-03-10 20:42 +0100 http://bitbucket.org/pypy/pypy/changeset/346cff045b30/ Log: port test_simple_call from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/model.py 
b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -195,7 +195,9 @@ args = args[:-1] args = args.split(',') args = map(str.strip, args) - if args[-1].startswith('descr='): + if args == ['']: + args = [] + if args and args[-1].startswith('descr='): descr = args.pop() descr = descr[len('descr='):] else: diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -131,14 +131,71 @@ return i + 1 + OFFSET # ID: add def main(n): i = 0 - while i < n+OFFSET: - i = f(f(i)) # ID: call + while i < n+OFFSET: # ID: cond + i = f(f(i)) # ID: call + a = 0 return i """ log = self.run(src, [1000], threshold=400) assert log.result == 1000 + # first, we test what is inside the entry bridge + # ----------------------------------------------- entry_bridge, = log.loops_by_id('call', is_entry_bridge=True) - import pdb;pdb.set_trace() + # LOAD_GLOBAL of OFFSET + ops = entry_bridge.ops_by_id('cond', opcode='LOAD_GLOBAL') + assert log.opnames(ops) == ["guard_value", + "getfield_gc", "guard_value", + "getfield_gc", "guard_isnull", + "getfield_gc", "guard_nonnull_class"] + # LOAD_GLOBAL of OFFSET but in different function partially folded + # away + # XXX could be improved + ops = entry_bridge.ops_by_id('add', opcode='LOAD_GLOBAL') + assert log.opnames(ops) == ["guard_value", "getfield_gc", "guard_isnull"] + # + # two LOAD_GLOBAL of f, the second is folded away + ops = entry_bridge.ops_by_id('call', opcode='LOAD_GLOBAL') + assert log.opnames(ops) == ["getfield_gc", "guard_nonnull_class"] + # + assert entry_bridge.match_by_id('call', """ + p29 = getfield_gc(ConstPtr(ptr28), descr=) + guard_nonnull_class(p29, ConstClass(Function), descr=) + i32 = getfield_gc(p0, descr=) + guard_false(i32, descr=) + p33 = getfield_gc(p29, descr=) + 
guard_value(p33, ConstPtr(ptr34), descr=) + p35 = getfield_gc(p29, descr=) + p36 = getfield_gc(p29, descr=) + p38 = call(ConstClass(getexecutioncontext), descr=) + p39 = getfield_gc(p38, descr=) + i40 = force_token() + p41 = getfield_gc(p38, descr=) + guard_isnull(p41, descr=) + i42 = getfield_gc(p38, descr=) + i43 = int_is_zero(i42) + guard_true(i43, descr=) + i50 = force_token() + """) + # + # then, we test the actual loop + # ----------------------------- + loop, = log.loops_by_id('call') + assert loop.match(""" + i12 = int_lt(i5, i6) + guard_true(i12, descr=) + i13 = force_token() + i15 = int_add(i5, 1) + i16 = int_add_ovf(i15, i7) + guard_no_overflow(descr=) + i18 = force_token() + i20 = int_add_ovf(i16, 1) + guard_no_overflow(descr=) + i21 = int_add_ovf(i20, i7) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, i21, i6, i7, p8, p9, p10, p11, descr=) + """) + def test_reraise(self): def f(n): diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -126,6 +126,8 @@ assert res == ("setfield_gc", None, ["p0", "i0"], "") res = OpMatcher.parse_op("i1 = getfield_gc(p0, descr=)") assert res == ("getfield_gc", "i1", ["p0"], "") + res = OpMatcher.parse_op("p0 = force_token()") + assert res == ("force_token", "p0", [], None) def test_exact_match(self): loop = """ From commits-noreply at bitbucket.org Thu Mar 10 22:55:20 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 10 Mar 2011 22:55:20 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Implement "enable_opts" as a replacement for various ad-hoc enabling/disabling Message-ID: <20110310215520.11792282B8B@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42498:67c539df0eb6 Date: 2011-03-10 16:54 -0500 http://bitbucket.org/pypy/pypy/changeset/67c539df0eb6/ Log: Implement "enable_opts" as a replacement for various 
ad-hoc enabling/disabling of optimizations. Export this at applevel as --jit enable_opts=...:...:... diff --git a/pypy/config/translationoption.py b/pypy/config/translationoption.py --- a/pypy/config/translationoption.py +++ b/pypy/config/translationoption.py @@ -117,7 +117,6 @@ ChoiceOption("jit_profiler", "integrate profiler support into the JIT", ["off", "oprofile"], default="off"), - BoolOption("jit_ffi", "optimize libffi calls", default=False), # misc BoolOption("verbose", "Print extra information", default=False), diff --git a/pypy/jit/metainterp/test/test_virtualizable.py b/pypy/jit/metainterp/test/test_virtualizable.py --- a/pypy/jit/metainterp/test/test_virtualizable.py +++ b/pypy/jit/metainterp/test/test_virtualizable.py @@ -5,7 +5,6 @@ from pypy.jit.codewriter.policy import StopAtXPolicy from pypy.jit.codewriter import heaptracker from pypy.rlib.jit import JitDriver, hint, dont_look_inside -from pypy.rlib.jit import OPTIMIZER_SIMPLE, OPTIMIZER_FULL from pypy.rlib.rarithmetic import intmask from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin from pypy.rpython.rclass import FieldListAccessor @@ -374,7 +373,7 @@ promote_virtualizable(xy2, 'inst_l2') return xy2.inst_l2[0] expected = f(20) - res = self.meta_interp(f, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [20], enable_opts='') assert res == expected self.check_loops(getfield_gc=3, setfield_gc=0, arraylen_gc=1, getarrayitem_gc=1, setarrayitem_gc=1) @@ -1369,8 +1368,7 @@ frame = Frame(n) return f("c-l", frame) print main(100) - res = self.meta_interp(main, [100], inline=True, - optimizer=OPTIMIZER_FULL) + res = self.meta_interp(main, [100], inline=True, enable_opts='') class TestOOtype(#ExplicitVirtualizableTests, ImplicitVirtualizableTests, diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5,7 +5,7 @@ BaseTest) 
import pypy.jit.metainterp.optimizeopt.optimizer as optimizeopt import pypy.jit.metainterp.optimizeopt.virtualize as virtualize -from pypy.jit.metainterp.optimizeopt import optimize_loop_1 +from pypy.jit.metainterp.optimizeopt import optimize_loop_1, ALL_OPTS_DICT from pypy.jit.metainterp.optimizeutil import InvalidLoop from pypy.jit.metainterp.history import AbstractDescr, ConstInt, BoxInt from pypy.jit.metainterp.history import TreeLoop, LoopToken @@ -188,7 +188,7 @@ def clone_if_mutable(self): return self loop.preamble.start_resumedescr = FakeDescr() - optimize_loop_1(metainterp_sd, loop) + optimize_loop_1(metainterp_sd, loop, ALL_OPTS_DICT) # print diff --git a/pypy/jit/metainterp/optimize.py b/pypy/jit/metainterp/optimize.py --- a/pypy/jit/metainterp/optimize.py +++ b/pypy/jit/metainterp/optimize.py @@ -4,42 +4,44 @@ from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1 -def optimize_loop(metainterp_sd, old_loop_tokens, loop): +def optimize_loop(metainterp_sd, old_loop_tokens, loop, enable_opts): debug_start("jit-optimize") try: - return _optimize_loop(metainterp_sd, old_loop_tokens, loop) + return _optimize_loop(metainterp_sd, old_loop_tokens, loop, + enable_opts) finally: debug_stop("jit-optimize") -def _optimize_loop(metainterp_sd, old_loop_tokens, loop): +def _optimize_loop(metainterp_sd, old_loop_tokens, loop, enable_opts): cpu = metainterp_sd.cpu metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) # XXX do we really still need a list? 
if old_loop_tokens: return old_loop_tokens[0] - optimize_loop_1(metainterp_sd, loop) + optimize_loop_1(metainterp_sd, loop, enable_opts) return None # ____________________________________________________________ -def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, +def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, enable_opts, inline_short_preamble=True, retraced=False): debug_start("jit-optimize") try: return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, + enable_opts, inline_short_preamble, retraced) finally: debug_stop("jit-optimize") -def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, +def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, enable_opts, inline_short_preamble, retraced=False): cpu = metainterp_sd.cpu metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) if old_loop_tokens: old_loop_token = old_loop_tokens[0] bridge.operations[-1].setdescr(old_loop_token) # patch jump target - optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble, - retraced) + optimize_bridge_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble, retraced) return old_loop_tokens[0] #return bridge.operations[-1].getdescr() return None diff --git a/pypy/module/pypyjit/interp_jit.py b/pypy/module/pypyjit/interp_jit.py --- a/pypy/module/pypyjit/interp_jit.py +++ b/pypy/module/pypyjit/interp_jit.py @@ -6,7 +6,7 @@ from pypy.tool.pairtype import extendabletype from pypy.rlib.rarithmetic import r_uint, intmask from pypy.rlib.jit import JitDriver, hint, we_are_jitted, dont_look_inside -from pypy.rlib.jit import current_trace_length +from pypy.rlib.jit import current_trace_length, unroll_parameters import pypy.interpreter.pyopcode # for side-effects from pypy.interpreter.error import OperationError, operationerrfmt from pypy.interpreter.pycode import PyCode, CO_GENERATOR @@ -136,12 +136,17 @@ raise OperationError(space.w_ValueError, space.wrap("error in JIT parameters string")) for key, w_value in 
kwds_w.items(): - intval = space.int_w(w_value) - try: - pypyjitdriver.set_param(key, intval) - except ValueError: - raise operationerrfmt(space.w_TypeError, - "no JIT parameter '%s'", key) + if key == 'enable_opts': + pypyjitdriver.set_param('enable_opts', space.str_w(w_value)) + else: + intval = space.int_w(w_value) + for name, _ in unroll_parameters: + if name == key and name != 'enable_opts': + pypyjitdriver.set_param(name, intval) + break + else: + raise operationerrfmt(space.w_TypeError, + "no JIT parameter '%s'", key) @dont_look_inside def residual_call(space, w_callable, __args__): diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -1,7 +1,7 @@ import py import sys from pypy.rlib.jit import JitDriver, we_are_jitted, hint, dont_look_inside -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_SIMPLE, loop_invariant +from pypy.rlib.jit import loop_invariant from pypy.rlib.jit import jit_debug, assert_green, AssertGreenFailed from pypy.rlib.jit import unroll_safe, current_trace_length from pypy.jit.metainterp.warmspot import ll_meta_interp, get_stats @@ -15,6 +15,7 @@ from pypy.jit.metainterp.typesystem import LLTypeHelper, OOTypeHelper from pypy.rpython.lltypesystem import lltype, llmemory from pypy.rpython.ootypesystem import ootype +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT def _get_jitcodes(testself, CPUClass, func, values, type_system, supports_longlong=False, **kwds): @@ -42,6 +43,7 @@ optimize_bridge = staticmethod(simple_optimize.optimize_bridge) trace_limit = sys.maxint + enable_opts = ALL_OPTS_DICT func._jit_unroll_safe_ = True rtyper = support.annotate(func, values, type_system=type_system) @@ -1176,7 +1178,7 @@ x += inst.foo n -= 1 return x - res = self.meta_interp(f, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [20], enable_opts='') assert res == f(20) self.check_loops(call=0) @@ 
-1379,8 +1381,7 @@ m = m >> 1 return x - res = self.meta_interp(f, [50, 1], - optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [50, 1], enable_opts='') assert res == 42 def test_set_param(self): @@ -2306,12 +2307,12 @@ res = self.meta_interp(f, [1, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert not res res = self.meta_interp(f, [0, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res class BaseLLtypeTests(BasicTests): @@ -2391,5 +2392,25 @@ self.meta_interp(main, []) + def test_enable_opts(self): + jitdriver = JitDriver(greens = [], reds = ['a']) + + class A(object): + def __init__(self, i): + self.i = i + + def f(): + a = A(0) + + while a.i < 10: + jitdriver.jit_merge_point(a=a) + jitdriver.can_enter_jit(a=a) + a = A(a.i + 1) + + self.meta_interp(f, []) + self.check_loops(new_with_vtable=0) + self.meta_interp(f, [], enable_opts='') + self.check_loops(new_with_vtable=1) + class TestLLtype(BaseLLtypeTests, LLJitMixin): pass diff --git a/pypy/jit/metainterp/test/test_send.py b/pypy/jit/metainterp/test/test_send.py --- a/pypy/jit/metainterp/test/test_send.py +++ b/pypy/jit/metainterp/test/test_send.py @@ -2,10 +2,8 @@ from pypy.rlib.jit import JitDriver, hint, purefunction from pypy.jit.codewriter.policy import StopAtXPolicy from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_SIMPLE -class SendTests: - optimizer=OPTIMIZER_FULL +class SendTests(object): def test_green_send(self): myjitdriver = JitDriver(greens = ['i'], reds = ['counter']) @@ -166,12 +164,8 @@ for j in range(69, 75): res = self.meta_interp(f, [j], policy=policy) assert res == 42 - if self.optimizer != OPTIMIZER_FULL: - self.check_enter_count(3) - self.check_loop_count(3) - else: - self.check_enter_count_at_most(5) - self.check_loop_count_at_most(5) + self.check_enter_count_at_most(5) + self.check_loop_count_at_most(5) def 
test_oosend_guard_failure(self): myjitdriver = JitDriver(greens = [], reds = ['x', 'y', 'w']) @@ -356,10 +350,7 @@ assert res == f(198) # we get two TreeLoops: an initial one, and one entering from # the interpreter - if self.optimizer != OPTIMIZER_FULL: - self.check_tree_loop_count(1) - else: - self.check_tree_loop_count(2) + self.check_tree_loop_count(2) def test_indirect_call_unknown_object_3(self): myjitdriver = JitDriver(greens = [], reds = ['x', 'y', 'z', 'state']) @@ -396,10 +387,7 @@ assert res == f(198) # we get two TreeLoops: an initial one, and one entering from # the interpreter - if self.optimizer != OPTIMIZER_FULL: - self.check_tree_loop_count(1) - else: - self.check_tree_loop_count(2) + self.check_tree_loop_count(2) def test_two_behaviors(self): py.test.skip("XXX fix me!!!!!!! problem in optimize.py") @@ -452,10 +440,7 @@ # we expect 1 loop, 1 entry bridge, and 1 bridge going from the # loop back to the start of the entry bridge self.check_loop_count(2) # 1 loop + 1 bridge - if self.optimizer != OPTIMIZER_FULL: - self.check_tree_loop_count(1) # 1 loop - else: - self.check_tree_loop_count(2) # 1 loop + 1 entry bridge (argh) + self.check_tree_loop_count(2) # 1 loop + 1 entry bridge (argh) self.check_aborted_count(0) def test_three_cases(self): @@ -476,10 +461,7 @@ return node.x res = self.meta_interp(f, [55]) assert res == f(55) - if self.optimizer != OPTIMIZER_FULL: - self.check_tree_loop_count(1) - else: - self.check_tree_loop_count(4) + self.check_tree_loop_count(4) def test_three_classes(self): class Base: @@ -509,10 +491,7 @@ return n res = self.meta_interp(f, [55], policy=StopAtXPolicy(extern)) assert res == f(55) - if self.optimizer != OPTIMIZER_FULL: - self.check_tree_loop_count(1) - else: - self.check_tree_loop_count(2) + self.check_tree_loop_count(2) def test_bug1(self): myjitdriver = JitDriver(greens = [], reds = ['n', 'node']) diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py --- a/pypy/rlib/jit.py +++ b/pypy/rlib/jit.py @@ -2,7 +2,7 @@ 
import sys from pypy.rpython.extregistry import ExtRegistryEntry from pypy.rlib.objectmodel import CDefinedIntSymbolic -from pypy.rlib.objectmodel import keepalive_until_here +from pypy.rlib.objectmodel import keepalive_until_here, specialize from pypy.rlib.unroll import unrolling_iterable from pypy.rlib.nonconst import NonConstant @@ -263,19 +263,15 @@ class JitHintError(Exception): """Inconsistency in the JIT hints.""" -OPTIMIZER_SIMPLE = 0 -OPTIMIZER_NO_UNROLL = 1 -OPTIMIZER_FULL = 2 - PARAMETERS = {'threshold': 1000, 'trace_eagerness': 200, 'trace_limit': 10000, - 'inlining': False, - 'optimizer': OPTIMIZER_FULL, + 'inlining': 0, 'loop_longevity': 1000, 'retrace_limit': 5, + 'enable_opts': None, # patched later by optimizeopt/__init__.py } -unroll_parameters = unrolling_iterable(PARAMETERS.keys()) +unroll_parameters = unrolling_iterable(PARAMETERS.items()) # ____________________________________________________________ @@ -332,14 +328,14 @@ # (internal, must receive a constant 'name') assert name in PARAMETERS + @specialize.arg(0, 1) def set_param(self, name, value): """Set one of the tunable JIT parameter.""" - for name1 in unroll_parameters: + for name1, _ in unroll_parameters: if name1 == name: self._set_param(name1, value) return raise ValueError("no such parameter") - set_param._annspecialcase_ = 'specialize:arg(0)' def set_user_param(self, text): """Set the tunable JIT parameters from a user-supplied string @@ -351,12 +347,17 @@ parts = s.split('=') if len(parts) != 2: raise ValueError - try: - value = int(parts[1]) - except ValueError: - raise # re-raise the ValueError (annotator hint) name = parts[0] - self.set_param(name, value) + value = parts[1] + if name == 'enable_opts': + self.set_param('enable_opts', value) + else: + for name1, _ in unroll_parameters: + if name1 == name and name1 != 'enable_opts': + try: + self.set_param(name1, int(value)) + except ValueError: + raise set_user_param._annspecialcase_ = 'specialize:arg(0)' def 
_make_extregistryentries(self): @@ -537,7 +538,10 @@ def compute_result_annotation(self, s_name, s_value): from pypy.annotation import model as annmodel assert s_name.is_constant() - assert annmodel.SomeInteger().contains(s_value) + if annmodel.SomeInteger().contains(s_value): + pass + else: + assert annmodel.SomeString().contains(s_value) return annmodel.s_None def specialize_call(self, hop): @@ -545,7 +549,7 @@ hop.exception_cannot_occur() driver = self.instance.im_self name = hop.args_s[0].const - v_value = hop.inputarg(lltype.Signed, arg=1) + v_value = hop.inputarg(hop.args_r[1], arg=1) vlist = [hop.inputconst(lltype.Void, "set_param"), hop.inputconst(lltype.Void, driver), hop.inputconst(lltype.Void, name), diff --git a/pypy/jit/metainterp/test/test_fficall.py b/pypy/jit/metainterp/test/test_fficall.py --- a/pypy/jit/metainterp/test/test_fficall.py +++ b/pypy/jit/metainterp/test/test_fficall.py @@ -40,6 +40,6 @@ n += 1 return res # - res = self.meta_interp(f, [0], jit_ffi=True) + res = self.meta_interp(f, [0]) return res diff --git a/pypy/jit/metainterp/test/test_del.py b/pypy/jit/metainterp/test/test_del.py --- a/pypy/jit/metainterp/test/test_del.py +++ b/pypy/jit/metainterp/test/test_del.py @@ -1,5 +1,5 @@ import py -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE +from pypy.rlib.jit import JitDriver from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin @@ -76,7 +76,7 @@ x += inst.foo n -= 1 return 1 - res = self.meta_interp(f, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [20], enable_opts='') assert res == 1 self.check_loops(call=1) # for the case B(), but not for the case A() diff --git a/pypy/jit/metainterp/test/test_ztranslation.py b/pypy/jit/metainterp/test/test_ztranslation.py --- a/pypy/jit/metainterp/test/test_ztranslation.py +++ b/pypy/jit/metainterp/test/test_ztranslation.py @@ -1,7 +1,7 @@ import py from pypy.jit.metainterp.warmspot import rpython_ll_meta_interp, ll_meta_interp from pypy.jit.backend.llgraph 
import runner -from pypy.rlib.jit import JitDriver, OPTIMIZER_FULL, unroll_parameters +from pypy.rlib.jit import JitDriver, unroll_parameters from pypy.rlib.jit import PARAMETERS, dont_look_inside from pypy.jit.metainterp.jitprof import Profiler from pypy.rpython.lltypesystem import lltype, llmemory @@ -51,8 +51,7 @@ set_jitcell_at=set_jitcell_at, get_printable_location=get_printable_location) def f(i): - for param in unroll_parameters: - defl = PARAMETERS[param] + for param, defl in unroll_parameters: jitdriver.set_param(param, defl) jitdriver.set_param("threshold", 3) jitdriver.set_param("trace_eagerness", 2) diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -5,37 +5,59 @@ from pypy.jit.metainterp.optimizeopt.heap import OptHeap from pypy.jit.metainterp.optimizeopt.string import OptString from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble +from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall +from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify +from pypy.rlib.jit import PARAMETERS -def optimize_loop_1(metainterp_sd, loop, unroll=True, +ALL_OPTS = [('intbounds', OptIntBounds), + ('rewrite', OptRewrite), + ('virtualize', OptVirtualize), + ('string', OptString), + ('heap', OptHeap), + ('ffi', OptFfiCall), + ('unroll', None)] +# no direct instantiation of unroll + +ALL_OPTS_DICT = dict.fromkeys([name for name, _ in ALL_OPTS]) + +ALL_OPTS_NAMES = ':'.join([name for name, _ in ALL_OPTS]) +PARAMETERS['enable_opts'] = ALL_OPTS_NAMES + +def optimize_loop_1(metainterp_sd, loop, enable_opts, inline_short_preamble=True, retraced=False): """Optimize loop.operations to remove internal overheadish operations. 
""" - opt_str = OptString() - optimizations = [OptIntBounds(), - OptRewrite(), - OptVirtualize(), - opt_str, - OptHeap(), - ] + optimizations = [] + unroll = 'unroll' in enable_opts + for name, opt in ALL_OPTS: + if name in enable_opts: + if opt is not None: + o = opt() + if unroll and name == 'string': + o.enabled = False + # FIXME: Workaround to disable string optimisation + # during preamble but to keep it during the loop + optimizations.append(o) + + if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: + optimizations.append(OptSimplify()) + if inline_short_preamble: - optimizations = [OptInlineShortPreamble(retraced)] + optimizations - - if metainterp_sd.jit_ffi: - from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall - optimizations = optimizations + [ - OptFfiCall(), - ] + optimizations = [OptInlineShortPreamble(retraced)] + optimizations if unroll: - opt_str.enabled = False # FIXME: Workaround to disable string optimisation - # during preamble but to keep it during the loop optimize_unroll(metainterp_sd, loop, optimizations) else: optimizer = Optimizer(metainterp_sd, loop, optimizations) optimizer.propagate_all_forward() -def optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble=True, - retraced=False): +def optimize_bridge_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble=True, retraced=False): """The same, but for a bridge. 
""" - optimize_loop_1(metainterp_sd, bridge, False, inline_short_preamble, - retraced) + enable_opts = enable_opts.copy() + try: + del enable_opts['unroll'] + except KeyError: + pass + optimize_loop_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble, retraced) diff --git a/pypy/jit/metainterp/test/test_tl.py b/pypy/jit/metainterp/test/test_tl.py --- a/pypy/jit/metainterp/test/test_tl.py +++ b/pypy/jit/metainterp/test/test_tl.py @@ -1,5 +1,4 @@ import py -from pypy.rlib.jit import OPTIMIZER_SIMPLE from pypy.jit.codewriter.policy import StopAtXPolicy from pypy.jit.metainterp.test.test_basic import OOJitMixin, LLJitMixin @@ -119,7 +118,7 @@ def main(num, arg): return interp(codes[num], inputarg=arg) - res = self.meta_interp(main, [0, 20], optimizer=OPTIMIZER_SIMPLE, + res = self.meta_interp(main, [0, 20], enable_opts='', listops=listops, backendopt=True, policy=policy) assert res == 0 diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -1275,11 +1275,6 @@ self._addr2name_keys = [key for key, value in list_of_addr2name] self._addr2name_values = [value for key, value in list_of_addr2name] - def setup_jitdrivers_sd(self, optimizer): - if optimizer is not None: - for jd in self.jitdrivers_sd: - jd.warmstate.set_param_optimizer(optimizer) - def finish_setup(self, codewriter, optimizer=None): from pypy.jit.metainterp.blackhole import BlackholeInterpBuilder self.blackholeinterpbuilder = BlackholeInterpBuilder(codewriter, self) @@ -1293,7 +1288,6 @@ self.jitdrivers_sd = codewriter.callcontrol.jitdrivers_sd self.virtualref_info = codewriter.callcontrol.virtualref_info self.callinfocollection = codewriter.callcontrol.callinfocollection - self.setup_jitdrivers_sd(optimizer) # # store this information for fastpath of call_assembler # (only the paths that can actually be taken) diff --git a/pypy/jit/metainterp/test/test_loop_unroll.py 
b/pypy/jit/metainterp/test/test_loop_unroll.py --- a/pypy/jit/metainterp/test/test_loop_unroll.py +++ b/pypy/jit/metainterp/test/test_loop_unroll.py @@ -1,10 +1,12 @@ import py -from pypy.rlib.jit import OPTIMIZER_FULL +from pypy.rlib.jit import JitDriver from pypy.jit.metainterp.test import test_loop from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_NAMES class LoopUnrollTest(test_loop.LoopTest): - optimizer = OPTIMIZER_FULL + enable_opts = ALL_OPTS_NAMES + automatic_promotion_result = { 'int_add' : 3, 'int_gt' : 1, 'guard_false' : 1, 'jump' : 1, } diff --git a/pypy/jit/metainterp/warmstate.py b/pypy/jit/metainterp/warmstate.py --- a/pypy/jit/metainterp/warmstate.py +++ b/pypy/jit/metainterp/warmstate.py @@ -7,8 +7,7 @@ from pypy.rlib.rarithmetic import intmask from pypy.rlib.nonconst import NonConstant from pypy.rlib.unroll import unrolling_iterable -from pypy.rlib.jit import (PARAMETERS, OPTIMIZER_SIMPLE, OPTIMIZER_FULL, - OPTIMIZER_NO_UNROLL) +from pypy.rlib.jit import PARAMETERS from pypy.rlib.jit import BaseJitCell from pypy.rlib.debug import debug_start, debug_stop, debug_print from pypy.jit.metainterp import history @@ -224,21 +223,16 @@ def set_param_inlining(self, value): self.inlining = value - def set_param_optimizer(self, optimizer): - if optimizer == OPTIMIZER_SIMPLE: - from pypy.jit.metainterp import simple_optimize - self.optimize_loop = simple_optimize.optimize_loop - self.optimize_bridge = simple_optimize.optimize_bridge - elif optimizer == OPTIMIZER_NO_UNROLL: - from pypy.jit.metainterp import nounroll_optimize - self.optimize_loop = nounroll_optimize.optimize_loop - self.optimize_bridge = nounroll_optimize.optimize_bridge - elif optimizer == OPTIMIZER_FULL: - from pypy.jit.metainterp import optimize - self.optimize_loop = optimize.optimize_loop - self.optimize_bridge = optimize.optimize_bridge - else: - raise ValueError("unknown optimizer") + def 
set_param_enable_opts(self, value): + from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT + + d = {} + for name in value.split(":"): + if name: + if name not in ALL_OPTS_DICT: + raise ValueError('Unknown optimization ' + name) + d[name] = None + self.enable_opts = d def set_param_loop_longevity(self, value): # note: it's a global parameter, not a per-jitdriver one @@ -294,12 +288,6 @@ """Entry point to the JIT. Called at the point with the can_enter_jit() hint. """ - if NonConstant(False): - # make sure we always see the saner optimizer from an - # annotation point of view, otherwise we get lots of - # blocked ops - self.set_param_optimizer(OPTIMIZER_FULL) - if vinfo is not None: virtualizable = args[num_green_args + index_of_virtualizable] virtualizable = vinfo.cast_to_vtype(virtualizable) diff --git a/pypy/jit/metainterp/test/test_compile.py b/pypy/jit/metainterp/test/test_compile.py --- a/pypy/jit/metainterp/test/test_compile.py +++ b/pypy/jit/metainterp/test/test_compile.py @@ -7,7 +7,7 @@ from pypy.jit.metainterp import nounroll_optimize, jitprof, typesystem, compile from pypy.jit.metainterp.test.test_optimizeutil import LLtypeMixin from pypy.jit.tool.oparser import parse - +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT def test_insert_loop_token(): # XXX this test is a bit useless now that there are no specnodes @@ -39,6 +39,7 @@ class FakeState: optimize_loop = staticmethod(nounroll_optimize.optimize_loop) + enable_opts = {} def attach_unoptimized_bridge_from_interp(*args): pass diff --git a/pypy/jit/metainterp/test/test_recursive.py b/pypy/jit/metainterp/test/test_recursive.py --- a/pypy/jit/metainterp/test/test_recursive.py +++ b/pypy/jit/metainterp/test/test_recursive.py @@ -1,5 +1,5 @@ import py -from pypy.rlib.jit import JitDriver, we_are_jitted, OPTIMIZER_SIMPLE, hint +from pypy.rlib.jit import JitDriver, we_are_jitted, hint from pypy.rlib.jit import unroll_safe, dont_look_inside from pypy.rlib.objectmodel import we_are_translated 
from pypy.rlib.debug import fatalerror @@ -25,7 +25,7 @@ return f(n+1) else: return 1 - res = self.meta_interp(main, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(main, [20], enable_opts='') assert res == main(20) self.check_history(call=0) @@ -52,7 +52,7 @@ return f(n+1) else: return 1 - res = self.meta_interp(main, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(main, [20], enable_opts='') assert res == main(20) def test_recursion_three_times(self): @@ -75,7 +75,7 @@ print for i in range(1, 11): print '%3d %9d' % (i, f(i)) - res = self.meta_interp(main, [10], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(main, [10], enable_opts='') assert res == main(10) self.check_enter_count_at_most(11) @@ -95,7 +95,7 @@ opaque(n, i) i += 1 return stack.pop() - res = self.meta_interp(f, [1], optimizer=OPTIMIZER_SIMPLE, repeat=2, + res = self.meta_interp(f, [1], enable_opts='', repeat=2, policy=StopAtXPolicy(opaque)) assert res == 1 @@ -142,9 +142,9 @@ codes = [code, subcode] f = self.get_interpreter(codes) - assert self.meta_interp(f, [0, 0, 0], optimizer=OPTIMIZER_SIMPLE) == 42 + assert self.meta_interp(f, [0, 0, 0], enable_opts='') == 42 self.check_loops(int_add = 1, call_may_force = 1, call = 0) - assert self.meta_interp(f, [0, 0, 0], optimizer=OPTIMIZER_SIMPLE, + assert self.meta_interp(f, [0, 0, 0], enable_opts='', inline=True) == 42 self.check_loops(int_add = 2, call_may_force = 0, call = 0, guard_no_exception = 0) @@ -156,7 +156,7 @@ f = self.get_interpreter(codes) - assert self.meta_interp(f, [0, 0, 0], optimizer=OPTIMIZER_SIMPLE, + assert self.meta_interp(f, [0, 0, 0], enable_opts='', inline=True) == 42 # the call is fully inlined, because we jump to subcode[1], thus # skipping completely the JUMP_BACK in subcode[0] @@ -193,7 +193,7 @@ def main(n): return f("c-l", n) print main(100) - res = self.meta_interp(main, [100], optimizer=OPTIMIZER_SIMPLE, inline=True) + res = self.meta_interp(main, [100], enable_opts='', inline=True) assert res 
== 0 def test_guard_failure_and_then_exception_in_inlined_function(self): @@ -234,7 +234,7 @@ def main(n): return f("c-l", n) print main(1000) - res = self.meta_interp(main, [1000], optimizer=OPTIMIZER_SIMPLE, inline=True) + res = self.meta_interp(main, [1000], enable_opts='', inline=True) assert res == main(1000) def test_exception_in_inlined_function(self): @@ -274,7 +274,7 @@ return n def main(n): return f("c-l", n) - res = self.meta_interp(main, [100], optimizer=OPTIMIZER_SIMPLE, inline=True) + res = self.meta_interp(main, [100], enable_opts='', inline=True) assert res == main(100) def test_recurse_during_blackholing(self): @@ -312,7 +312,7 @@ myjitdriver.set_param('trace_eagerness', 5) return f("c-l", n) expected = main(100) - res = self.meta_interp(main, [100], optimizer=OPTIMIZER_SIMPLE, inline=True) + res = self.meta_interp(main, [100], enable_opts='', inline=True) assert res == expected def check_max_trace_length(self, length): @@ -338,7 +338,7 @@ n -= 1 return n TRACE_LIMIT = 66 - res = self.meta_interp(loop, [100], optimizer=OPTIMIZER_SIMPLE, inline=True, trace_limit=TRACE_LIMIT) + res = self.meta_interp(loop, [100], enable_opts='', inline=True, trace_limit=TRACE_LIMIT) assert res == 0 self.check_max_trace_length(TRACE_LIMIT) self.check_enter_count_at_most(10) # maybe @@ -363,7 +363,7 @@ n -= 1 return n TRACE_LIMIT = 20 - res = self.meta_interp(loop, [100], optimizer=OPTIMIZER_SIMPLE, inline=True, trace_limit=TRACE_LIMIT) + res = self.meta_interp(loop, [100], enable_opts='', inline=True, trace_limit=TRACE_LIMIT) self.check_max_trace_length(TRACE_LIMIT) self.check_aborted_count(8) self.check_enter_count_at_most(30) @@ -489,10 +489,10 @@ myjitdriver.set_param('inlining', False) return loop(100) - res = self.meta_interp(main, [0], optimizer=OPTIMIZER_SIMPLE, trace_limit=TRACE_LIMIT) + res = self.meta_interp(main, [0], enable_opts='', trace_limit=TRACE_LIMIT) self.check_loops(call_may_force=1, call=0) - res = self.meta_interp(main, [1], 
optimizer=OPTIMIZER_SIMPLE, trace_limit=TRACE_LIMIT) + res = self.meta_interp(main, [1], enable_opts='', trace_limit=TRACE_LIMIT) self.check_loops(call_may_force=0, call=0) def test_trace_from_start(self): diff --git a/pypy/jit/backend/test/support.py b/pypy/jit/backend/test/support.py --- a/pypy/jit/backend/test/support.py +++ b/pypy/jit/backend/test/support.py @@ -1,8 +1,8 @@ import py import sys from pypy.rlib.debug import debug_print -from pypy.rlib.jit import OPTIMIZER_FULL from pypy.translator.translator import TranslationContext, graphof +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_NAMES class BaseCompiledMixin(object): @@ -67,7 +67,7 @@ jd.warmstate.set_param_trace_eagerness(2) # for tests jd.warmstate.set_param_trace_limit(trace_limit) jd.warmstate.set_param_inlining(inline) - jd.warmstate.set_param_optimizer(OPTIMIZER_FULL) + jd.warmstate.set_param_enable_opts(ALL_OPTS_NAMES) mixlevelann = warmrunnerdesc.annhelper entry_point_graph = mixlevelann.getgraph(entry_point, [s_list_of_strings], annmodel.SomeInteger()) diff --git a/pypy/jit/metainterp/test/test_loop.py b/pypy/jit/metainterp/test/test_loop.py --- a/pypy/jit/metainterp/test/test_loop.py +++ b/pypy/jit/metainterp/test/test_loop.py @@ -1,5 +1,5 @@ import py -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, OPTIMIZER_FULL +from pypy.rlib.jit import JitDriver from pypy.rlib.objectmodel import compute_hash from pypy.jit.metainterp.warmspot import ll_meta_interp, get_stats from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin @@ -8,14 +8,15 @@ from pypy.jit.metainterp import history class LoopTest(object): - optimizer = OPTIMIZER_SIMPLE + enable_opts = '' + automatic_promotion_result = { 'int_add' : 6, 'int_gt' : 1, 'guard_false' : 1, 'jump' : 1, 'guard_value' : 3 } def meta_interp(self, f, args, policy=None): - return ll_meta_interp(f, args, optimizer=self.optimizer, + return ll_meta_interp(f, args, enable_opts=self.enable_opts, policy=policy, CPUClass=self.CPUClass, 
type_system=self.type_system) @@ -58,7 +59,7 @@ res = self.meta_interp(f, [6, 13]) assert res == f(6, 13) self.check_loop_count(1) - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loops(getfield_gc = 0, setfield_gc = 1) def test_loop_with_two_paths(self): @@ -87,7 +88,7 @@ return res * 2 res = self.meta_interp(f, [6, 33], policy=StopAtXPolicy(l)) assert res == f(6, 33) - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(3) else: self.check_loop_count(2) @@ -105,7 +106,7 @@ pattern >>= 1 return 42 self.meta_interp(f, [0xF0F0F0]) - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(3) else: self.check_loop_count(2) @@ -547,7 +548,7 @@ res = self.meta_interp(f, [100, 5], policy=StopAtXPolicy(externfn)) assert res == expected - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(2) self.check_tree_loop_count(2) # 1 loop, 1 bridge from interp else: @@ -751,7 +752,6 @@ res = self.meta_interp(f, [200]) - class TestOOtype(LoopTest, OOJitMixin): pass diff --git a/pypy/jit/metainterp/test/test_send_nounroll.py b/pypy/jit/metainterp/test/test_send_nounroll.py deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_nounroll.py +++ /dev/null @@ -1,30 +0,0 @@ - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_NO_UNROLL -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopNoPSpecTest(test_send.SendTests): - optimizer=OPTIMIZER_NO_UNROLL - - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, *args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - - -class TestLLtype(LoopNoPSpecTest, LLJitMixin): - pass - -class 
TestOOtype(LoopNoPSpecTest, OOJitMixin): - pass diff --git a/pypy/jit/metainterp/test/test_send_simple.py b/pypy/jit/metainterp/test/test_send_simple.py deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_simple.py +++ /dev/null @@ -1,29 +0,0 @@ -# xxx mostly pointless - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_SIMPLE -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopDummyTest(test_send.SendTests): - optimizer=OPTIMIZER_SIMPLE - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, *args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - -class TestLLtype(LoopDummyTest, LLJitMixin): - pass - -class TestOOtype(LoopDummyTest, OOJitMixin): - pass diff --git a/pypy/jit/metainterp/compile.py b/pypy/jit/metainterp/compile.py --- a/pypy/jit/metainterp/compile.py +++ b/pypy/jit/metainterp/compile.py @@ -85,6 +85,8 @@ """Try to compile a new loop by closing the current history back to the first operation. """ + from pypy.jit.metainterp.optimize import optimize_loop + history = metainterp.history loop = create_empty_loop(metainterp) loop.inputargs = history.inputargs @@ -105,8 +107,8 @@ loop.preamble.start_resumedescr = start_resumedescr try: - old_loop_token = jitdriver_sd.warmstate.optimize_loop( - metainterp_sd, old_loop_tokens, loop) + old_loop_token = optimize_loop(metainterp_sd, old_loop_tokens, loop, + jitdriver_sd.warmstate.enable_opts) except InvalidLoop: return None if old_loop_token is not None: @@ -570,6 +572,8 @@ """Try to compile a new bridge leading from the beginning of the history to some existing place. 
""" + from pypy.jit.metainterp.optimize import optimize_bridge + # The history contains new operations to attach as the code for the # failure of 'resumekey.guard_op'. # @@ -586,10 +590,9 @@ else: inline_short_preamble = True try: - target_loop_token = state.optimize_bridge(metainterp_sd, - old_loop_tokens, new_loop, - inline_short_preamble, - retraced) + target_loop_token = optimize_bridge(metainterp_sd, old_loop_tokens, + new_loop, state.enable_opts, + inline_short_preamble, retraced) except InvalidLoop: # XXX I am fairly convinced that optimize_bridge cannot actually raise # InvalidLoop diff --git a/pypy/jit/metainterp/test/test_warmspot.py b/pypy/jit/metainterp/test/test_warmspot.py --- a/pypy/jit/metainterp/test/test_warmspot.py +++ b/pypy/jit/metainterp/test/test_warmspot.py @@ -1,7 +1,7 @@ import py from pypy.jit.metainterp.warmspot import ll_meta_interp from pypy.jit.metainterp.warmspot import get_stats -from pypy.rlib.jit import JitDriver, OPTIMIZER_FULL, OPTIMIZER_SIMPLE +from pypy.rlib.jit import JitDriver from pypy.rlib.jit import unroll_safe from pypy.jit.backend.llgraph import runner from pypy.jit.metainterp.history import BoxInt diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py --- a/pypy/jit/metainterp/warmspot.py +++ b/pypy/jit/metainterp/warmspot.py @@ -25,6 +25,7 @@ from pypy.jit.metainterp.jitdriver import JitDriverStaticData from pypy.jit.codewriter import support, codewriter, longlong from pypy.jit.codewriter.policy import JitPolicy +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_NAMES # ____________________________________________________________ # Bootstrapping @@ -62,7 +63,8 @@ def jittify_and_run(interp, graph, args, repeat=1, backendopt=False, trace_limit=sys.maxint, - inline=False, loop_longevity=0, retrace_limit=5, **kwds): + inline=False, loop_longevity=0, retrace_limit=5, + enable_opts=ALL_OPTS_NAMES, **kwds): from pypy.config.config import ConfigError translator = interp.typer.annotator.translator 
try: @@ -81,6 +83,7 @@ jd.warmstate.set_param_inlining(inline) jd.warmstate.set_param_loop_longevity(loop_longevity) jd.warmstate.set_param_retrace_limit(retrace_limit) + jd.warmstate.set_param_enable_opts(enable_opts) warmrunnerdesc.finish() res = interp.eval_graph(graph, args) if not kwds.get('translate_support_code', False): @@ -145,8 +148,7 @@ class WarmRunnerDesc(object): def __init__(self, translator, policy=None, backendopt=True, CPUClass=None, - optimizer=None, ProfilerClass=EmptyProfiler, - jit_ffi=None, **kwds): + ProfilerClass=EmptyProfiler, **kwds): pyjitpl._warmrunnerdesc = self # this is a global for debugging only! self.set_translator(translator) self.memory_manager = memmgr.MemoryManager() @@ -165,7 +167,7 @@ elif self.opt.listops: self.prejit_optimizations_minimal_inline(policy, graphs) - self.build_meta_interp(ProfilerClass, jit_ffi) + self.build_meta_interp(ProfilerClass) self.make_args_specifications() # from pypy.jit.metainterp.virtualref import VirtualRefInfo @@ -184,7 +186,7 @@ self.rewrite_set_param() self.rewrite_force_virtual(vrefinfo) self.add_finish() - self.metainterp_sd.finish_setup(self.codewriter, optimizer=optimizer) + self.metainterp_sd.finish_setup(self.codewriter) def finish(self): vinfos = set([jd.virtualizable_info for jd in self.jitdrivers_sd]) @@ -283,14 +285,11 @@ translate_support_code, gcdescr=self.gcdescr) self.cpu = cpu - def build_meta_interp(self, ProfilerClass, jit_ffi=None): - if jit_ffi is None: - jit_ffi = self.translator.config.translation.jit_ffi + def build_meta_interp(self, ProfilerClass): self.metainterp_sd = MetaInterpStaticData(self.cpu, self.opt, ProfilerClass=ProfilerClass, - warmrunnerdesc=self, - jit_ffi=jit_ffi) + warmrunnerdesc=self) def make_virtualizable_infos(self): vinfos = {} @@ -786,16 +785,26 @@ annhelper = self.annhelper) def rewrite_set_param(self): + from pypy.rpython.lltypesystem.rstr import STR + closures = {} graphs = self.translator.graphs _, PTR_SET_PARAM_FUNCTYPE = 
self.cpu.ts.get_FuncType([lltype.Signed], lltype.Void) - def make_closure(jd, fullfuncname): + _, PTR_SET_PARAM_STR_FUNCTYPE = self.cpu.ts.get_FuncType( + [lltype.Ptr(STR)], lltype.Void) + def make_closure(jd, fullfuncname, is_string): state = jd.warmstate def closure(i): + if is_string: + i = hlstr(i) getattr(state, fullfuncname)(i) - funcptr = self.helper_func(PTR_SET_PARAM_FUNCTYPE, closure) - return Constant(funcptr, PTR_SET_PARAM_FUNCTYPE) + if is_string: + TP = PTR_SET_PARAM_STR_FUNCTYPE + else: + TP = PTR_SET_PARAM_FUNCTYPE + funcptr = self.helper_func(TP, closure) + return Constant(funcptr, TP) # for graph, block, i in find_set_param(graphs): op = block.operations[i] @@ -807,7 +816,8 @@ funcname = op.args[2].value key = jd, funcname if key not in closures: - closures[key] = make_closure(jd, 'set_param_' + funcname) + closures[key] = make_closure(jd, 'set_param_' + funcname, + funcname == 'enable_opts') op.opname = 'direct_call' op.args[:3] = [closures[key]] diff --git a/pypy/jit/metainterp/nounroll_optimize.py b/pypy/jit/metainterp/nounroll_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/nounroll_optimize.py +++ /dev/null @@ -1,36 +0,0 @@ - -from pypy.rlib.debug import debug_start, debug_stop -from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1 - -def optimize_loop(metainterp_sd, old_loop_tokens, loop): - debug_start("jit-optimize") - try: - return _optimize_loop(metainterp_sd, old_loop_tokens, loop) - finally: - debug_stop("jit-optimize") - -def _optimize_loop(metainterp_sd, old_loop_tokens, loop): - cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) - if old_loop_tokens: - return old_loop_tokens[0] - optimize_loop_1(metainterp_sd, loop, False) - return None - -def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, - inline_short_preamble, retraced=False): - debug_start("jit-optimize") - try: - return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge) - 
finally: - debug_stop("jit-optimize") - -def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge): - cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) - if old_loop_tokens: - old_loop_token = old_loop_tokens[0] - bridge.operations[-1].setdescr(old_loop_token) # patch jump target - optimize_bridge_1(metainterp_sd, bridge) - return old_loop_token - return None diff --git a/pypy/jit/metainterp/test/test_slist.py b/pypy/jit/metainterp/test/test_slist.py --- a/pypy/jit/metainterp/test/test_slist.py +++ b/pypy/jit/metainterp/test/test_slist.py @@ -1,8 +1,8 @@ import py from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE +from pypy.rlib.jit import JitDriver -class ListTests: +class ListTests(object): def test_basic_list(self): py.test.skip("not yet") @@ -60,7 +60,7 @@ myjitdriver.jit_merge_point(n=n, lst=lst) n -= 1 return lst[n] - res = self.meta_interp(f, [21], listops=True, optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [21], listops=True, enable_opts='') assert res == 0 def test_getitem(self): diff --git a/pypy/jit/metainterp/test/test_exception.py b/pypy/jit/metainterp/test/test_exception.py --- a/pypy/jit/metainterp/test/test_exception.py +++ b/pypy/jit/metainterp/test/test_exception.py @@ -1,6 +1,6 @@ import py, sys from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, dont_look_inside +from pypy.rlib.jit import JitDriver, dont_look_inside from pypy.rlib.rarithmetic import ovfcheck, LONG_BIT, intmask from pypy.jit.codewriter.policy import StopAtXPolicy @@ -446,10 +446,10 @@ n += 1 return m - res = self.meta_interp(f, [1, 1, 0], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [1, 1, 0], enable_opts='') assert res == f(1, 1, 0) res = self.meta_interp(f, [809644098, 16, 0], - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res == f(809644098, 16, 0) 
def test_int_neg_ovf(self): @@ -470,7 +470,7 @@ return m res = self.meta_interp(f, [-sys.maxint-1+100, 0], - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res == 16 def test_reraise_through_portal(self): @@ -551,7 +551,7 @@ return 8 res = self.meta_interp(main, [41], repeat=7, policy=StopAtXPolicy(x), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res == 8 def test_overflowerror_escapes(self): From commits-noreply at bitbucket.org Thu Mar 10 23:41:48 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 10 Mar 2011 23:41:48 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Check in the html. Message-ID: <20110310224148.587E0282BD7@codespeak.net> Author: Armin Rigo Branch: extradoc Changeset: r3374:2c7a3a8b004e Date: 2011-03-10 17:41 -0500 http://bitbucket.org/pypy/extradoc/changeset/2c7a3a8b004e/ Log: Check in the html. diff --git a/talk/ustour2011/yelp-talk.html b/talk/ustour2011/yelp-talk.html new file mode 100644 --- /dev/null +++ b/talk/ustour2011/yelp-talk.html @@ -0,0 +1,754 @@ + + + + + + + +PyPy + + + + + + + + + + + + + + +
+
+
+ + +
+
+
+

PyPy

+ + + + + + + + +
+

Armin Rigo

+
    +
  • Heinrich-Heine Universität, Germany
  • +
  • Open End AB, Sweden
  • +
+

March 2011

+
+ +
+
+

Introduction

+
    +
  • The PyPy project (1): a framework in which to write interpreters for +complicated dynamic languages
  • +
  • The PyPy project (2): a Python interpreter, supporting the complete +Python 2.7
  • +
+
+
+

CPython and PyPy

+
+
+

CPython and PyPy

+
    +
  • Two implementations
  • +
  • Two interpreters
  • +
  • CPython is written in C, PyPy is written in Python
  • +
  • PyPy tries to be equivalent to CPython
  • +
+
+
+

...and Jython and IronPython

+
    +
  • Jython: Python for the Java VM
  • +
  • IronPython: Python for .NET
  • +
  • Both try to integrate well with their VM
  • +
+
+
+

What is PyPy

+
    +
  • A project started in 2003
  • +
  • An Open Source effort of volunteers
  • +
  • With some funding support: 2 years from the European Union (2005-2007), +and now from Germany and Sweden (2010-2011).
  • +
+
+
+

What is PyPy

+
    +
  • Test-driven development
  • +
  • Now contains about 200 KLoC, and 150 KLoC of tests
  • +
+
+
+

What is the point of PyPy?

+
    +
  • CPython is older, it's the "official" version
  • +
  • PyPy is just a replacement, so why?
  • +
  • Moreover PyPy is not quite complete (e.g. C extension +modules are only partially supported)
  • +
+
+
+

Speed

+
    +
  • First answer: PyPy is faster, and may use less memory
  • +
  • ...or at least, it is "often" the case
  • +
+
+ +
+

And (optionally) extra features

+
    +
  • "Stackless"
  • +
  • Non-Python interpreters
  • +
  • and many smaller experiments
  • +
  • it is a better experimentation platform than CPython
  • +
+
+
+

Multi-threading

+
    +
  • Bad support on CPython (GIL)
  • +
  • PyPy has no answer to this question (there is also a GIL)
  • +
+
+
+

PyPy for the user

+
+
+

Speed

+speed.png +
+
+

Speed (2)

+speed2.png +
+
+

Memory usage

+
    +
  • Depends on the use case
  • +
  • Much better than CPython for instances of classes with no __slots__
  • +
  • On running PyPy's translation toolchain on 32-bits: 1.7GB with PyPy +(including the JIT machine code), versus 1.2GB with CPython
  • +
  • Experimental support for 32-bit "compact pointers" on 64-bit platforms
  • +
+
+
+

Just-in-Time Compilation

+
    +
  • Tracing JIT, like TraceMonkey
  • +
  • Complete by construction
  • +
  • Supports Intel x86, amd64, and soon ARM
  • +
+
+
+

Compatibility

+
    +
  • "Full" compatibility with CPython
  • +
  • More so than, say, Jython or IronPython
  • +
  • Main difference: Garbage Collection is not refcounting (because we +could get much better GCs) --- so __del__ methods are not called +immediately and predictably
  • +
  • Apart from that, it is really 99.99% compatible
  • +
+
+
+

Stackless Python

+
    +
  • Supports Stackless Python (microthreads)
  • +
  • In-progress: not integrated with the JIT so far
  • +
+
+
+

CPyExt

+
    +
  • A layer that integrates existing CPython C extension modules
  • +
  • Does not support all the details of the CPython C API
  • +
  • For some extension modules, we can have a performance issue
  • +
  • Work in progress
  • +
+
+
+

CPyExt works "often"

+
    +
  • wxPython
  • +
  • PIL
  • +
  • Boost
  • +
  • cx_Oracle
  • +
  • mysqldb
  • +
  • pycairo
  • +
+
+
+

Using CPyExt

+
    +
  • The C sources need recompiling
  • +
  • Sadly, they often contain a few details to fix
  • +
  • (typically, bad usage of reference counts)
  • +
+
+
+

Other ways to use C libraries

+
    +
  • Use ctypes (it is soon going to be fast on top of PyPy). +Example: pyexpat, sqlite3
  • +
  • Or write it as an RPython module built into PyPy, +but that's more involved
  • +
  • More ways could be possible, given work (SWIG backend, +Cython backend, C++ Reflex, etc...)
  • +
+
+
+

Architecture

+
+
+

Architecture

+

PyPy has two parts:

+
    +
  • A Python interpreter, written in RPython
  • +
  • A compilation toolchain -- the "translator" -- that translates +RPython code into C code (mainly)
  • +
+
+
+

PyPy's Python interpreter

+
    +
  • A priori similar to CPython, but written in RPython.
  • +
  • RPython is also valid Python: we test extensively by running +it on top of CPython
  • +
+
+
+

The translation toolchain

+
    +
  • Takes a program written in RPython, a custom subset of Python
  • +
  • Outputs the "same" program written in C
  • +
+
+
+

RPython is still mostly Python

+
    +
  • Completely valid Python (can be tested directly)
  • +
  • Can use lists, dicts, tuples, classes and instances, and so on, +but it must be type-safe
  • +
  • Contains no garbage collection detail (Py_INCREF/Py_DECREF in CPython)
  • +
  • Really a subset of Python: roughly "how a Java programmer writes his +first Python program"
  • +
  • ...well, plus tons of tricks :-)
  • +
+
+
+

RPython meta-programming

+
    +
  • RPython is actually only a restriction on the code after being imported, +so we can build up everything in (normal) full Python:

    +
    +for name in ["add", "sub", "mul"]:
    +    def f(x, y):
    +        ...
    +    globals()[name] = f
    +
    +
  • +
  • here, the code in f() is RPython, but the loop around it is not.

    +
  • +
+
+
+

Translation toolchain

+
    +
  • "Translation toolchain": statically compiles RPython code
  • +
  • Produces C code (or JVM or .NET code, experimentally)
  • +
  • Every aspect that is independent from the high-level +description of the interpreter is left out of RPython
  • +
  • Instead, they are added during translation
  • +
+
+
+

Translation overview (1)

+
    +
  • Start with the live RPython program
  • +
  • Build the Control Flow Graphs (CFGs) of the functions
  • +
  • Perform global type inference
  • +
  • We get a type-annotated version of the CFGs
  • +
  • Demo
  • +
+
+
+

Translation overview (2)

+
    +
  • "Lower" the level of the CFGs: transform their Python-like operations +into C-like operations
  • +
  • Do a number of additional transformations to insert the selected "aspects"
  • +
  • Generate C code from the low-level CFGs
  • +
+
+
+

Various aspects

+
    +
  • The object model, e.g. how to turn RPython classes and instances +to C structs
  • +
  • Garbage collection
  • +
  • Execution model: regular (recursive) or stackless
  • +
  • Just-in-Time compiler
  • +
+
+
+

Just-in-Time Compiler

+
+
+

Goal

+
    +
  • Speed up the interpreter written in RPython
  • +
  • Independent of the language that is being interpreted
  • +
  • Let us call it the P-interpreter (P = Python or other)
  • +
+
+
+

What is a JIT

+
    +
  • A JIT selects pieces of the user program (in language P) that would benefit +from compilation instead of interpretation
  • +
  • A "method JIT" selects individual P functions and compiles them, +possibly doing some inlining to improve performance (HotSpot, Psyco)
  • +
  • A "tracing JIT" selects individual code paths from loops and compiles +them, inlining aggressively (TraceMonkey, PyPy)
  • +
+
+
+

Tracing

+
    +
  • Run the user program, and do some lightweight profiling of loops
  • +
  • When a loop is run often enough, enter "Tracing Mode"
  • +
  • Run one more iteration of the loop in this mode
  • +
  • In addition to actually running the next iteration, it records a "trace"
  • +
+
+
+

Tracing (2)

+
    +
  • The trace is then turned into a machine code loop, and directly executed
  • +
  • Runs all the further iterations of the loop
  • +
+
+
+

Tracing (3)

+
    +
  • The machine code contains "guards" checking that all conditions met +during tracing are still valid
  • +
  • When a guard fails (latest: at the end of the loop), we fall back to +the regular P-interpreter
  • +
+
+
+

Meta-Tracing in PyPy

+
    +
  • The explanation above assumes a tracing JIT for the full Python +language
  • +
  • Would need to be maintained whenever we change the Python version we +support
  • +
  • Instead, we have a "meta-tracing JIT"
  • +
  • We trace the P-interpreter's main loop (running N times) interpreting +a P loop (running once)
  • +
+
+
+

Demo

+
+
+

Architecture of the PyPy JIT

+
    +
  • In advance, turn the CFGs of the P-interpreter into some bytecode +representation called "jitcode"
  • +
  • Uses some hints provided by the P-interpreter author (but not many)
  • +
  • "Links" into the P-interpreter's bytecode dispatch loop
  • +
  • In this way we add lightweight profiling code
  • +
+
+
+

Meta-Tracing

+
    +
  • When thresholds are reached, we start tracing
  • +
  • Tracing is done by running the "jitcodes" in a custom interpreter, +and recording a trace of all operations performed
  • +
  • Tracing is slow (double interpretation) but only runs for one iteration +of the loop
  • +
+
+
+

Optimization

+
    +
  • Advanced optimizations of the trace: escape analysis, integer bounds, +store sinking, string handling, FFI calls, unrolling, virtualrefs...
  • +
+
+
+

Machine Code Backend

+
    +
  • Turns a trace into machine code
  • +
  • Simple register allocation (linear code)
  • +
  • x86, x86-64, (ARM)
  • +
  • Guards compiled as conditional jumps to code that restores the full state
  • +
+
+
+

Blackhole interpreter

+
    +
  • When a guard fails, we need to go back to the regular P-interpreter
  • +
  • Cannot easily re-enter the P-interpreter from anywhere, because it +is just C code
  • +
  • Instead we use one more interpreter, the "blackhole interpreter".
  • +
+
+
+

Bridges

+
    +
  • When a guard fails often enough, run again the JIT from there
  • +
  • Meta-trace, optimize, generate machine code, run it
  • +
  • Such extra traces are called "bridges" instead of "loops"
  • +
  • In practice, most loops end up needing some number of bridges
  • +
  • We get "trees" of machine code
  • +
+
+
+

More topics

+
    +
  • Loops, bridges and "preamble loops"
  • +
  • Virtualizables
  • +
  • GC integration
  • +
  • Memory management of machine code
  • +
  • ...
  • +
+
+
+

Conclusion

+
+
+

Conclusion

+ +
+
+ + From commits-noreply at bitbucket.org Thu Mar 10 23:44:48 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 10 Mar 2011 23:44:48 +0100 (CET) Subject: [pypy-svn] pypy default: kleptog: Support for the PyOS_[gs]etsig functions Message-ID: <20110310224448.1E6BA282BA1@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42499:538a36110333 Date: 2011-03-10 23:39 +0100 http://bitbucket.org/pypy/pypy/changeset/538a36110333/ Log: kleptog: Support for the PyOS_[gs]etsig functions diff --git a/pypy/module/cpyext/src/pysignals.c b/pypy/module/cpyext/src/pysignals.c new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/src/pysignals.c @@ -0,0 +1,53 @@ +#include +#include + +/* From pythonrun.c in the standard Python distribution */ + +/* Wrappers around sigaction() or signal(). */ + +/* It may seem odd that these functions do not interact with the rest of the + * system (i.e. their effects are not visible in the signal module) but + * this is apparently intentional, CPython works the same way. The signal + * handlers defined in the signal module define what happens if the normal + * Python signal handler is called. 
+ * + * A bit whacky, but that's the way it is */ + +PyOS_sighandler_t +PyOS_getsig(int sig) +{ +#ifdef SA_RESTART + /* assume sigaction exists */ + struct sigaction context; + if (sigaction(sig, NULL, &context) == -1) + return SIG_ERR; + return context.sa_handler; +#else + PyOS_sighandler_t handler; + handler = signal(sig, SIG_IGN); + if (handler != SIG_ERR) + signal(sig, handler); + return handler; +#endif +} + +PyOS_sighandler_t +PyOS_setsig(int sig, PyOS_sighandler_t handler) +{ +#ifdef SA_RESTART + /* assume sigaction exists */ + struct sigaction context, ocontext; + context.sa_handler = handler; + sigemptyset(&context.sa_mask); + context.sa_flags = 0; + if (sigaction(sig, &context, &ocontext) == -1) + return SIG_ERR; + return ocontext.sa_handler; +#else + PyOS_sighandler_t oldhandler; + oldhandler = signal(sig, handler); + /* should check if this exists */ + siginterrupt(sig, 1); + return oldhandler; +#endif +} diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -24,7 +24,6 @@ _inittab = rffi.VOIDP PyThreadState = rffi.VOIDP PyInterpreterState = rffi.VOIDP -PyOS_sighandler_t = rffi.VOIDP Py_UNICODE = lltype.UniChar PyCompilerFlags = rffi.VOIDP _node = rffi.VOIDP @@ -2313,22 +2312,6 @@ own code.""" raise NotImplementedError - at cpython_api([rffi.INT_real], PyOS_sighandler_t) -def PyOS_getsig(space, i): - """Return the current signal handler for signal i. This is a thin wrapper around - either sigaction() or signal(). Do not call those functions - directly! PyOS_sighandler_t is a typedef alias for void - (*)(int).""" - raise NotImplementedError - - at cpython_api([rffi.INT_real, PyOS_sighandler_t], PyOS_sighandler_t) -def PyOS_setsig(space, i, h): - """Set the signal handler for signal i to be h; return the old signal handler. - This is a thin wrapper around either sigaction() or signal(). Do - not call those functions directly! 
PyOS_sighandler_t is a typedef - alias for void (*)(int).""" - raise NotImplementedError - @cpython_api([rffi.CCHARP, FILE], FILE) def PySys_GetFile(space, name, default): """Return the FILE* associated with the object name in the diff --git a/pypy/module/cpyext/include/pysignals.h b/pypy/module/cpyext/include/pysignals.h new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/include/pysignals.h @@ -0,0 +1,19 @@ + +/* signal interface */ + +#ifndef Py_PYSIGNALS_H +#define Py_PYSIGNALS_H +#ifdef __cplusplus +extern "C" { +#endif + +typedef void (*PyOS_sighandler_t)(int); + +PyOS_sighandler_t PyOS_setsig(int sig, PyOS_sighandler_t handler); +PyOS_sighandler_t PyOS_getsig(int sig); + + +#ifdef __cplusplus +} +#endif +#endif /* !Py_PYSIGNALS_H */ diff --git a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -331,6 +331,8 @@ 'PyCapsule_SetContext', 'PyCapsule_Import', 'PyCapsule_Type', 'init_capsule', 'PyObject_AsReadBuffer', 'PyObject_AsWriteBuffer', 'PyObject_CheckReadBuffer', + + 'PyOS_getsig', 'PyOS_setsig', 'PyStructSequence_InitType', 'PyStructSequence_New', ] @@ -886,6 +888,7 @@ source_dir / "cobject.c", source_dir / "structseq.c", source_dir / "capsule.c", + source_dir / "pysignals.c", ], separate_module_sources=separate_module_sources, export_symbols=export_symbols_eci, diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -118,6 +118,7 @@ #include "datetime.h" #include "pystate.h" #include "fileobject.h" +#include "pysignals.h" // XXX This shouldn't be included here #include "structmember.h" diff --git a/pypy/module/cpyext/test/test_pysignals.py b/pypy/module/cpyext/test/test_pysignals.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/test/test_pysignals.py @@ -0,0 +1,30 @@ +from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase + 
+class AppTestBufferObject(AppTestCpythonExtensionBase): + def test_signals(self): + module = self.import_extension('foo', [ + ("test_signals", "METH_NOARGS", + """ + PyOS_sighandler_t handler = SIG_IGN; + PyOS_sighandler_t oldhandler; + int result = 0; + + oldhandler = PyOS_getsig(SIGUSR1); + + handler = PyOS_setsig(SIGUSR1, SIG_IGN); + + if( oldhandler != handler ) + result += 1; + + handler = PyOS_setsig(SIGUSR1, oldhandler); + + if( handler != SIG_IGN ) + result += 2; + + return PyInt_FromLong(result); + """), + ], prologue = """ + #include + """) + res = module.test_signals() + assert res == 0 From commits-noreply at bitbucket.org Thu Mar 10 23:44:48 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 10 Mar 2011 23:44:48 +0100 (CET) Subject: [pypy-svn] pypy default: Fix PyOS_setsig function on Windows. Message-ID: <20110310224448.E58DD282BA1@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42500:dd8ead285f81 Date: 2011-03-10 23:43 +0100 http://bitbucket.org/pypy/pypy/changeset/dd8ead285f81/ Log: Fix PyOS_setsig function on Windows. 
diff --git a/pypy/module/cpyext/src/pysignals.c b/pypy/module/cpyext/src/pysignals.c --- a/pypy/module/cpyext/src/pysignals.c +++ b/pypy/module/cpyext/src/pysignals.c @@ -46,8 +46,10 @@ #else PyOS_sighandler_t oldhandler; oldhandler = signal(sig, handler); +#ifndef MS_WINDOWS /* should check if this exists */ siginterrupt(sig, 1); +#endif return oldhandler; #endif } diff --git a/pypy/module/cpyext/test/test_pysignals.py b/pypy/module/cpyext/test/test_pysignals.py --- a/pypy/module/cpyext/test/test_pysignals.py +++ b/pypy/module/cpyext/test/test_pysignals.py @@ -9,14 +9,14 @@ PyOS_sighandler_t oldhandler; int result = 0; - oldhandler = PyOS_getsig(SIGUSR1); + oldhandler = PyOS_getsig(SIGFPE); - handler = PyOS_setsig(SIGUSR1, SIG_IGN); + handler = PyOS_setsig(SIGFPE, SIG_IGN); if( oldhandler != handler ) result += 1; - handler = PyOS_setsig(SIGUSR1, oldhandler); + handler = PyOS_setsig(SIGFPE, oldhandler); if( handler != SIG_IGN ) result += 2; From commits-noreply at bitbucket.org Fri Mar 11 03:03:25 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Fri, 11 Mar 2011 03:03:25 +0100 (CET) Subject: [pypy-svn] pypy default: when listdir() receives a unicode string it should return a list of unicode strings. Message-ID: <20110311020325.435AB282B8B@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42502:6500e59df109 Date: 2011-03-10 21:03 -0500 http://bitbucket.org/pypy/pypy/changeset/6500e59df109/ Log: when listdir() receives a unicode string it should return a list of unicode strings. 
diff --git a/pypy/module/posix/test/test_posix2.py b/pypy/module/posix/test/test_posix2.py --- a/pypy/module/posix/test/test_posix2.py +++ b/pypy/module/posix/test/test_posix2.py @@ -1,7 +1,7 @@ # -*- coding: utf-8 -*- -from pypy.objspace.std import StdObjSpace +from pypy.objspace.std import StdObjSpace from pypy.tool.udir import udir from pypy.conftest import gettestobjspace from pypy.tool.autopath import pypydir @@ -17,7 +17,7 @@ else: # On windows, os.popen uses the subprocess module mod.space = gettestobjspace(usemodules=['posix', '_rawffi', 'thread']) - mod.path = udir.join('posixtestfile.txt') + mod.path = udir.join('posixtestfile.txt') mod.path.write("this is a test") mod.path2 = udir.join('test_posix2-') pdir = udir.ensure('posixtestdir', dir=True) @@ -26,6 +26,9 @@ pdir.join('file2').write("test2") pdir.join('another_longer_file_name').write("test3") mod.pdir = pdir + unicode_dir = udir.ensure('fi\xc5\x9fier.txt', dir=True) + unicode_dir.join('somefile').write('who cares?') + mod.unicode_dir = unicode_dir # in applevel tests, os.stat uses the CPython os.stat. 
# Be sure to return times with full precision @@ -43,13 +46,14 @@ GET_POSIX = "(): import %s as m ; return m" % os.name -class AppTestPosix: - def setup_class(cls): - cls.space = space +class AppTestPosix: + def setup_class(cls): + cls.space = space cls.w_posix = space.appexec([], GET_POSIX) cls.w_path = space.wrap(str(path)) cls.w_path2 = space.wrap(str(path2)) cls.w_pdir = space.wrap(str(pdir)) + cls.w_unicode_dir = space.wrap(str(unicode_dir).decode(sys.getfilesystemencoding())) if hasattr(os, 'getuid'): cls.w_getuid = space.wrap(os.getuid()) cls.w_geteuid = space.wrap(os.geteuid()) @@ -76,16 +80,16 @@ def setup_method(self, meth): if getattr(meth, 'need_sparse_files', False): need_sparse_files() - - def test_posix_is_pypy_s(self): - assert self.posix.__file__ - def test_some_posix_basic_operation(self): - path = self.path - posix = self.posix + def test_posix_is_pypy_s(self): + assert self.posix.__file__ + + def test_some_posix_basic_operation(self): + path = self.path + posix = self.posix fd = posix.open(path, posix.O_RDONLY, 0777) fd2 = posix.dup(fd) - assert not posix.isatty(fd2) + assert not posix.isatty(fd2) s = posix.read(fd, 1) assert s == 't' posix.lseek(fd, 5, 0) @@ -125,7 +129,7 @@ assert hasattr(st, 'st_rdev') def test_stat_float_times(self): - path = self.path + path = self.path posix = self.posix current = posix.stat_float_times() assert current is True @@ -180,13 +184,13 @@ assert new == st assert type(new) is type(st) - def test_open_exception(self): + def test_open_exception(self): posix = self.posix - try: + try: posix.open('qowieuqwoeiu', 0, 0) except OSError, e: assert e.filename == 'qowieuqwoeiu' - else: + else: assert 0 def test_filename_exception(self): @@ -229,15 +233,15 @@ else: assert 0 - def test_functions_raise_error(self): + def test_functions_raise_error(self): def ex(func, *args): try: func(*args) - except OSError: + except OSError: pass else: raise AssertionError("%s(%s) did not raise" %( - func.__name__, + func.__name__, ", 
".join([str(x) for x in args]))) UNUSEDFD = 123123 ex(self.posix.open, "qweqwe", 0, 0) @@ -249,7 +253,7 @@ #UMPF cpython raises IOError ex(self.posix.ftruncate, UNUSEDFD, 123) ex(self.posix.fstat, UNUSEDFD) ex(self.posix.stat, "qweqwehello") - # how can getcwd() raise? + # how can getcwd() raise? ex(self.posix.dup, UNUSEDFD) def test_fdopen(self): @@ -286,13 +290,20 @@ def test_listdir(self): pdir = self.pdir - posix = self.posix + posix = self.posix result = posix.listdir(pdir) result.sort() assert result == ['another_longer_file_name', 'file1', 'file2'] + def test_listdir_unicode(self): + unicode_dir = self.unicode_dir + posix = self.posix + result = posix.listdir(unicode_dir) + result.sort() + assert result == [u'somefile'] + assert type(result[0]) is unicode def test_access(self): pdir = self.pdir + '/file1' @@ -373,7 +384,7 @@ os.waitpid(pid, 0) assert open("onefile").read() == "1" os.unlink("onefile") - + def test_execv_raising(self): os = self.posix raises(OSError, 'os.execv("saddsadsadsadsa", ["saddsadsasaddsa"])') @@ -495,7 +506,7 @@ def test_os_getgid(self): os = self.posix assert os.getgid() == self.getgid - + if hasattr(os, 'getgroups'): def test_os_getgroups(self): os = self.posix @@ -816,8 +827,8 @@ class AppTestEnvironment(object): - def setup_class(cls): - cls.space = space + def setup_class(cls): + cls.space = space cls.w_posix = space.appexec([], "(): import %s as m ; return m" % os.name) cls.w_os = space.appexec([], "(): import os; return os") cls.w_path = space.wrap(str(path)) @@ -919,7 +930,7 @@ import pexpect except ImportError: py.test.skip("pexpect not found") - + def _spawn(self, *args, **kwds): import pexpect print 'SPAWN:', args, kwds diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py --- a/pypy/module/posix/interp_posix.py +++ b/pypy/module/posix/interp_posix.py @@ -17,7 +17,7 @@ c_int = "c_int" c_nonnegint = "c_nonnegint" -class FileEncoder: +class FileEncoder(object): def __init__(self, space, w_obj): 
self.space = space self.w_obj = w_obj @@ -32,7 +32,7 @@ def as_unicode(self): return self.space.unicode_w(self.w_obj) -class FileDecoder: +class FileDecoder(object): def __init__(self, space, w_obj): self.space = space self.w_obj = w_obj @@ -86,7 +86,7 @@ try: fd = dispatch_filename(rposix.open)( space, w_fname, flag, mode) - except OSError, e: + except OSError, e: raise wrap_oserror2(space, e, w_fname) return space.wrap(fd) @@ -97,10 +97,10 @@ current position; if how == 2, to the end.""" try: pos = os.lseek(fd, pos, how) - except OSError, e: - raise wrap_oserror(space, e) - else: - return space.wrap(pos) + except OSError, e: + raise wrap_oserror(space, e) + else: + return space.wrap(pos) @unwrap_spec(fd=c_int) def isatty(space, fd): @@ -108,39 +108,39 @@ slave end of a terminal.""" try: res = os.isatty(fd) - except OSError, e: - raise wrap_oserror(space, e) - else: - return space.wrap(res) + except OSError, e: + raise wrap_oserror(space, e) + else: + return space.wrap(res) @unwrap_spec(fd=c_int, buffersize=int) def read(space, fd, buffersize): """Read data from a file descriptor.""" - try: + try: s = os.read(fd, buffersize) - except OSError, e: - raise wrap_oserror(space, e) - else: - return space.wrap(s) + except OSError, e: + raise wrap_oserror(space, e) + else: + return space.wrap(s) @unwrap_spec(fd=c_int, data='bufferstr') def write(space, fd, data): """Write a string to a file descriptor. 
Return the number of bytes actually written, which may be smaller than len(data).""" - try: + try: res = os.write(fd, data) - except OSError, e: - raise wrap_oserror(space, e) - else: - return space.wrap(res) + except OSError, e: + raise wrap_oserror(space, e) + else: + return space.wrap(res) @unwrap_spec(fd=c_int) def close(space, fd): """Close a file descriptor (for low level IO).""" - try: + try: os.close(fd) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) @unwrap_spec(fd_low=c_int, fd_high=c_int) def closerange(fd_low, fd_high): @@ -160,8 +160,8 @@ space.wrap(e.filename)) raise OperationError(space.w_OSError, w_error) raise AssertionError - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) def fsync(space, w_fd): """Force write of file with filedescriptor to disk.""" @@ -242,8 +242,8 @@ file descriptor.""" try: st = os.fstat(fd) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) else: return build_stat_result(space, st) @@ -264,9 +264,9 @@ try: st = dispatch_filename(rposix.stat)(space, w_path) - except OSError, e: + except OSError, e: raise wrap_oserror2(space, e, w_path) - else: + else: return build_stat_result(space, st) def lstat(space, w_path): @@ -291,7 +291,7 @@ If newval is omitted, return the current setting. 
""" state = space.fromcache(StatState) - + if w_value is None: return space.wrap(state.stat_float_times) else: @@ -303,8 +303,8 @@ descriptor.""" try: newfd = os.dup(fd) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) else: return space.wrap(newfd) @@ -313,8 +313,8 @@ """Duplicate a file descriptor.""" try: os.dup2(old_fd, new_fd) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) @unwrap_spec(mode=c_int) def access(space, w_path, mode): @@ -357,9 +357,9 @@ """Execute the command (a string) in a subshell.""" try: rc = os.system(cmd) - except OSError, e: - raise wrap_oserror(space, e) - else: + except OSError, e: + raise wrap_oserror(space, e) + else: return space.wrap(rc) def unlink(space, w_path): @@ -396,9 +396,9 @@ """Return the current working directory.""" try: cur = os.getcwd() - except OSError, e: - raise wrap_oserror(space, e) - else: + except OSError, e: + raise wrap_oserror(space, e) + else: return space.wrap(cur) if sys.platform == 'win32': @@ -462,7 +462,7 @@ class State: - def __init__(self, space): + def __init__(self, space): self.space = space self.w_environ = space.newdict() if _WIN: @@ -478,7 +478,7 @@ self.cryptProviderPtr[0] = HCRYPTPROV._default return True -def get(space): +def get(space): return space.fromcache(State) def _convertenviron(space, w_env): @@ -492,7 +492,7 @@ try: os.environ[name] = value except OSError, e: - raise wrap_oserror(space, e) + raise wrap_oserror(space, e) @unwrap_spec(name=str) def unsetenv(space, name): @@ -502,7 +502,7 @@ except KeyError: pass except OSError, e: - raise wrap_oserror(space, e) + raise wrap_oserror(space, e) def listdir(space, w_dirname): @@ -512,11 +512,16 @@ The list is in arbitrary order. It does not include the special entries '.' and '..' 
even if they are present in the directory.""" + from pypy.module.sys.interp_encoding import getfilesystemencoding try: if space.isinstance_w(w_dirname, space.w_unicode): dirname = FileEncoder(space, w_dirname) result = rposix.listdir(dirname) - result_w = [space.wrap(s) for s in result] + w_fs_encoding = getfilesystemencoding(space) + result_w = [ + space.call_method(space.wrap(s), "decode", w_fs_encoding) + for s in result + ] else: dirname = space.str_w(w_dirname) result = rposix.listdir(dirname) @@ -527,10 +532,10 @@ def pipe(space): "Create a pipe. Returns (read_end, write_end)." - try: + try: fd1, fd2 = os.pipe() - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) return space.newtuple([space.wrap(fd1), space.wrap(fd2)]) @unwrap_spec(mode=c_int) @@ -546,14 +551,14 @@ try: dispatch_filename_2(rposix.rename)(space, w_old, w_new) except OSError, e: - raise wrap_oserror(space, e) + raise wrap_oserror(space, e) @unwrap_spec(mode=c_int) def mkfifo(space, w_filename, mode=0666): """Create a FIFO (a POSIX named pipe).""" try: dispatch_filename(rposix.mkfifo)(space, w_filename, mode) - except OSError, e: + except OSError, e: raise wrap_oserror2(space, e, w_filename) @unwrap_spec(mode=c_int, device=c_int) @@ -566,7 +571,7 @@ os.makedev()), otherwise it is ignored.""" try: dispatch_filename(rposix.mknod)(space, w_filename, mode, device) - except OSError, e: + except OSError, e: raise wrap_oserror2(space, e, w_filename) @unwrap_spec(mask=c_int) @@ -577,10 +582,10 @@ def getpid(space): "Return the current process id." - try: + try: pid = os.getpid() - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) return space.wrap(pid) @unwrap_spec(pid=c_int, sig=c_int) @@ -608,25 +613,25 @@ @unwrap_spec(src=str, dst=str) def link(space, src, dst): "Create a hard link to a file." 
- try: + try: os.link(src, dst) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) @unwrap_spec(src=str, dst=str) def symlink(space, src, dst): "Create a symbolic link pointing to src named dst." - try: + try: os.symlink(src, dst) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) @unwrap_spec(path=str) def readlink(space, path): "Return a string representing the path to which the symbolic link points." try: result = os.readlink(path) - except OSError, e: + except OSError, e: raise wrap_oserror(space, e, path) return space.wrap(result) @@ -693,13 +698,13 @@ @unwrap_spec(pid=c_int, options=c_int) def waitpid(space, pid, options): """ waitpid(pid, options) -> (pid, status) - + Wait for completion of a given child process. """ try: pid, status = os.waitpid(pid, options) - except OSError, e: - raise wrap_oserror(space, e) + except OSError, e: + raise wrap_oserror(space, e) return space.newtuple([space.wrap(pid), space.wrap(status)]) @unwrap_spec(status=c_int) @@ -791,7 +796,7 @@ def setsid(space): """setsid() -> pid - + Creates a new session with this process as the leader. """ try: @@ -881,14 +886,14 @@ def getgid(space): """ getgid() -> gid - + Return the current process's group id. """ return space.wrap(os.getgid()) def getegid(space): """ getegid() -> gid - + Return the current process's effective group id. 
""" return space.wrap(os.getegid()) @@ -923,7 +928,7 @@ os.setpgrp() except OSError, e: raise wrap_oserror(space, e) - return space.w_None + return space.w_None def getppid(space): """ getppid() -> ppid @@ -954,7 +959,7 @@ os.setpgid(pid, pgrp) except OSError, e: raise wrap_oserror(space, e) - return space.w_None + return space.w_None @unwrap_spec(ruid=c_int, euid=c_int) def setreuid(space, ruid, euid): @@ -966,7 +971,7 @@ os.setreuid(ruid, euid) except OSError, e: raise wrap_oserror(space, e) - return space.w_None + return space.w_None @unwrap_spec(rgid=c_int, egid=c_int) def setregid(space, rgid, egid): @@ -1001,7 +1006,7 @@ os.setsid() except OSError, e: raise wrap_oserror(space, e) - return space.w_None + return space.w_None def declare_new_w_star(name): if name in RegisterOs.w_star_returning_int: From commits-noreply at bitbucket.org Fri Mar 11 04:29:27 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Fri, 11 Mar 2011 04:29:27 +0100 (CET) Subject: [pypy-svn] pypy default: os.symlink can now take unicode strings. Message-ID: <20110311032927.B3BDC282B8B@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42503:56c20f84016f Date: 2011-03-10 22:29 -0500 http://bitbucket.org/pypy/pypy/changeset/56c20f84016f/ Log: os.symlink can now take unicode strings. 
diff --git a/pypy/rlib/rposix.py b/pypy/rlib/rposix.py --- a/pypy/rlib/rposix.py +++ b/pypy/rlib/rposix.py @@ -149,6 +149,13 @@ else: os.mknod(path.as_bytes(), mode, device) + at specialize.argtype(0, 1) +def symlink(src, dest): + if isinstance(src, str): + os.symlink(src, dest) + else: + os.symlink(src.as_bytes(), dest.as_bytes()) + if os.name == 'nt': import nt def _getfullpathname(path): diff --git a/pypy/module/posix/test/test_posix2.py b/pypy/module/posix/test/test_posix2.py --- a/pypy/module/posix/test/test_posix2.py +++ b/pypy/module/posix/test/test_posix2.py @@ -779,6 +779,16 @@ assert os.WIFEXITED(status1) assert os.WEXITSTATUS(status1) == myprio + 3 + if hasattr(os, 'symlink'): + def test_symlink(self): + posix = self.posix + unicode_dir = self.unicode_dir + dest = u"%s/file.txt" % unicode_dir + posix.symlink(u"%s/somefile" % unicode_dir, dest) + with open(dest) as f: + data = f.read() + assert data == "who cares?" + def test_tmpfile(self): os = self.posix f = os.tmpfile() diff --git a/pypy/module/posix/interp_posix.py b/pypy/module/posix/interp_posix.py --- a/pypy/module/posix/interp_posix.py +++ b/pypy/module/posix/interp_posix.py @@ -618,11 +618,10 @@ except OSError, e: raise wrap_oserror(space, e) - at unwrap_spec(src=str, dst=str) -def symlink(space, src, dst): +def symlink(space, w_src, w_dst): "Create a symbolic link pointing to src named dst." 
try: - os.symlink(src, dst) + dispatch_filename_2(rposix.symlink)(space, w_src, w_dst) except OSError, e: raise wrap_oserror(space, e) From commits-noreply at bitbucket.org Fri Mar 11 08:56:13 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 08:56:13 +0100 (CET) Subject: [pypy-svn] pypy default: Skip posix-unicode tests when filesystem encoding is ascii Message-ID: <20110311075613.12311282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42504:0b0d7bb40f5b Date: 2011-03-11 07:55 +0000 http://bitbucket.org/pypy/pypy/changeset/0b0d7bb40f5b/ Log: Skip posix-unicode tests when filesystem encoding is ascii (i.e on 32bit tannit) diff --git a/pypy/module/posix/test/test_posix2.py b/pypy/module/posix/test/test_posix2.py --- a/pypy/module/posix/test/test_posix2.py +++ b/pypy/module/posix/test/test_posix2.py @@ -53,7 +53,12 @@ cls.w_path = space.wrap(str(path)) cls.w_path2 = space.wrap(str(path2)) cls.w_pdir = space.wrap(str(pdir)) - cls.w_unicode_dir = space.wrap(str(unicode_dir).decode(sys.getfilesystemencoding())) + try: + cls.w_unicode_dir = space.wrap( + str(unicode_dir).decode(sys.getfilesystemencoding())) + except UnicodeDecodeError: + # filesystem encoding is not good enough + cls.w_unicode_dir = space.w_None if hasattr(os, 'getuid'): cls.w_getuid = space.wrap(os.getuid()) cls.w_geteuid = space.wrap(os.geteuid()) @@ -299,6 +304,8 @@ def test_listdir_unicode(self): unicode_dir = self.unicode_dir + if unicode_dir is None: + skip("encoding not good enough") posix = self.posix result = posix.listdir(unicode_dir) result.sort() @@ -783,6 +790,8 @@ def test_symlink(self): posix = self.posix unicode_dir = self.unicode_dir + if unicode_dir is None: + skip("encoding not good enough") dest = u"%s/file.txt" % unicode_dir posix.symlink(u"%s/somefile" % unicode_dir, dest) with open(dest) as f: From commits-noreply at bitbucket.org Fri Mar 11 09:02:12 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 
09:02:12 +0100 (CET) Subject: [pypy-svn] pypy default: Reset the timeout to its previous value. Message-ID: <20110311080212.51B8A36C209@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42505:00dc3cc7aac6 Date: 2011-03-11 09:02 +0100 http://bitbucket.org/pypy/pypy/changeset/00dc3cc7aac6/ Log: Reset the timeout to its previous value. The test suite used to block in several places, this is no more the case. And test_hashlib needs more time for the pure-python implementation of sha256 diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -30,7 +30,7 @@ def pytest_addoption(parser): group = parser.getgroup("complicance testing options") group.addoption('-T', '--timeout', action="store", type="string", - default="100", dest="timeout", + default="1000", dest="timeout", help="fail a test module after the given timeout. " "specify in seconds or 'NUMmp' aka Mega-Pystones") group.addoption('--pypy', action="store", type="string", From commits-noreply at bitbucket.org Fri Mar 11 09:10:30 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 09:10:30 +0100 (CET) Subject: [pypy-svn] pypy default: Add a modifiable copy of test_cmd_line_script Message-ID: <20110311081030.65EE8282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42506:72d42771c863 Date: 2011-03-11 09:05 +0100 http://bitbucket.org/pypy/pypy/changeset/72d42771c863/ Log: Add a modifiable copy of test_cmd_line_script diff --git a/lib-python/2.7.0/test/test_cmd_line_script.py b/lib-python/modified-2.7.0/test/test_cmd_line_script.py copy from lib-python/2.7.0/test/test_cmd_line_script.py copy to lib-python/modified-2.7.0/test/test_cmd_line_script.py From commits-noreply at bitbucket.org Fri Mar 11 09:10:31 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 09:10:31 +0100 (CET) Subject: [pypy-svn] pypy default: Skip two tests that try to import __main__.pyc when 
__main__.py is missing Message-ID: <20110311081031.0A7C0282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42507:2a1afe6388d8 Date: 2011-03-11 09:10 +0100 http://bitbucket.org/pypy/pypy/changeset/2a1afe6388d8/ Log: Skip two tests that try to import __main__.pyc when __main__.py is missing diff --git a/lib-python/modified-2.7.0/test/test_cmd_line_script.py b/lib-python/modified-2.7.0/test/test_cmd_line_script.py --- a/lib-python/modified-2.7.0/test/test_cmd_line_script.py +++ b/lib-python/modified-2.7.0/test/test_cmd_line_script.py @@ -112,6 +112,8 @@ self._check_script(script_dir, script_name, script_dir, '') def test_directory_compiled(self): + if test.test_support.check_impl_detail(pypy=True): + raise unittest.SkipTest("pypy won't load lone .pyc files") with temp_dir() as script_dir: script_name = _make_test_script(script_dir, '__main__') compiled_name = compile_script(script_name) @@ -173,6 +175,8 @@ script_name, 'test_pkg') def test_package_compiled(self): + if test.test_support.check_impl_detail(pypy=True): + raise unittest.SkipTest("pypy won't load lone .pyc files") with temp_dir() as script_dir: pkg_dir = os.path.join(script_dir, 'test_pkg') make_pkg(pkg_dir) From commits-noreply at bitbucket.org Fri Mar 11 13:41:28 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 13:41:28 +0100 (CET) Subject: [pypy-svn] pypy default: Add a failing test about borrowed references. Message-ID: <20110311124128.599AD282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42508:a4e4beea777e Date: 2011-03-11 13:41 +0100 http://bitbucket.org/pypy/pypy/changeset/a4e4beea777e/ Log: Add a failing test about borrowed references. You'd better not borrow the same object from different containers... 
diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -38,3 +38,24 @@ ]) assert module.test_borrowing() # the test should not leak + def test_borrow_destroy(self): + skip("FIXME") + module = self.import_extension('foo', [ + ("test_borrow_destroy", "METH_NOARGS", + """ + PyObject *i = PyInt_FromLong(42); + PyObject *j; + PyObject *t1 = PyTuple_Pack(1, i); + PyObject *t2 = PyTuple_Pack(1, i); + Py_DECREF(i); + + i = PyTuple_GetItem(t1, 0); + PyTuple_GetItem(t2, 0); + Py_DECREF(t2); + + j = PyInt_FromLong(PyInt_AsLong(i)); + Py_DECREF(t1); + return j; + """), + ]) + assert module.test_borrow_destroy() == 42 From commits-noreply at bitbucket.org Fri Mar 11 13:52:32 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Fri, 11 Mar 2011 13:52:32 +0100 (CET) Subject: [pypy-svn] pypy default: actually exit with proper exit code Message-ID: <20110311125232.45CAD282B8B@codespeak.net> Author: holger krekel Branch: Changeset: r42509:4385c59d38f7 Date: 2011-03-11 13:51 +0100 http://bitbucket.org/pypy/pypy/changeset/4385c59d38f7/ Log: actually exit with proper exit code diff --git a/py/bin/py.test b/py/bin/py.test --- a/py/bin/py.test +++ b/py/bin/py.test @@ -1,3 +1,3 @@ #!/usr/bin/env python from _findpy import pytest -pytest.main() +raise SystemExit(pytest.main()) From commits-noreply at bitbucket.org Fri Mar 11 13:57:18 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Fri, 11 Mar 2011 13:57:18 +0100 (CET) Subject: [pypy-svn] pypy default: using dev version of py with terminal coloring fix (thanks amaury for pointing out) Message-ID: <20110311125718.1C38D282B8B@codespeak.net> Author: holger krekel Branch: Changeset: r42510:a5125605632b Date: 2011-03-11 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/a5125605632b/ Log: using dev version of py with terminal coloring fix (thanks amaury for pointing out) diff --git 
a/py/_io/terminalwriter.py b/py/_io/terminalwriter.py --- a/py/_io/terminalwriter.py +++ b/py/_io/terminalwriter.py @@ -199,8 +199,10 @@ attr |= FOREGROUND_BLUE elif kw.pop('green', False): attr |= FOREGROUND_GREEN + elif kw.pop('yellow', False): + attr |= FOREGROUND_GREEN|FOREGROUND_RED else: - attr |= FOREGROUND_BLACK # (oldcolors & 0x0007) + attr |= oldcolors & 0x0007 SetConsoleTextAttribute(handle, attr) if not isinstance(self._file, WriteFile): @@ -211,7 +213,8 @@ SetConsoleTextAttribute(handle, oldcolors) def line(self, s="", **kw): - self.write(s+"\n", **kw) + self.write(s, **kw) # works better for resetting colors + self.write("\n") class WriteFile(object): def __init__(self, writemethod, encoding=None): diff --git a/py/__init__.py b/py/__init__.py --- a/py/__init__.py +++ b/py/__init__.py @@ -8,7 +8,7 @@ (c) Holger Krekel and others, 2004-2010 """ -__version__ = '1.4.2' +__version__ = '1.4.3.dev0' from py import _apipkg From commits-noreply at bitbucket.org Fri Mar 11 14:42:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 14:42:57 +0100 (CET) Subject: [pypy-svn] pypy getdict-signature: A branch to change the signature of Wrappable.getdict: from (self) to (self, space) Message-ID: <20110311134257.3EE0036C20D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: getdict-signature Changeset: r42511:2b1d5b2c01a9 Date: 2011-03-11 14:15 +0100 http://bitbucket.org/pypy/pypy/changeset/2b1d5b2c01a9/ Log: A branch to change the signature of Wrappable.getdict: from (self) to (self, space) From commits-noreply at bitbucket.org Fri Mar 11 14:43:00 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 14:43:00 +0100 (CET) Subject: [pypy-svn] pypy getdict-signature: getdict() now takes the 'space' as argument Message-ID: <20110311134300.3F010282BD6@codespeak.net> Author: Amaury Forgeot d'Arc Branch: getdict-signature Changeset: r42512:93ae975476a6 Date: 2011-03-11 14:26 +0100 
http://bitbucket.org/pypy/pypy/changeset/93ae975476a6/ Log: getdict() now takes the 'space' as argument diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py --- a/pypy/interpreter/function.py +++ b/pypy/interpreter/function.py @@ -176,9 +176,9 @@ i += 1 return new_frame.run() - def getdict(self): + def getdict(self, space): if self.w_func_dict is None: - self.w_func_dict = self.space.newdict(instance=True) + self.w_func_dict = space.newdict(instance=True) return self.w_func_dict def setdict(self, space, w_dict): diff --git a/pypy/interpreter/interactive.py b/pypy/interpreter/interactive.py --- a/pypy/interpreter/interactive.py +++ b/pypy/interpreter/interactive.py @@ -27,7 +27,8 @@ import keyword w_res = self.space.call_method(self.w_globals, "keys") namespace_keys = self.space.unwrap(w_res) - w_res = self.space.call_method(self.space.builtin.getdict(), "keys") + w_res = self.space.call_method(self.space.builtin.getdict(self.space), + "keys") builtin_keys = self.space.unwrap(w_res) matches = [] diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -261,7 +261,7 @@ if "user_setup" in body: base_user_setup = body["user_setup"] class Proto(object): - def getdict(self): + def getdict(self, space): return self.w__dict__ def setdict(self, space, w_dict): @@ -566,7 +566,7 @@ from pypy.interpreter.special import NotImplemented, Ellipsis def descr_get_dict(space, w_obj): - w_dict = w_obj.getdict() + w_dict = w_obj.getdict(space) if w_dict is None: typename = space.type(w_obj).getname(space) raise operationerrfmt(space.w_TypeError, diff --git a/pypy/interpreter/module.py b/pypy/interpreter/module.py --- a/pypy/interpreter/module.py +++ b/pypy/interpreter/module.py @@ -58,7 +58,7 @@ sys.exitfunc(), if the module has been imported. 
""" - def getdict(self): + def getdict(self, space): return self.w_dict def descr_module__new__(space, w_subtype, __args__): @@ -87,8 +87,9 @@ w_mod = space.getbuiltinmodule('_pickle_support') mod = space.interp_w(MixedModule, w_mod) new_inst = mod.get('module_new') - return space.newtuple([new_inst, space.newtuple([w_name, - self.getdict()]), + return space.newtuple([new_inst, + space.newtuple([w_name, + self.getdict(space)]), ]) #already imported case w_import = space.builtin.get('__import__') diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -28,24 +28,24 @@ _settled_ = True user_overridden_class = False - def getdict(self): + def getdict(self, space): return None def getdictvalue(self, space, attr): - w_dict = self.getdict() + w_dict = self.getdict(space) if w_dict is not None: return space.finditem_str(w_dict, attr) return None def setdictvalue(self, space, attr, w_value): - w_dict = self.getdict() + w_dict = self.getdict(space) if w_dict is not None: space.setitem_str(w_dict, attr, w_value) return True return False def deldictvalue(self, space, w_name): - w_dict = self.getdict() + w_dict = self.getdict(space) if w_dict is not None: try: space.delitem(w_dict, w_name) @@ -510,7 +510,7 @@ def export_builtin_exceptions(self): """NOT_RPYTHON""" - w_dic = self.exceptions_module.getdict() + w_dic = self.exceptions_module.getdict(self) w_keys = self.call_method(w_dic, "keys") exc_types_w = {} for w_name in self.unpackiterable(w_keys): diff --git a/pypy/interpreter/mixedmodule.py b/pypy/interpreter/mixedmodule.py --- a/pypy/interpreter/mixedmodule.py +++ b/pypy/interpreter/mixedmodule.py @@ -115,9 +115,8 @@ return w_value - def getdict(self): + def getdict(self, space): if self.lazy: - space = self.space for name in self.loaders: w_value = self.get(name) space.setitem(self.w_dict, space.new_interned_str(name), w_value) @@ -126,7 +125,7 @@ return self.w_dict 
def _freeze_(self): - self.getdict() + self.getdict(self.space) self.w_initialdict = None self.startup_called = False self._frozen = True diff --git a/pypy/interpreter/pyframe.py b/pypy/interpreter/pyframe.py --- a/pypy/interpreter/pyframe.py +++ b/pypy/interpreter/pyframe.py @@ -584,7 +584,7 @@ return pytraceback.offset2lineno(self.pycode, self.last_instr) def fget_f_builtins(self, space): - return self.get_builtin().getdict() + return self.get_builtin().getdict(space) def fget_f_back(self, space): return self.space.wrap(self.f_backref()) From commits-noreply at bitbucket.org Fri Mar 11 14:43:07 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 14:43:07 +0100 (CET) Subject: [pypy-svn] pypy getdict-signature: Add space argument to all instances of w_obj.getdict() Message-ID: <20110311134307.2D3EB282BD6@codespeak.net> Author: Amaury Forgeot d'Arc Branch: getdict-signature Changeset: r42513:a79459edc25c Date: 2011-03-11 14:39 +0100 http://bitbucket.org/pypy/pypy/changeset/a79459edc25c/ Log: Add space argument to all instances of w_obj.getdict() diff --git a/pypy/module/thread/os_local.py b/pypy/module/thread/os_local.py --- a/pypy/module/thread/os_local.py +++ b/pypy/module/thread/os_local.py @@ -10,18 +10,16 @@ """Thread-local data""" def __init__(self, space, initargs): - self.space = space self.initargs = initargs ident = thread.get_ident() self.dicts = {ident: space.newdict()} - def getdict(self): + def getdict(self, space): ident = thread.get_ident() try: w_dict = self.dicts[ident] except KeyError: # create a new dict for this thread - space = self.space w_dict = self.dicts[ident] = space.newdict(instance=True) # call __init__ try: diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -20,9 +20,9 @@ w_globals = caller.w_globals w_builtins = space.getitem(w_globals, space.wrap('__builtins__')) if not space.isinstance_w(w_builtins, space.w_dict): - 
w_builtins = w_builtins.getdict() + w_builtins = w_builtins.getdict(space) else: - w_builtins = space.builtin.getdict() + w_builtins = space.builtin.getdict(space) return borrow_from(None, w_builtins) @cpython_api([], PyObject, error=CANNOT_FAIL) diff --git a/pypy/objspace/fake/checkmodule.py b/pypy/objspace/fake/checkmodule.py --- a/pypy/objspace/fake/checkmodule.py +++ b/pypy/objspace/fake/checkmodule.py @@ -81,7 +81,7 @@ ModuleClass = __import__(basepath + '.%s' % modname, None, None, ['Module']).Module module = ModuleClass(space, space.wrap(modname)) - w_moduledict = module.getdict() + w_moduledict = module.getdict(space) gateways = find_gateways(modname, basepath, module) functions = [gw.__spacebind__(space) for gw in gateways] diff --git a/pypy/objspace/std/bytearraytype.py b/pypy/objspace/std/bytearraytype.py --- a/pypy/objspace/std/bytearraytype.py +++ b/pypy/objspace/std/bytearraytype.py @@ -104,7 +104,7 @@ def descr_bytearray__reduce__(space, w_self): from pypy.objspace.std.bytearrayobject import W_BytearrayObject assert isinstance(w_self, W_BytearrayObject) - w_dict = w_self.getdict() + w_dict = w_self.getdict(space) if w_dict is None: w_dict = space.w_None return space.newtuple([ diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -217,15 +217,15 @@ class DevolvedDictTerminator(Terminator): def _read_terminator(self, obj, selector): if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) return space.finditem_str(w_dict, selector[0]) return Terminator._read_terminator(self, obj, selector) def _write_terminator(self, obj, selector, w_value): if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) space.setitem_str(w_dict, selector[0], w_value) return True return Terminator._write_terminator(self, obj, selector, w_value) @@ -233,8 +233,8 @@ def delete(self, obj, selector): from 
pypy.interpreter.error import OperationError if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) try: space.delitem(w_dict, space.wrap(selector[0])) except OperationError, ex: @@ -377,12 +377,12 @@ self._become(new_obj) return True - def getdict(self): + def getdict(self, space): w_dict = self._get_mapdict_map().read(self, ("dict", SPECIAL)) if w_dict is not None: assert isinstance(w_dict, W_DictMultiObject) return w_dict - w_dict = MapDictImplementation(self.space, self) + w_dict = MapDictImplementation(space, self) flag = self._get_mapdict_map().write(self, ("dict", SPECIAL), w_dict) assert flag return w_dict @@ -390,7 +390,7 @@ def setdict(self, space, w_dict): from pypy.interpreter.typedef import check_new_dictionary w_dict = check_new_dictionary(space, w_dict) - w_olddict = self.getdict() + w_olddict = self.getdict(space) assert isinstance(w_dict, W_DictMultiObject) if w_olddict.r_dict_content is None: w_olddict._as_rdict() @@ -648,7 +648,7 @@ def materialize_r_dict(space, obj, w_d): map = obj._get_mapdict_map() - assert obj.getdict() is w_d + assert obj.getdict(space) is w_d new_obj = map.materialize_r_dict(space, obj, w_d) _become(obj, new_obj) diff --git a/pypy/module/cpyext/state.py b/pypy/module/cpyext/state.py --- a/pypy/module/cpyext/state.py +++ b/pypy/module/cpyext/state.py @@ -111,7 +111,7 @@ return None w_mod = PyImport_AddModule(self.space, name) assert isinstance(w_mod, Module) - w_mdict = w_mod.getdict() + w_mdict = w_mod.getdict(self.space) self.space.call_method(w_mdict, 'update', w_dict) return w_mod @@ -124,6 +124,6 @@ msg = "fixup_extension: module '%s' not loaded" % name raise OperationError(space.w_SystemError, space.wrap(msg)) - w_dict = w_mod.getdict() + w_dict = w_mod.getdict(space) w_copy = space.call_method(w_dict, 'copy') self.extensions[path] = w_copy diff --git a/pypy/objspace/taint.py b/pypy/objspace/taint.py --- a/pypy/objspace/taint.py +++ b/pypy/objspace/taint.py @@ -17,8 +17,8 @@ 
def __init__(self, w_obj): self.w_obj = w_obj -## def getdict(self): -## return taint(self.w_obj.getdict()) +## def getdict(self, space): +## return taint(self.w_obj.getdict(space)) ## def getdictvalue(self, space, attr): ## return taint(self.w_obj.getdictvalue(space, attr)) diff --git a/pypy/objspace/std/frame.py b/pypy/objspace/std/frame.py --- a/pypy/objspace/std/frame.py +++ b/pypy/objspace/std/frame.py @@ -74,7 +74,7 @@ if w_value is None: builtins = f.get_builtin() assert isinstance(builtins, Module) - w_builtin_dict = builtins.getdict() + w_builtin_dict = builtins.getdict(f.space) assert isinstance(w_builtin_dict, W_DictMultiObject) w_value = w_builtin_dict.get_builtin_indexed(num) if w_value is None: diff --git a/pypy/objspace/std/proxyobject.py b/pypy/objspace/std/proxyobject.py --- a/pypy/objspace/std/proxyobject.py +++ b/pypy/objspace/std/proxyobject.py @@ -19,7 +19,6 @@ def __init__(self, space, w_type, w_controller): self.w_type = w_type self.w_controller = w_controller - self.space = space def descr_call_mismatch(self, space, name, reqcls, args): args_w = args.arguments_w[:] @@ -63,8 +62,8 @@ raise return False - def getdict(self): - return self.getdictvalue(self.space, '__dict__') + def getdict(self, space): + return self.getdictvalue(space, '__dict__') def setdict(self, space, w_dict): if not self.setdictvalue(space, '__dict__', w_dict): diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -355,10 +355,9 @@ del w_self.lazyloaders return False - def getdict(w_self): # returning a dict-proxy! + def getdict(w_self, space): # returning a dict-proxy! 
if w_self.lazyloaders: w_self._freeze_() # force un-lazification - space = w_self.space newdic = space.newdict(from_strdict_shared=w_self.dict_w) return W_DictProxyObject(newdic) diff --git a/pypy/module/__builtin__/interp_classobj.py b/pypy/module/__builtin__/interp_classobj.py --- a/pypy/module/__builtin__/interp_classobj.py +++ b/pypy/module/__builtin__/interp_classobj.py @@ -65,7 +65,7 @@ w_inst = cache.cls_without_del(space, self) return w_inst - def getdict(self): + def getdict(self, space): return self.w_dict def setdict(self, space, w_dict): @@ -378,7 +378,7 @@ name = space.str_w(w_attr) if len(name) >= 8 and name[0] == '_': if name == "__dict__": - return self.getdict() + return self.getdict(space) elif name == "__class__": return self.w_class return self.getattr(space, name) diff --git a/pypy/module/_io/interp_bytesio.py b/pypy/module/_io/interp_bytesio.py --- a/pypy/module/_io/interp_bytesio.py +++ b/pypy/module/_io/interp_bytesio.py @@ -180,7 +180,7 @@ return space.newtuple([ w_content, space.wrap(self.pos), - self.getdict()]) + self.getdict(space)]) def setstate_w(self, space, w_state): self._check_closed(space) @@ -200,7 +200,7 @@ "position value cannot be negative")) self.pos = pos if not space.is_w(w_dict, space.w_None): - space.call_method(self.getdict(), "update", w_dict) + space.call_method(self.getdict(space), "update", w_dict) W_BytesIO.typedef = TypeDef( 'BytesIO', W_BufferedIOBase.typedef, diff --git a/pypy/objspace/std/fake.py b/pypy/objspace/std/fake.py --- a/pypy/objspace/std/fake.py +++ b/pypy/objspace/std/fake.py @@ -112,12 +112,12 @@ def __init__(w_self, space, val): w_self.val = val w_self.space = space - def getdict(w_self): + def getdict(w_self, space): try: d = w_self.val.__dict__ except AttributeError: - return W_Object.getdict(w_self) - return w_self.space.wrap(d) + return W_Object.getdict(w_self, space) + return space.wrap(d) def unwrap(w_self, space): return w_self.val if cpy_type is types.FunctionType: diff --git 
a/pypy/module/_io/interp_iobase.py b/pypy/module/_io/interp_iobase.py --- a/pypy/module/_io/interp_iobase.py +++ b/pypy/module/_io/interp_iobase.py @@ -44,7 +44,7 @@ self.w_dict = space.newdict() self.__IOBase_closed = False - def getdict(self): + def getdict(self, space): return self.w_dict def _closed(self, space): diff --git a/pypy/module/cpyext/sysmodule.py b/pypy/module/cpyext/sysmodule.py --- a/pypy/module/cpyext/sysmodule.py +++ b/pypy/module/cpyext/sysmodule.py @@ -8,7 +8,7 @@ """Return the object name from the sys module or NULL if it does not exist, without setting an exception.""" name = rffi.charp2str(name) - w_dict = space.sys.getdict() + w_dict = space.sys.getdict(space) w_obj = space.finditem_str(w_dict, name) return borrow_from(None, w_obj) @@ -18,6 +18,6 @@ case name is deleted from the sys module. Returns 0 on success, -1 on error.""" name = rffi.charp2str(name) - w_dict = space.sys.getdict() + w_dict = space.sys.getdict(space) space.setitem_str(w_dict, name, w_obj) return 0 diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -223,8 +223,8 @@ obj.setdictvalue(space, "a", 51) obj.setdictvalue(space, "b", 61) obj.setdictvalue(space, "c", 71) - assert obj.getdict() is obj.getdict() - assert obj.getdict().length() == 3 + assert obj.getdict(space) is obj.getdict(space) + assert obj.getdict(space).length() == 3 def test_materialize_r_dict(): @@ -282,7 +282,7 @@ def get_impl(self): cls = Class() w_obj = cls.instantiate(self.fakespace) - return w_obj.getdict() + return w_obj.getdict(self.fakespace) class TestMapDictImplementation(BaseTestRDictImplementation): ImplementionClass = MapDictImplementation get_impl = get_impl @@ -293,8 +293,8 @@ # ___________________________________________________________ # tests that check the obj interface after the dict has devolved -def devolve_dict(obj): - w_d = obj.getdict() +def 
devolve_dict(space, obj): + w_d = obj.getdict(space) w_d._as_rdict() def test_get_setdictvalue_after_devolve(): @@ -310,7 +310,7 @@ obj.setdictvalue(space, "b", 6) obj.setdictvalue(space, "c", 7) obj.setdictvalue(space, "weakref", 42) - devolve_dict(obj) + devolve_dict(space, obj) assert obj.getdictvalue(space, "a") == 5 assert obj.getdictvalue(space, "b") == 6 assert obj.getdictvalue(space, "c") == 7 @@ -348,10 +348,10 @@ obj.setdictvalue(space, "a", 5) obj.setdictvalue(space, "b", 6) obj.setdictvalue(space, "c", 7) - w_d = obj.getdict() + w_d = obj.getdict(space) obj2 = cls.instantiate() obj2.setdictvalue(space, "d", 8) - obj.setdict(space, obj2.getdict()) + obj.setdict(space, obj2.getdict(space)) assert obj.getdictvalue(space, "a") is None assert obj.getdictvalue(space, "b") is None assert obj.getdictvalue(space, "c") is None diff --git a/pypy/objspace/std/dictproxyobject.py b/pypy/objspace/std/dictproxyobject.py --- a/pypy/objspace/std/dictproxyobject.py +++ b/pypy/objspace/std/dictproxyobject.py @@ -2,7 +2,7 @@ from pypy.objspace.std.register_all import register_all def descr_get_dictproxy(space, w_obj): - return W_DictProxyObject(w_obj.getdict()) + return W_DictProxyObject(w_obj.getdict(space)) class W_DictProxyObject(W_Object): from pypy.objspace.std.dictproxytype import dictproxy_typedef as typedef diff --git a/pypy/module/exceptions/interp_exceptions.py b/pypy/module/exceptions/interp_exceptions.py --- a/pypy/module/exceptions/interp_exceptions.py +++ b/pypy/module/exceptions/interp_exceptions.py @@ -97,7 +97,6 @@ args_w = [] def __init__(self, space): - self.space = space self.w_message = space.w_None def descr_init(self, space, args_w): @@ -149,9 +148,9 @@ def descr_getitem(self, space, w_index): return space.getitem(space.newtuple(self.args_w), w_index) - def getdict(self): + def getdict(self, space): if self.w_dict is None: - self.w_dict = self.space.newdict(instance=True) + self.w_dict = space.newdict(instance=True) return self.w_dict def 
setdict(self, space, w_dict): @@ -166,7 +165,7 @@ return space.newtuple(lst) def descr_setstate(self, space, w_dict): - w_olddict = self.getdict() + w_olddict = self.getdict(space) space.call_method(w_olddict, 'update', w_dict) def descr_message_get(self, space): @@ -183,7 +182,7 @@ return self.w_message def descr_message_set(self, space, w_new): - space.setitem(self.getdict(), space.wrap("message"), w_new) + space.setitem(self.getdict(space), space.wrap("message"), w_new) def descr_message_del(self, space): w_dict = self.w_dict diff --git a/pypy/module/__builtin__/test/test_classobj.py b/pypy/module/__builtin__/test/test_classobj.py --- a/pypy/module/__builtin__/test/test_classobj.py +++ b/pypy/module/__builtin__/test/test_classobj.py @@ -988,7 +988,7 @@ py.test.skip("can only be run on py.py") def is_strdict(space, w_class): from pypy.objspace.std.dictmultiobject import StrDictImplementation - w_d = w_class.getdict() + w_d = w_class.getdict(space) return space.wrap(isinstance(w_d, StrDictImplementation) and w_d.r_dict_content is None) cls.w_is_strdict = cls.space.wrap(gateway.interp2app(is_strdict)) diff --git a/pypy/module/cpyext/modsupport.py b/pypy/module/cpyext/modsupport.py --- a/pypy/module/cpyext/modsupport.py +++ b/pypy/module/cpyext/modsupport.py @@ -113,7 +113,7 @@ def PyModule_GetDict(space, w_mod): if PyModule_Check(space, w_mod): assert isinstance(w_mod, Module) - w_dict = w_mod.getdict() + w_dict = w_mod.getdict(space) return borrow_from(w_mod, w_dict) else: PyErr_BadInternalCall(space) diff --git a/pypy/module/cpyext/test/test_dictobject.py b/pypy/module/cpyext/test/test_dictobject.py --- a/pypy/module/cpyext/test/test_dictobject.py +++ b/pypy/module/cpyext/test/test_dictobject.py @@ -87,7 +87,7 @@ assert space.unwrap(w_d) == dict(a='b', c='d', e='f') def test_iter(self, space, api): - w_dict = space.sys.getdict() + w_dict = space.sys.getdict(space) py_dict = make_ref(space, w_dict) ppos = lltype.malloc(Py_ssize_tP.TO, 1, flavor='raw') From 
commits-noreply at bitbucket.org Fri Mar 11 15:45:58 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Fri, 11 Mar 2011 15:45:58 +0100 (CET) Subject: [pypy-svn] pypy default: bump to pytest-2.0.3.dev0 version which contains a fix speeding up skips Message-ID: <20110311144558.82B9D282B8B@codespeak.net> Author: holger krekel Branch: Changeset: r42514:02348155e320 Date: 2011-03-11 15:45 +0100 http://bitbucket.org/pypy/pypy/changeset/02348155e320/ Log: bump to pytest-2.0.3.dev0 version which contains a fix speeding up skips which had prolonged some pypy test runs (app-level most notably) diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -3,7 +3,7 @@ (pypy version of startup script) see http://pytest.org for details. """ -__version__ = '2.0.2' # base pytest version +__version__ = '2.0.3.dev0' # base pytest version __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins diff --git a/_pytest/runner.py b/_pytest/runner.py --- a/_pytest/runner.py +++ b/_pytest/runner.py @@ -153,7 +153,7 @@ longrepr = excinfo elif excinfo.errisinstance(py.test.skip.Exception): outcome = "skipped" - r = item._repr_failure_py(excinfo, "line").reprcrash + r = excinfo._getreprcrash() longrepr = (str(r.path), r.lineno, r.message) else: outcome = "failed" From commits-noreply at bitbucket.org Fri Mar 11 16:44:06 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Fri, 11 Mar 2011 16:44:06 +0100 (CET) Subject: [pypy-svn] pypy default: Try to fix posix tests on systems with unsuitable defaultencodings. Message-ID: <20110311154406.65C71282B8B@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42515:c6a32675ecd5 Date: 2011-03-11 10:43 -0500 http://bitbucket.org/pypy/pypy/changeset/c6a32675ecd5/ Log: Try to fix posix tests on systems with unsuitable defaultencodings. 
diff --git a/pypy/conftest.py b/pypy/conftest.py --- a/pypy/conftest.py +++ b/pypy/conftest.py @@ -131,7 +131,7 @@ py.test.skip("cannot runappdirect test: space needs %s = %s, "\ "while pypy-c was built with %s" % (key, value, has)) - for name in ('int', 'long', 'str', 'unicode'): + for name in ('int', 'long', 'str', 'unicode', 'None'): setattr(self, 'w_' + name, eval(name)) From commits-noreply at bitbucket.org Fri Mar 11 18:20:51 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:20:51 +0100 (CET) Subject: [pypy-svn] pypy getdict-signature: close branch Message-ID: <20110311172051.189DC282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: getdict-signature Changeset: r42516:8dd1739969f4 Date: 2011-03-11 15:01 +0100 http://bitbucket.org/pypy/pypy/changeset/8dd1739969f4/ Log: close branch From commits-noreply at bitbucket.org Fri Mar 11 18:20:51 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:20:51 +0100 (CET) Subject: [pypy-svn] pypy default: hg merge getdict-signature Message-ID: <20110311172051.515CD282BA1@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42517:d6ad51f37c86 Date: 2011-03-11 15:01 +0100 http://bitbucket.org/pypy/pypy/changeset/d6ad51f37c86/ Log: hg merge getdict-signature From commits-noreply at bitbucket.org Fri Mar 11 18:20:52 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:20:52 +0100 (CET) Subject: [pypy-svn] pypy default: Ast objects have a __dict__ and can store any attribute. Message-ID: <20110311172052.71FBA282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42518:0e0490da48d1 Date: 2011-03-11 15:40 +0100 http://bitbucket.org/pypy/pypy/changeset/0e0490da48d1/ Log: Ast objects have a __dict__ and can store any attribute. 
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -428,8 +428,8 @@ else: flag = self.data.field_masks[field] self.emit("if not w_self.initialization_state & %s:" % (flag,), 1) - self.emit("w_err = space.wrap(\"attribute '%s' has not been set\")" % - (field.name,), 2) + self.emit("w_err = space.wrap(\"'%s' object has no attribute '%s'\")" % + (name, field.name,), 2) self.emit("raise OperationError(space.w_AttributeError, w_err)", 2) if field.seq: self.emit("if w_self.w_%s is None:" % (field.name,), 1) @@ -554,7 +554,7 @@ class AST(Wrappable): - __slots__ = ("initialization_state",) + __slots__ = ("initialization_state", "w_dict") __metaclass__ = extendabletype @@ -567,6 +567,11 @@ def sync_app_attrs(self, space): raise NotImplementedError + def getdict(self, space): + if not hasattr(self, 'w_dict'): + self.w_dict = space.newdict(instance=True) + return self.w_dict + class NodeVisitorNotImplemented(Exception): pass diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -10,7 +10,7 @@ class AST(Wrappable): - __slots__ = ("initialization_state",) + __slots__ = ("initialization_state", "w_dict") __metaclass__ = extendabletype @@ -23,6 +23,11 @@ def sync_app_attrs(self, space): raise NotImplementedError + def getdict(self, space): + if not hasattr(self, 'w_dict'): + self.w_dict = space.newdict(instance=True) + return self.w_dict + class NodeVisitorNotImplemented(Exception): pass @@ -3053,7 +3058,7 @@ def Module_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Module' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is 
None: @@ -3095,7 +3100,7 @@ def Interactive_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Interactive' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3137,7 +3142,7 @@ def Expression_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Expression' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -3171,7 +3176,7 @@ def Suite_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Suite' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3213,7 +3218,7 @@ def stmt_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("attribute 'lineno' has not been set") + w_err = space.wrap("'stmt' object has no attribute 'lineno'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -3223,7 +3228,7 @@ def stmt_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("attribute 'col_offset' has not been set") + w_err = space.wrap("'stmt' object has no attribute 'col_offset'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -3241,7 +3246,7 @@ def FunctionDef_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'name' has not been set") + w_err = space.wrap("'FunctionDef' object has no attribute 'name'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -3251,7 +3256,7 @@ def 
FunctionDef_get_args(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'args' has not been set") + w_err = space.wrap("'FunctionDef' object has no attribute 'args'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.args) @@ -3261,7 +3266,7 @@ def FunctionDef_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'FunctionDef' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3278,7 +3283,7 @@ def FunctionDef_get_decorator_list(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("attribute 'decorator_list' has not been set") + w_err = space.wrap("'FunctionDef' object has no attribute 'decorator_list'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_decorator_list is None: if w_self.decorator_list is None: @@ -3324,7 +3329,7 @@ def ClassDef_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'name' has not been set") + w_err = space.wrap("'ClassDef' object has no attribute 'name'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -3334,7 +3339,7 @@ def ClassDef_get_bases(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'bases' has not been set") + w_err = space.wrap("'ClassDef' object has no attribute 'bases'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_bases is None: if w_self.bases is None: @@ -3351,7 +3356,7 @@ def ClassDef_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'ClassDef' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3368,7 +3373,7 @@ def 
ClassDef_get_decorator_list(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("attribute 'decorator_list' has not been set") + w_err = space.wrap("'ClassDef' object has no attribute 'decorator_list'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_decorator_list is None: if w_self.decorator_list is None: @@ -3415,7 +3420,7 @@ def Return_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Return' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3449,7 +3454,7 @@ def Delete_get_targets(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'targets' has not been set") + w_err = space.wrap("'Delete' object has no attribute 'targets'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_targets is None: if w_self.targets is None: @@ -3491,7 +3496,7 @@ def Assign_get_targets(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'targets' has not been set") + w_err = space.wrap("'Assign' object has no attribute 'targets'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_targets is None: if w_self.targets is None: @@ -3508,7 +3513,7 @@ def Assign_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Assign' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3544,7 +3549,7 @@ def AugAssign_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'target' has not been set") + w_err = space.wrap("'AugAssign' object has no attribute 'target'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -3554,7 +3559,7 @@ def 
AugAssign_get_op(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'op' has not been set") + w_err = space.wrap("'AugAssign' object has no attribute 'op'") raise OperationError(space.w_AttributeError, w_err) return operator_to_class[w_self.op - 1]() @@ -3565,7 +3570,7 @@ def AugAssign_get_value(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'AugAssign' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3601,7 +3606,7 @@ def Print_get_dest(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'dest' has not been set") + w_err = space.wrap("'Print' object has no attribute 'dest'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.dest) @@ -3611,7 +3616,7 @@ def Print_get_values(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'values' has not been set") + w_err = space.wrap("'Print' object has no attribute 'values'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -3628,7 +3633,7 @@ def Print_get_nl(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'nl' has not been set") + w_err = space.wrap("'Print' object has no attribute 'nl'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.nl) @@ -3665,7 +3670,7 @@ def For_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'target' has not been set") + w_err = space.wrap("'For' object has no attribute 'target'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -3675,7 +3680,7 @@ def For_get_iter(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'iter' has not been set") + w_err = 
space.wrap("'For' object has no attribute 'iter'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.iter) @@ -3685,7 +3690,7 @@ def For_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'For' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3702,7 +3707,7 @@ def For_get_orelse(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("attribute 'orelse' has not been set") + w_err = space.wrap("'For' object has no attribute 'orelse'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3748,7 +3753,7 @@ def While_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'test' has not been set") + w_err = space.wrap("'While' object has no attribute 'test'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -3758,7 +3763,7 @@ def While_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'While' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3775,7 +3780,7 @@ def While_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'orelse' has not been set") + w_err = space.wrap("'While' object has no attribute 'orelse'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3820,7 +3825,7 @@ def If_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'test' has not been set") + w_err = space.wrap("'If' object has no attribute 'test'") raise OperationError(space.w_AttributeError, w_err) return 
space.wrap(w_self.test) @@ -3830,7 +3835,7 @@ def If_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'If' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3847,7 +3852,7 @@ def If_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'orelse' has not been set") + w_err = space.wrap("'If' object has no attribute 'orelse'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3892,7 +3897,7 @@ def With_get_context_expr(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'context_expr' has not been set") + w_err = space.wrap("'With' object has no attribute 'context_expr'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.context_expr) @@ -3902,7 +3907,7 @@ def With_get_optional_vars(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'optional_vars' has not been set") + w_err = space.wrap("'With' object has no attribute 'optional_vars'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.optional_vars) @@ -3912,7 +3917,7 @@ def With_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'With' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3956,7 +3961,7 @@ def Raise_get_type(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'type' has not been set") + w_err = space.wrap("'Raise' object has no attribute 'type'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.type) @@ -3966,7 +3971,7 @@ def Raise_get_inst(space, 
w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'inst' has not been set") + w_err = space.wrap("'Raise' object has no attribute 'inst'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.inst) @@ -3976,7 +3981,7 @@ def Raise_get_tback(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'tback' has not been set") + w_err = space.wrap("'Raise' object has no attribute 'tback'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.tback) @@ -4012,7 +4017,7 @@ def TryExcept_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'TryExcept' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -4029,7 +4034,7 @@ def TryExcept_get_handlers(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'handlers' has not been set") + w_err = space.wrap("'TryExcept' object has no attribute 'handlers'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_handlers is None: if w_self.handlers is None: @@ -4046,7 +4051,7 @@ def TryExcept_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'orelse' has not been set") + w_err = space.wrap("'TryExcept' object has no attribute 'orelse'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -4092,7 +4097,7 @@ def TryFinally_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'TryFinally' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -4109,7 +4114,7 @@ def TryFinally_get_finalbody(space, w_self): if not 
w_self.initialization_state & 2: - w_err = space.wrap("attribute 'finalbody' has not been set") + w_err = space.wrap("'TryFinally' object has no attribute 'finalbody'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_finalbody is None: if w_self.finalbody is None: @@ -4153,7 +4158,7 @@ def Assert_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'test' has not been set") + w_err = space.wrap("'Assert' object has no attribute 'test'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -4163,7 +4168,7 @@ def Assert_get_msg(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'msg' has not been set") + w_err = space.wrap("'Assert' object has no attribute 'msg'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.msg) @@ -4198,7 +4203,7 @@ def Import_get_names(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'names' has not been set") + w_err = space.wrap("'Import' object has no attribute 'names'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4240,7 +4245,7 @@ def ImportFrom_get_module(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'module' has not been set") + w_err = space.wrap("'ImportFrom' object has no attribute 'module'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.module) @@ -4253,7 +4258,7 @@ def ImportFrom_get_names(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'names' has not been set") + w_err = space.wrap("'ImportFrom' object has no attribute 'names'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4270,7 +4275,7 @@ def ImportFrom_get_level(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 
'level' has not been set") + w_err = space.wrap("'ImportFrom' object has no attribute 'level'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.level) @@ -4307,7 +4312,7 @@ def Exec_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Exec' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4317,7 +4322,7 @@ def Exec_get_globals(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'globals' has not been set") + w_err = space.wrap("'Exec' object has no attribute 'globals'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.globals) @@ -4327,7 +4332,7 @@ def Exec_get_locals(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'locals' has not been set") + w_err = space.wrap("'Exec' object has no attribute 'locals'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.locals) @@ -4363,7 +4368,7 @@ def Global_get_names(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'names' has not been set") + w_err = space.wrap("'Global' object has no attribute 'names'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4405,7 +4410,7 @@ def Expr_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Expr' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -4508,7 +4513,7 @@ def expr_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("attribute 'lineno' has not been set") + w_err = space.wrap("'expr' object has no attribute 'lineno'") raise 
OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -4518,7 +4523,7 @@ def expr_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("attribute 'col_offset' has not been set") + w_err = space.wrap("'expr' object has no attribute 'col_offset'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -4536,7 +4541,7 @@ def BoolOp_get_op(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'op' has not been set") + w_err = space.wrap("'BoolOp' object has no attribute 'op'") raise OperationError(space.w_AttributeError, w_err) return boolop_to_class[w_self.op - 1]() @@ -4547,7 +4552,7 @@ def BoolOp_get_values(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'values' has not been set") + w_err = space.wrap("'BoolOp' object has no attribute 'values'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -4590,7 +4595,7 @@ def BinOp_get_left(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'left' has not been set") + w_err = space.wrap("'BinOp' object has no attribute 'left'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.left) @@ -4600,7 +4605,7 @@ def BinOp_get_op(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'op' has not been set") + w_err = space.wrap("'BinOp' object has no attribute 'op'") raise OperationError(space.w_AttributeError, w_err) return operator_to_class[w_self.op - 1]() @@ -4611,7 +4616,7 @@ def BinOp_get_right(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'right' has not been set") + w_err = space.wrap("'BinOp' object has no attribute 'right'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.right) @@ -4647,7 +4652,7 @@ def 
UnaryOp_get_op(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'op' has not been set") + w_err = space.wrap("'UnaryOp' object has no attribute 'op'") raise OperationError(space.w_AttributeError, w_err) return unaryop_to_class[w_self.op - 1]() @@ -4658,7 +4663,7 @@ def UnaryOp_get_operand(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'operand' has not been set") + w_err = space.wrap("'UnaryOp' object has no attribute 'operand'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.operand) @@ -4693,7 +4698,7 @@ def Lambda_get_args(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'args' has not been set") + w_err = space.wrap("'Lambda' object has no attribute 'args'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.args) @@ -4703,7 +4708,7 @@ def Lambda_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'Lambda' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4738,7 +4743,7 @@ def IfExp_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'test' has not been set") + w_err = space.wrap("'IfExp' object has no attribute 'test'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -4748,7 +4753,7 @@ def IfExp_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'IfExp' object has no attribute 'body'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4758,7 +4763,7 @@ def IfExp_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'orelse' has not been set") + w_err = space.wrap("'IfExp' 
object has no attribute 'orelse'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.orelse) @@ -4794,7 +4799,7 @@ def Dict_get_keys(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'keys' has not been set") + w_err = space.wrap("'Dict' object has no attribute 'keys'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_keys is None: if w_self.keys is None: @@ -4811,7 +4816,7 @@ def Dict_get_values(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'values' has not been set") + w_err = space.wrap("'Dict' object has no attribute 'values'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -4855,7 +4860,7 @@ def Set_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elts' has not been set") + w_err = space.wrap("'Set' object has no attribute 'elts'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_elts is None: if w_self.elts is None: @@ -4897,7 +4902,7 @@ def ListComp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elt' has not been set") + w_err = space.wrap("'ListComp' object has no attribute 'elt'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -4907,7 +4912,7 @@ def ListComp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'generators' has not been set") + w_err = space.wrap("'ListComp' object has no attribute 'generators'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -4950,7 +4955,7 @@ def SetComp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elt' has not been set") + w_err = space.wrap("'SetComp' object has no attribute 'elt'") raise 
OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -4960,7 +4965,7 @@ def SetComp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'generators' has not been set") + w_err = space.wrap("'SetComp' object has no attribute 'generators'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -5003,7 +5008,7 @@ def DictComp_get_key(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'key' has not been set") + w_err = space.wrap("'DictComp' object has no attribute 'key'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.key) @@ -5013,7 +5018,7 @@ def DictComp_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'DictComp' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5023,7 +5028,7 @@ def DictComp_get_generators(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'generators' has not been set") + w_err = space.wrap("'DictComp' object has no attribute 'generators'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -5067,7 +5072,7 @@ def GeneratorExp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elt' has not been set") + w_err = space.wrap("'GeneratorExp' object has no attribute 'elt'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -5077,7 +5082,7 @@ def GeneratorExp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'generators' has not been set") + w_err = space.wrap("'GeneratorExp' object has no attribute 'generators'") raise 
OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -5120,7 +5125,7 @@ def Yield_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Yield' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5154,7 +5159,7 @@ def Compare_get_left(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'left' has not been set") + w_err = space.wrap("'Compare' object has no attribute 'left'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.left) @@ -5164,7 +5169,7 @@ def Compare_get_ops(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'ops' has not been set") + w_err = space.wrap("'Compare' object has no attribute 'ops'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_ops is None: if w_self.ops is None: @@ -5181,7 +5186,7 @@ def Compare_get_comparators(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'comparators' has not been set") + w_err = space.wrap("'Compare' object has no attribute 'comparators'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_comparators is None: if w_self.comparators is None: @@ -5226,7 +5231,7 @@ def Call_get_func(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'func' has not been set") + w_err = space.wrap("'Call' object has no attribute 'func'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.func) @@ -5236,7 +5241,7 @@ def Call_get_args(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'args' has not been set") + w_err = space.wrap("'Call' object has no attribute 'args'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_args is None: if 
w_self.args is None: @@ -5253,7 +5258,7 @@ def Call_get_keywords(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'keywords' has not been set") + w_err = space.wrap("'Call' object has no attribute 'keywords'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_keywords is None: if w_self.keywords is None: @@ -5270,7 +5275,7 @@ def Call_get_starargs(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("attribute 'starargs' has not been set") + w_err = space.wrap("'Call' object has no attribute 'starargs'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.starargs) @@ -5280,7 +5285,7 @@ def Call_get_kwargs(space, w_self): if not w_self.initialization_state & 16: - w_err = space.wrap("attribute 'kwargs' has not been set") + w_err = space.wrap("'Call' object has no attribute 'kwargs'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.kwargs) @@ -5320,7 +5325,7 @@ def Repr_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Repr' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5354,7 +5359,7 @@ def Num_get_n(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'n' has not been set") + w_err = space.wrap("'Num' object has no attribute 'n'") raise OperationError(space.w_AttributeError, w_err) return w_self.n @@ -5388,7 +5393,7 @@ def Str_get_s(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 's' has not been set") + w_err = space.wrap("'Str' object has no attribute 's'") raise OperationError(space.w_AttributeError, w_err) return w_self.s @@ -5425,7 +5430,7 @@ def Attribute_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err 
= space.wrap("'Attribute' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5435,7 +5440,7 @@ def Attribute_get_attr(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'attr' has not been set") + w_err = space.wrap("'Attribute' object has no attribute 'attr'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.attr) @@ -5445,7 +5450,7 @@ def Attribute_get_ctx(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'ctx' has not been set") + w_err = space.wrap("'Attribute' object has no attribute 'ctx'") raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5482,7 +5487,7 @@ def Subscript_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Subscript' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5492,7 +5497,7 @@ def Subscript_get_slice(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'slice' has not been set") + w_err = space.wrap("'Subscript' object has no attribute 'slice'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.slice) @@ -5502,7 +5507,7 @@ def Subscript_get_ctx(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'ctx' has not been set") + w_err = space.wrap("'Subscript' object has no attribute 'ctx'") raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5539,7 +5544,7 @@ def Name_get_id(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'id' has not been set") + w_err = space.wrap("'Name' object has no attribute 'id'") raise OperationError(space.w_AttributeError, w_err) return 
space.wrap(w_self.id) @@ -5549,7 +5554,7 @@ def Name_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'ctx' has not been set") + w_err = space.wrap("'Name' object has no attribute 'ctx'") raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5585,7 +5590,7 @@ def List_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elts' has not been set") + w_err = space.wrap("'List' object has no attribute 'elts'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_elts is None: if w_self.elts is None: @@ -5602,7 +5607,7 @@ def List_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'ctx' has not been set") + w_err = space.wrap("'List' object has no attribute 'ctx'") raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5639,7 +5644,7 @@ def Tuple_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'elts' has not been set") + w_err = space.wrap("'Tuple' object has no attribute 'elts'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_elts is None: if w_self.elts is None: @@ -5656,7 +5661,7 @@ def Tuple_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'ctx' has not been set") + w_err = space.wrap("'Tuple' object has no attribute 'ctx'") raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5693,7 +5698,7 @@ def Const_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Const' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return w_self.value @@ -5799,7 +5804,7 @@ def Slice_get_lower(space, w_self): if not w_self.initialization_state & 1: - w_err = 
space.wrap("attribute 'lower' has not been set") + w_err = space.wrap("'Slice' object has no attribute 'lower'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lower) @@ -5809,7 +5814,7 @@ def Slice_get_upper(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'upper' has not been set") + w_err = space.wrap("'Slice' object has no attribute 'upper'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.upper) @@ -5819,7 +5824,7 @@ def Slice_get_step(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'step' has not been set") + w_err = space.wrap("'Slice' object has no attribute 'step'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.step) @@ -5855,7 +5860,7 @@ def ExtSlice_get_dims(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'dims' has not been set") + w_err = space.wrap("'ExtSlice' object has no attribute 'dims'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_dims is None: if w_self.dims is None: @@ -5897,7 +5902,7 @@ def Index_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'Index' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -6151,7 +6156,7 @@ def comprehension_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'target' has not been set") + w_err = space.wrap("'comprehension' object has no attribute 'target'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -6161,7 +6166,7 @@ def comprehension_get_iter(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'iter' has not been set") + w_err = space.wrap("'comprehension' object has no attribute 'iter'") raise 
OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.iter) @@ -6171,7 +6176,7 @@ def comprehension_get_ifs(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'ifs' has not been set") + w_err = space.wrap("'comprehension' object has no attribute 'ifs'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_ifs is None: if w_self.ifs is None: @@ -6215,7 +6220,7 @@ def excepthandler_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("attribute 'lineno' has not been set") + w_err = space.wrap("'excepthandler' object has no attribute 'lineno'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -6225,7 +6230,7 @@ def excepthandler_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("attribute 'col_offset' has not been set") + w_err = space.wrap("'excepthandler' object has no attribute 'col_offset'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -6243,7 +6248,7 @@ def ExceptHandler_get_type(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'type' has not been set") + w_err = space.wrap("'ExceptHandler' object has no attribute 'type'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.type) @@ -6253,7 +6258,7 @@ def ExceptHandler_get_name(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'name' has not been set") + w_err = space.wrap("'ExceptHandler' object has no attribute 'name'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -6263,7 +6268,7 @@ def ExceptHandler_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'body' has not been set") + w_err = space.wrap("'ExceptHandler' object has no attribute 'body'") raise 
OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -6307,7 +6312,7 @@ def arguments_get_args(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'args' has not been set") + w_err = space.wrap("'arguments' object has no attribute 'args'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_args is None: if w_self.args is None: @@ -6324,7 +6329,7 @@ def arguments_get_vararg(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'vararg' has not been set") + w_err = space.wrap("'arguments' object has no attribute 'vararg'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.vararg) @@ -6337,7 +6342,7 @@ def arguments_get_kwarg(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("attribute 'kwarg' has not been set") + w_err = space.wrap("'arguments' object has no attribute 'kwarg'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.kwarg) @@ -6350,7 +6355,7 @@ def arguments_get_defaults(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("attribute 'defaults' has not been set") + w_err = space.wrap("'arguments' object has no attribute 'defaults'") raise OperationError(space.w_AttributeError, w_err) if w_self.w_defaults is None: if w_self.defaults is None: @@ -6396,7 +6401,7 @@ def keyword_get_arg(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'arg' has not been set") + w_err = space.wrap("'keyword' object has no attribute 'arg'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.arg) @@ -6406,7 +6411,7 @@ def keyword_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'value' has not been set") + w_err = space.wrap("'keyword' object has no attribute 'value'") raise OperationError(space.w_AttributeError, w_err) return 
space.wrap(w_self.value) @@ -6441,7 +6446,7 @@ def alias_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("attribute 'name' has not been set") + w_err = space.wrap("'alias' object has no attribute 'name'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -6451,7 +6456,7 @@ def alias_get_asname(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("attribute 'asname' has not been set") + w_err = space.wrap("'alias' object has no attribute 'asname'") raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.asname) diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -162,7 +162,7 @@ exc = raises(TypeError, ast.Module, 1, 2).value msg = str(exc) assert msg == "Module constructor takes 0 or 1 positional arguments" - raises(AttributeError, ast.Module, nothing=23) + ast.Module(nothing=23) def test_future(self): mod = self.get_ast("from __future__ import with_statement") @@ -174,6 +174,13 @@ "from __future__ import nested_scopes") raises(SyntaxError, compile, mod, "", "exec") + def test_field_attr_writable(self): + import _ast as ast + x = ast.Num() + # We can assign to _fields + x._fields = 666 + assert x._fields == 666 + def test_pickle(self): skip("XXX implement me") import pickle @@ -186,3 +193,45 @@ co2 = compile(mod2, "", "exec") exec co2 in ns assert ns["x"] == 4 + + def test_classattrs(self): + import ast + x = ast.Num() + assert x._fields == ('n',) + exc = raises(AttributeError, getattr, x, 'n') + assert exc.value.args[0] == "'Num' object has no attribute 'n'" + + skip("WIP") + + x = ast.Num(42) + self.assertEquals(x.n, 42) + try: + x.lineno + except AttributeError, e: + self.assertEquals(e.args[0], + "'Num' object has no attribute 'lineno'") + else: + self.assert_(False) + + y = ast.Num() + x.lineno = y + self.assertEquals(x.lineno, y) 
+ + try: + x.foobar + except AttributeError, e: + self.assertEquals(e.args[0], + "'Num' object has no attribute 'foobar'") + else: + self.assert_(False) + + x = ast.Num(lineno=2) + self.assertEquals(x.lineno, 2) + + x = ast.Num(42, lineno=0) + self.assertEquals(x.lineno, 0) + self.assertEquals(x._fields, ('n',)) + self.assertEquals(x.n, 42) + + self.assertRaises(TypeError, ast.Num, 1, 2) + self.assertRaises(TypeError, ast.Num, 1, 2, lineno=0) From commits-noreply at bitbucket.org Fri Mar 11 18:20:55 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:20:55 +0100 (CET) Subject: [pypy-svn] pypy default: lineno and col_offset are now keyword-only arguments of the Ast constructor. Message-ID: <20110311172055.64400282BD7@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42519:8d281bd3780e Date: 2011-03-11 16:35 +0100 http://bitbucket.org/pypy/pypy/changeset/8d281bd3780e/ Log: lineno and col_offset are now keyword-only arguments of the Ast constructor. They are attributes, mandatory for the compiler. 
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -407,7 +407,7 @@ def visitConstructor(self, cons, base): super(AppExposeVisitor, self).visitConstructor(cons, cons.name) - self.make_init(cons.name, cons.fields + self.data.cons_attributes[cons]) + self.make_init(cons.name, cons.fields) self.make_typedef(cons.name, base, cons.fields, concrete=True, needs_init=True) @@ -608,7 +608,7 @@ if not (state >> i) & 1: missing = required[i] if missing is not None: - err = "required attribute '%s' missing from %s" + err = "required field \\"%s\\" missing from %s" err = err % (missing, host) w_err = space.wrap(err) raise OperationError(space.w_TypeError, w_err) diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -64,7 +64,7 @@ if not (state >> i) & 1: missing = required[i] if missing is not None: - err = "required attribute '%s' missing from %s" + err = "required field \"%s\" missing from %s" err = err % (missing, host) w_err = space.wrap(err) raise OperationError(space.w_TypeError, w_err) @@ -3298,15 +3298,15 @@ w_self.w_decorator_list = w_new_value w_self.initialization_state |= 8 -_FunctionDef_field_unroller = unrolling_iterable(['name', 'args', 'body', 'decorator_list', 'lineno', 'col_offset']) +_FunctionDef_field_unroller = unrolling_iterable(['name', 'args', 'body', 'decorator_list']) def FunctionDef_init(space, w_self, __args__): w_self = space.descr_self_interp_w(FunctionDef, w_self) w_self.w_body = None w_self.w_decorator_list = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 6: - w_err = space.wrap("FunctionDef constructor takes 0 or 6 positional arguments") + if len(args_w) != 4: + w_err = space.wrap("FunctionDef constructor takes 0 or 4 positional arguments") 
raise OperationError(space.w_TypeError, w_err) i = 0 for field in _FunctionDef_field_unroller: @@ -3388,7 +3388,7 @@ w_self.w_decorator_list = w_new_value w_self.initialization_state |= 8 -_ClassDef_field_unroller = unrolling_iterable(['name', 'bases', 'body', 'decorator_list', 'lineno', 'col_offset']) +_ClassDef_field_unroller = unrolling_iterable(['name', 'bases', 'body', 'decorator_list']) def ClassDef_init(space, w_self, __args__): w_self = space.descr_self_interp_w(ClassDef, w_self) w_self.w_bases = None @@ -3396,8 +3396,8 @@ w_self.w_decorator_list = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 6: - w_err = space.wrap("ClassDef constructor takes 0 or 6 positional arguments") + if len(args_w) != 4: + w_err = space.wrap("ClassDef constructor takes 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ClassDef_field_unroller: @@ -3428,13 +3428,13 @@ w_self.value = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 1 -_Return_field_unroller = unrolling_iterable(['value', 'lineno', 'col_offset']) +_Return_field_unroller = unrolling_iterable(['value']) def Return_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Return, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Return constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Return constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Return_field_unroller: @@ -3469,14 +3469,14 @@ w_self.w_targets = w_new_value w_self.initialization_state |= 1 -_Delete_field_unroller = unrolling_iterable(['targets', 'lineno', 'col_offset']) +_Delete_field_unroller = unrolling_iterable(['targets']) def Delete_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Delete, w_self) w_self.w_targets = None args_w, kwargs_w = __args__.unpack() if args_w: - if 
len(args_w) != 3: - w_err = space.wrap("Delete constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Delete constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Delete_field_unroller: @@ -3521,14 +3521,14 @@ w_self.value = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 2 -_Assign_field_unroller = unrolling_iterable(['targets', 'value', 'lineno', 'col_offset']) +_Assign_field_unroller = unrolling_iterable(['targets', 'value']) def Assign_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Assign, w_self) w_self.w_targets = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("Assign constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("Assign constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Assign_field_unroller: @@ -3578,13 +3578,13 @@ w_self.value = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 4 -_AugAssign_field_unroller = unrolling_iterable(['target', 'op', 'value', 'lineno', 'col_offset']) +_AugAssign_field_unroller = unrolling_iterable(['target', 'op', 'value']) def AugAssign_init(space, w_self, __args__): w_self = space.descr_self_interp_w(AugAssign, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("AugAssign constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("AugAssign constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _AugAssign_field_unroller: @@ -3641,14 +3641,14 @@ w_self.nl = space.bool_w(w_new_value) w_self.initialization_state |= 4 -_Print_field_unroller = unrolling_iterable(['dest', 'values', 'nl', 'lineno', 'col_offset']) +_Print_field_unroller = unrolling_iterable(['dest', 'values', 
'nl']) def Print_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Print, w_self) w_self.w_values = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Print constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Print constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Print_field_unroller: @@ -3722,15 +3722,15 @@ w_self.w_orelse = w_new_value w_self.initialization_state |= 8 -_For_field_unroller = unrolling_iterable(['target', 'iter', 'body', 'orelse', 'lineno', 'col_offset']) +_For_field_unroller = unrolling_iterable(['target', 'iter', 'body', 'orelse']) def For_init(space, w_self, __args__): w_self = space.descr_self_interp_w(For, w_self) w_self.w_body = None w_self.w_orelse = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 6: - w_err = space.wrap("For constructor takes 0 or 6 positional arguments") + if len(args_w) != 4: + w_err = space.wrap("For constructor takes 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _For_field_unroller: @@ -3795,15 +3795,15 @@ w_self.w_orelse = w_new_value w_self.initialization_state |= 4 -_While_field_unroller = unrolling_iterable(['test', 'body', 'orelse', 'lineno', 'col_offset']) +_While_field_unroller = unrolling_iterable(['test', 'body', 'orelse']) def While_init(space, w_self, __args__): w_self = space.descr_self_interp_w(While, w_self) w_self.w_body = None w_self.w_orelse = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("While constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("While constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _While_field_unroller: @@ -3867,15 +3867,15 @@ w_self.w_orelse = w_new_value w_self.initialization_state |= 4 
-_If_field_unroller = unrolling_iterable(['test', 'body', 'orelse', 'lineno', 'col_offset']) +_If_field_unroller = unrolling_iterable(['test', 'body', 'orelse']) def If_init(space, w_self, __args__): w_self = space.descr_self_interp_w(If, w_self) w_self.w_body = None w_self.w_orelse = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("If constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("If constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _If_field_unroller: @@ -3932,14 +3932,14 @@ w_self.w_body = w_new_value w_self.initialization_state |= 4 -_With_field_unroller = unrolling_iterable(['context_expr', 'optional_vars', 'body', 'lineno', 'col_offset']) +_With_field_unroller = unrolling_iterable(['context_expr', 'optional_vars', 'body']) def With_init(space, w_self, __args__): w_self = space.descr_self_interp_w(With, w_self) w_self.w_body = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("With constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("With constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _With_field_unroller: @@ -3989,13 +3989,13 @@ w_self.tback = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 4 -_Raise_field_unroller = unrolling_iterable(['type', 'inst', 'tback', 'lineno', 'col_offset']) +_Raise_field_unroller = unrolling_iterable(['type', 'inst', 'tback']) def Raise_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Raise, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Raise constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Raise constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, 
w_err) i = 0 for field in _Raise_field_unroller: @@ -4066,7 +4066,7 @@ w_self.w_orelse = w_new_value w_self.initialization_state |= 4 -_TryExcept_field_unroller = unrolling_iterable(['body', 'handlers', 'orelse', 'lineno', 'col_offset']) +_TryExcept_field_unroller = unrolling_iterable(['body', 'handlers', 'orelse']) def TryExcept_init(space, w_self, __args__): w_self = space.descr_self_interp_w(TryExcept, w_self) w_self.w_body = None @@ -4074,8 +4074,8 @@ w_self.w_orelse = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("TryExcept constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("TryExcept constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _TryExcept_field_unroller: @@ -4129,15 +4129,15 @@ w_self.w_finalbody = w_new_value w_self.initialization_state |= 2 -_TryFinally_field_unroller = unrolling_iterable(['body', 'finalbody', 'lineno', 'col_offset']) +_TryFinally_field_unroller = unrolling_iterable(['body', 'finalbody']) def TryFinally_init(space, w_self, __args__): w_self = space.descr_self_interp_w(TryFinally, w_self) w_self.w_body = None w_self.w_finalbody = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("TryFinally constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("TryFinally constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _TryFinally_field_unroller: @@ -4176,13 +4176,13 @@ w_self.msg = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 2 -_Assert_field_unroller = unrolling_iterable(['test', 'msg', 'lineno', 'col_offset']) +_Assert_field_unroller = unrolling_iterable(['test', 'msg']) def Assert_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Assert, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if 
len(args_w) != 4: - w_err = space.wrap("Assert constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("Assert constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Assert_field_unroller: @@ -4218,14 +4218,14 @@ w_self.w_names = w_new_value w_self.initialization_state |= 1 -_Import_field_unroller = unrolling_iterable(['names', 'lineno', 'col_offset']) +_Import_field_unroller = unrolling_iterable(['names']) def Import_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Import, w_self) w_self.w_names = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Import constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Import constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Import_field_unroller: @@ -4283,14 +4283,14 @@ w_self.level = space.int_w(w_new_value) w_self.initialization_state |= 4 -_ImportFrom_field_unroller = unrolling_iterable(['module', 'names', 'level', 'lineno', 'col_offset']) +_ImportFrom_field_unroller = unrolling_iterable(['module', 'names', 'level']) def ImportFrom_init(space, w_self, __args__): w_self = space.descr_self_interp_w(ImportFrom, w_self) w_self.w_names = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("ImportFrom constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("ImportFrom constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ImportFrom_field_unroller: @@ -4340,13 +4340,13 @@ w_self.locals = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 4 -_Exec_field_unroller = unrolling_iterable(['body', 'globals', 'locals', 'lineno', 'col_offset']) +_Exec_field_unroller = unrolling_iterable(['body', 'globals', 'locals']) 
def Exec_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Exec, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Exec constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Exec constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Exec_field_unroller: @@ -4383,14 +4383,14 @@ w_self.w_names = w_new_value w_self.initialization_state |= 1 -_Global_field_unroller = unrolling_iterable(['names', 'lineno', 'col_offset']) +_Global_field_unroller = unrolling_iterable(['names']) def Global_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Global, w_self) w_self.w_names = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Global constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Global constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Global_field_unroller: @@ -4418,13 +4418,13 @@ w_self.value = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 1 -_Expr_field_unroller = unrolling_iterable(['value', 'lineno', 'col_offset']) +_Expr_field_unroller = unrolling_iterable(['value']) def Expr_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Expr, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Expr constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Expr constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Expr_field_unroller: @@ -4442,18 +4442,13 @@ ) Expr.typedef.acceptable_as_base_class = False -_Pass_field_unroller = unrolling_iterable(['lineno', 'col_offset']) +_Pass_field_unroller = unrolling_iterable([]) def Pass_init(space, w_self, __args__): w_self = 
space.descr_self_interp_w(Pass, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 2: - w_err = space.wrap("Pass constructor takes 0 or 2 positional arguments") - raise OperationError(space.w_TypeError, w_err) - i = 0 - for field in _Pass_field_unroller: - space.setattr(w_self, space.wrap(field), args_w[i]) - i += 1 + w_err = space.wrap("Pass constructor takes no arguments") + raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -4465,18 +4460,13 @@ ) Pass.typedef.acceptable_as_base_class = False -_Break_field_unroller = unrolling_iterable(['lineno', 'col_offset']) +_Break_field_unroller = unrolling_iterable([]) def Break_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Break, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 2: - w_err = space.wrap("Break constructor takes 0 or 2 positional arguments") - raise OperationError(space.w_TypeError, w_err) - i = 0 - for field in _Break_field_unroller: - space.setattr(w_self, space.wrap(field), args_w[i]) - i += 1 + w_err = space.wrap("Break constructor takes no arguments") + raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -4488,18 +4478,13 @@ ) Break.typedef.acceptable_as_base_class = False -_Continue_field_unroller = unrolling_iterable(['lineno', 'col_offset']) +_Continue_field_unroller = unrolling_iterable([]) def Continue_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Continue, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 2: - w_err = space.wrap("Continue constructor takes 0 or 2 positional arguments") - raise OperationError(space.w_TypeError, w_err) - i = 0 - for field in _Continue_field_unroller: - space.setattr(w_self, space.wrap(field), args_w[i]) - i += 1 + w_err = space.wrap("Continue constructor takes no 
arguments") + raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -4567,14 +4552,14 @@ w_self.w_values = w_new_value w_self.initialization_state |= 2 -_BoolOp_field_unroller = unrolling_iterable(['op', 'values', 'lineno', 'col_offset']) +_BoolOp_field_unroller = unrolling_iterable(['op', 'values']) def BoolOp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(BoolOp, w_self) w_self.w_values = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("BoolOp constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("BoolOp constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _BoolOp_field_unroller: @@ -4624,13 +4609,13 @@ w_self.right = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 4 -_BinOp_field_unroller = unrolling_iterable(['left', 'op', 'right', 'lineno', 'col_offset']) +_BinOp_field_unroller = unrolling_iterable(['left', 'op', 'right']) def BinOp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(BinOp, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("BinOp constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("BinOp constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _BinOp_field_unroller: @@ -4671,13 +4656,13 @@ w_self.operand = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 2 -_UnaryOp_field_unroller = unrolling_iterable(['op', 'operand', 'lineno', 'col_offset']) +_UnaryOp_field_unroller = unrolling_iterable(['op', 'operand']) def UnaryOp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(UnaryOp, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = 
space.wrap("UnaryOp constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("UnaryOp constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _UnaryOp_field_unroller: @@ -4716,13 +4701,13 @@ w_self.body = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 2 -_Lambda_field_unroller = unrolling_iterable(['args', 'body', 'lineno', 'col_offset']) +_Lambda_field_unroller = unrolling_iterable(['args', 'body']) def Lambda_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Lambda, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("Lambda constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("Lambda constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Lambda_field_unroller: @@ -4771,13 +4756,13 @@ w_self.orelse = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 4 -_IfExp_field_unroller = unrolling_iterable(['test', 'body', 'orelse', 'lineno', 'col_offset']) +_IfExp_field_unroller = unrolling_iterable(['test', 'body', 'orelse']) def IfExp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(IfExp, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("IfExp constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("IfExp constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _IfExp_field_unroller: @@ -4831,15 +4816,15 @@ w_self.w_values = w_new_value w_self.initialization_state |= 2 -_Dict_field_unroller = unrolling_iterable(['keys', 'values', 'lineno', 'col_offset']) +_Dict_field_unroller = unrolling_iterable(['keys', 'values']) def Dict_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Dict, w_self) 
w_self.w_keys = None w_self.w_values = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("Dict constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("Dict constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Dict_field_unroller: @@ -4875,14 +4860,14 @@ w_self.w_elts = w_new_value w_self.initialization_state |= 1 -_Set_field_unroller = unrolling_iterable(['elts', 'lineno', 'col_offset']) +_Set_field_unroller = unrolling_iterable(['elts']) def Set_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Set, w_self) w_self.w_elts = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Set constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Set constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Set_field_unroller: @@ -4927,14 +4912,14 @@ w_self.w_generators = w_new_value w_self.initialization_state |= 2 -_ListComp_field_unroller = unrolling_iterable(['elt', 'generators', 'lineno', 'col_offset']) +_ListComp_field_unroller = unrolling_iterable(['elt', 'generators']) def ListComp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(ListComp, w_self) w_self.w_generators = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("ListComp constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("ListComp constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ListComp_field_unroller: @@ -4980,14 +4965,14 @@ w_self.w_generators = w_new_value w_self.initialization_state |= 2 -_SetComp_field_unroller = unrolling_iterable(['elt', 'generators', 'lineno', 'col_offset']) +_SetComp_field_unroller = unrolling_iterable(['elt', 
'generators']) def SetComp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(SetComp, w_self) w_self.w_generators = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("SetComp constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("SetComp constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _SetComp_field_unroller: @@ -5043,14 +5028,14 @@ w_self.w_generators = w_new_value w_self.initialization_state |= 4 -_DictComp_field_unroller = unrolling_iterable(['key', 'value', 'generators', 'lineno', 'col_offset']) +_DictComp_field_unroller = unrolling_iterable(['key', 'value', 'generators']) def DictComp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(DictComp, w_self) w_self.w_generators = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("DictComp constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("DictComp constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _DictComp_field_unroller: @@ -5097,14 +5082,14 @@ w_self.w_generators = w_new_value w_self.initialization_state |= 2 -_GeneratorExp_field_unroller = unrolling_iterable(['elt', 'generators', 'lineno', 'col_offset']) +_GeneratorExp_field_unroller = unrolling_iterable(['elt', 'generators']) def GeneratorExp_init(space, w_self, __args__): w_self = space.descr_self_interp_w(GeneratorExp, w_self) w_self.w_generators = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("GeneratorExp constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("GeneratorExp constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _GeneratorExp_field_unroller: @@ -5133,13 +5118,13 @@ 
w_self.value = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 1 -_Yield_field_unroller = unrolling_iterable(['value', 'lineno', 'col_offset']) +_Yield_field_unroller = unrolling_iterable(['value']) def Yield_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Yield, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Yield constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Yield constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Yield_field_unroller: @@ -5201,15 +5186,15 @@ w_self.w_comparators = w_new_value w_self.initialization_state |= 4 -_Compare_field_unroller = unrolling_iterable(['left', 'ops', 'comparators', 'lineno', 'col_offset']) +_Compare_field_unroller = unrolling_iterable(['left', 'ops', 'comparators']) def Compare_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Compare, w_self) w_self.w_ops = None w_self.w_comparators = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Compare constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Compare constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Compare_field_unroller: @@ -5293,15 +5278,15 @@ w_self.kwargs = space.interp_w(expr, w_new_value, True) w_self.initialization_state |= 16 -_Call_field_unroller = unrolling_iterable(['func', 'args', 'keywords', 'starargs', 'kwargs', 'lineno', 'col_offset']) +_Call_field_unroller = unrolling_iterable(['func', 'args', 'keywords', 'starargs', 'kwargs']) def Call_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Call, w_self) w_self.w_args = None w_self.w_keywords = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 7: - w_err = space.wrap("Call constructor takes 0 or 7 
positional arguments") + if len(args_w) != 5: + w_err = space.wrap("Call constructor takes 0 or 5 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Call_field_unroller: @@ -5333,13 +5318,13 @@ w_self.value = space.interp_w(expr, w_new_value, False) w_self.initialization_state |= 1 -_Repr_field_unroller = unrolling_iterable(['value', 'lineno', 'col_offset']) +_Repr_field_unroller = unrolling_iterable(['value']) def Repr_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Repr, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Repr constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Repr constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Repr_field_unroller: @@ -5367,13 +5352,13 @@ w_self.n = w_new_value w_self.initialization_state |= 1 -_Num_field_unroller = unrolling_iterable(['n', 'lineno', 'col_offset']) +_Num_field_unroller = unrolling_iterable(['n']) def Num_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Num, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Num constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Num constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Num_field_unroller: @@ -5404,13 +5389,13 @@ w_self.s = w_new_value w_self.initialization_state |= 1 -_Str_field_unroller = unrolling_iterable(['s', 'lineno', 'col_offset']) +_Str_field_unroller = unrolling_iterable(['s']) def Str_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Str, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Str constructor takes 0 or 3 positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Str constructor takes 0 or 
1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Str_field_unroller: @@ -5459,13 +5444,13 @@ w_self.ctx = obj.to_simple_int(space) w_self.initialization_state |= 4 -_Attribute_field_unroller = unrolling_iterable(['value', 'attr', 'ctx', 'lineno', 'col_offset']) +_Attribute_field_unroller = unrolling_iterable(['value', 'attr', 'ctx']) def Attribute_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Attribute, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Attribute constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Attribute constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Attribute_field_unroller: @@ -5516,13 +5501,13 @@ w_self.ctx = obj.to_simple_int(space) w_self.initialization_state |= 4 -_Subscript_field_unroller = unrolling_iterable(['value', 'slice', 'ctx', 'lineno', 'col_offset']) +_Subscript_field_unroller = unrolling_iterable(['value', 'slice', 'ctx']) def Subscript_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Subscript, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("Subscript constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("Subscript constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Subscript_field_unroller: @@ -5563,13 +5548,13 @@ w_self.ctx = obj.to_simple_int(space) w_self.initialization_state |= 2 -_Name_field_unroller = unrolling_iterable(['id', 'ctx', 'lineno', 'col_offset']) +_Name_field_unroller = unrolling_iterable(['id', 'ctx']) def Name_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Name, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("Name constructor takes 0 or 4 positional 
arguments") + if len(args_w) != 2: + w_err = space.wrap("Name constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Name_field_unroller: @@ -5616,14 +5601,14 @@ w_self.ctx = obj.to_simple_int(space) w_self.initialization_state |= 2 -_List_field_unroller = unrolling_iterable(['elts', 'ctx', 'lineno', 'col_offset']) +_List_field_unroller = unrolling_iterable(['elts', 'ctx']) def List_init(space, w_self, __args__): w_self = space.descr_self_interp_w(List, w_self) w_self.w_elts = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("List constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("List constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _List_field_unroller: @@ -5670,14 +5655,14 @@ w_self.ctx = obj.to_simple_int(space) w_self.initialization_state |= 2 -_Tuple_field_unroller = unrolling_iterable(['elts', 'ctx', 'lineno', 'col_offset']) +_Tuple_field_unroller = unrolling_iterable(['elts', 'ctx']) def Tuple_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Tuple, w_self) w_self.w_elts = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 4: - w_err = space.wrap("Tuple constructor takes 0 or 4 positional arguments") + if len(args_w) != 2: + w_err = space.wrap("Tuple constructor takes 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Tuple_field_unroller: @@ -5706,13 +5691,13 @@ w_self.value = w_new_value w_self.initialization_state |= 1 -_Const_field_unroller = unrolling_iterable(['value', 'lineno', 'col_offset']) +_Const_field_unroller = unrolling_iterable(['value']) def Const_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Const, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 3: - w_err = space.wrap("Const constructor takes 0 or 3 
positional arguments") + if len(args_w) != 1: + w_err = space.wrap("Const constructor takes 0 or 1 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Const_field_unroller: @@ -6283,14 +6268,14 @@ w_self.w_body = w_new_value w_self.initialization_state |= 4 -_ExceptHandler_field_unroller = unrolling_iterable(['type', 'name', 'body', 'lineno', 'col_offset']) +_ExceptHandler_field_unroller = unrolling_iterable(['type', 'name', 'body']) def ExceptHandler_init(space, w_self, __args__): w_self = space.descr_self_interp_w(ExceptHandler, w_self) w_self.w_body = None args_w, kwargs_w = __args__.unpack() if args_w: - if len(args_w) != 5: - w_err = space.wrap("ExceptHandler constructor takes 0 or 5 positional arguments") + if len(args_w) != 3: + w_err = space.wrap("ExceptHandler constructor takes 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ExceptHandler_field_unroller: diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -60,24 +60,25 @@ mod = ast.Module() raises(AttributeError, getattr, mod, "body") exc = raises(TypeError, com, mod).value - assert str(exc) == "required attribute 'body' missing from Module" + assert str(exc) == "required field \"body\" missing from Module" expr = ast.Name() expr.id = "hi" expr.ctx = ast.Load() expr.lineno = 4 - exc = raises(TypeError, com, ast.Module([ast.Expr(expr, 0, 0)])).value - assert str(exc) == "required attribute 'col_offset' missing from Name" + exc = raises(TypeError, com, ast.Module([ast.Expr(expr)])).value + assert (str(exc) == "required field \"lineno\" missing from stmt" or # cpython + str(exc) == "required field \"lineno\" missing from Expr") # pypy, better def test_int(self): ast = self.ast - imp = ast.ImportFrom("", ["apples"], -1, 0, 0) + imp = ast.ImportFrom("", ["apples"], -1) assert imp.level == -1 imp.level = 3 assert 
imp.level == 3 def test_identifier(self): ast = self.ast - name = ast.Name("name_word", ast.Load(), 0, 0) + name = ast.Name("name_word", ast.Load()) assert name.id == "name_word" name.id = "hi" assert name.id == "hi" @@ -85,7 +86,7 @@ def test_bool(self): ast = self.ast - pr = ast.Print(None, [ast.Name("hi", ast.Load(), 0, 0)], False, 0, 0) + pr = ast.Print(None, [ast.Name("hi", ast.Load())], False) assert not pr.nl assert isinstance(pr.nl, bool) pr.nl = True @@ -93,7 +94,7 @@ def test_object(self): ast = self.ast - const = ast.Const(4, 0, 0) + const = ast.Const(4) assert const.value == 4 const.value = 5 assert const.value == 5 @@ -114,9 +115,12 @@ mod = self.get_ast("x = y = 3") assign = mod.body[0] assert len(assign.targets) == 2 - assign.targets[1] = ast.Name("lemon", ast.Store(), 0, 0) - name = ast.Name("apple", ast.Store(), 0, 0) - mod.body.append(ast.Assign([name], ast.Num(4, 0, 0), 0, 0)) + assign.targets[1] = ast.Name("lemon", ast.Store(), + lineno=0, col_offset=0) + name = ast.Name("apple", ast.Store(), + lineno=0, col_offset=0) + mod.body.append(ast.Assign([name], ast.Num(4, lineno=0, col_offset=0), + lineno=0, col_offset=0)) co = compile(mod, "", "exec") ns = {} exec co in ns @@ -141,10 +145,10 @@ body = [] mod = ast.Module(body) assert mod.body is body - target = ast.Name("hi", ast.Store(), 0, 0) - expr = ast.Name("apples", ast.Load(), 0, 0) + target = ast.Name("hi", ast.Store()) + expr = ast.Name("apples", ast.Load()) otherwise = [] - fr = ast.For(target, expr, body, otherwise, 0, 1) + fr = ast.For(target, expr, body, otherwise, lineno=0, col_offset=1) assert fr.target is target assert fr.iter is expr assert fr.orelse is otherwise From commits-noreply at bitbucket.org Fri Mar 11 18:20:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:20:57 +0100 (CET) Subject: [pypy-svn] pypy default: Small tweak and optimization Message-ID: <20110311172057.A5FC1282BD4@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: 
r42520:2f23e4b02b21 Date: 2011-03-11 16:37 +0100 http://bitbucket.org/pypy/pypy/changeset/2f23e4b02b21/ Log: Small tweak and optimization diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -378,8 +378,9 @@ def make_init(self, name, fields): comma_fields = ", ".join(repr(field.name.value) for field in fields) - config = (name, comma_fields) - self.emit("_%s_field_unroller = unrolling_iterable([%s])" % config) + if fields: + config = (name, comma_fields) + self.emit("_%s_field_unroller = unrolling_iterable([%s])" % config) self.emit("def %s_init(space, w_self, __args__):" % (name,)) self.emit("w_self = space.descr_self_interp_w(%s, w_self)" % (name,), 1) for field in fields: @@ -399,7 +400,7 @@ self.emit("i += 1", 3) else: self.emit("w_err = space.wrap(\"%s constructor takes no " \ - " arguments\")" % (name,), 2) + "arguments\")" % (name,), 2) self.emit("raise OperationError(space.w_TypeError, w_err)", 2) self.emit("for field, w_value in kwargs_w.iteritems():", 1) self.emit("space.setattr(w_self, space.wrap(field), w_value)", 2) diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -4442,12 +4442,11 @@ ) Expr.typedef.acceptable_as_base_class = False -_Pass_field_unroller = unrolling_iterable([]) def Pass_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Pass, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - w_err = space.wrap("Pass constructor takes no arguments") + w_err = space.wrap("Pass constructor takes no arguments") raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -4460,12 +4459,11 @@ ) Pass.typedef.acceptable_as_base_class = False -_Break_field_unroller = 
unrolling_iterable([]) def Break_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Break, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - w_err = space.wrap("Break constructor takes no arguments") + w_err = space.wrap("Break constructor takes no arguments") raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -4478,12 +4476,11 @@ ) Break.typedef.acceptable_as_base_class = False -_Continue_field_unroller = unrolling_iterable([]) def Continue_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Continue, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - w_err = space.wrap("Continue constructor takes no arguments") + w_err = space.wrap("Continue constructor takes no arguments") raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) @@ -5769,12 +5766,11 @@ ) slice.typedef.acceptable_as_base_class = False -_Ellipsis_field_unroller = unrolling_iterable([]) def Ellipsis_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Ellipsis, w_self) args_w, kwargs_w = __args__.unpack() if args_w: - w_err = space.wrap("Ellipsis constructor takes no arguments") + w_err = space.wrap("Ellipsis constructor takes no arguments") raise OperationError(space.w_TypeError, w_err) for field, w_value in kwargs_w.iteritems(): space.setattr(w_self, space.wrap(field), w_value) From commits-noreply at bitbucket.org Fri Mar 11 18:21:00 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:21:00 +0100 (CET) Subject: [pypy-svn] pypy default: Display the name of the derived class in AttributeError messages Message-ID: <20110311172100.86F06282BAA@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42521:09ee6be1af7a Date: 2011-03-11 17:00 +0100 http://bitbucket.org/pypy/pypy/changeset/09ee6be1af7a/ Log: Display the name of the 
derived class in AttributeError messages diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -429,8 +429,9 @@ else: flag = self.data.field_masks[field] self.emit("if not w_self.initialization_state & %s:" % (flag,), 1) - self.emit("w_err = space.wrap(\"'%s' object has no attribute '%s'\")" % - (name, field.name,), 2) + self.emit("typename = space.type(w_self).getname(space)", 2) + self.emit("w_err = space.wrap(\"'%%s' object has no attribute '%s'\" %% typename)" % + (field.name,), 2) self.emit("raise OperationError(space.w_AttributeError, w_err)", 2) if field.seq: self.emit("if w_self.w_%s is None:" % (field.name,), 1) diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -3058,7 +3058,8 @@ def Module_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Module' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3100,7 +3101,8 @@ def Interactive_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Interactive' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3142,7 +3144,8 @@ def Expression_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Expression' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no 
attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -3176,7 +3179,8 @@ def Suite_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Suite' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3218,7 +3222,8 @@ def stmt_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("'stmt' object has no attribute 'lineno'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -3228,7 +3233,8 @@ def stmt_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("'stmt' object has no attribute 'col_offset'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -3246,7 +3252,8 @@ def FunctionDef_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'FunctionDef' object has no attribute 'name'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'name'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -3256,7 +3263,8 @@ def FunctionDef_get_args(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'FunctionDef' object has no attribute 'args'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'args'" % typename) raise OperationError(space.w_AttributeError, w_err) 
return space.wrap(w_self.args) @@ -3266,7 +3274,8 @@ def FunctionDef_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'FunctionDef' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3283,7 +3292,8 @@ def FunctionDef_get_decorator_list(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("'FunctionDef' object has no attribute 'decorator_list'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'decorator_list'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_decorator_list is None: if w_self.decorator_list is None: @@ -3329,7 +3339,8 @@ def ClassDef_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'ClassDef' object has no attribute 'name'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'name'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -3339,7 +3350,8 @@ def ClassDef_get_bases(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'ClassDef' object has no attribute 'bases'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'bases'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_bases is None: if w_self.bases is None: @@ -3356,7 +3368,8 @@ def ClassDef_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'ClassDef' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body 
is None: @@ -3373,7 +3386,8 @@ def ClassDef_get_decorator_list(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("'ClassDef' object has no attribute 'decorator_list'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'decorator_list'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_decorator_list is None: if w_self.decorator_list is None: @@ -3420,7 +3434,8 @@ def Return_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Return' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3454,7 +3469,8 @@ def Delete_get_targets(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Delete' object has no attribute 'targets'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'targets'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_targets is None: if w_self.targets is None: @@ -3496,7 +3512,8 @@ def Assign_get_targets(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Assign' object has no attribute 'targets'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'targets'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_targets is None: if w_self.targets is None: @@ -3513,7 +3530,8 @@ def Assign_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Assign' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3549,7 +3567,8 @@ def 
AugAssign_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'AugAssign' object has no attribute 'target'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'target'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -3559,7 +3578,8 @@ def AugAssign_get_op(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'AugAssign' object has no attribute 'op'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'op'" % typename) raise OperationError(space.w_AttributeError, w_err) return operator_to_class[w_self.op - 1]() @@ -3570,7 +3590,8 @@ def AugAssign_get_value(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'AugAssign' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -3606,7 +3627,8 @@ def Print_get_dest(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Print' object has no attribute 'dest'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'dest'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.dest) @@ -3616,7 +3638,8 @@ def Print_get_values(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Print' object has no attribute 'values'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'values'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -3633,7 +3656,8 @@ def Print_get_nl(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Print' object has no attribute 
'nl'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'nl'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.nl) @@ -3670,7 +3694,8 @@ def For_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'For' object has no attribute 'target'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'target'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -3680,7 +3705,8 @@ def For_get_iter(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'For' object has no attribute 'iter'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'iter'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.iter) @@ -3690,7 +3716,8 @@ def For_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'For' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3707,7 +3734,8 @@ def For_get_orelse(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("'For' object has no attribute 'orelse'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3753,7 +3781,8 @@ def While_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'While' object has no attribute 'test'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'test'" % typename) raise 
OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -3763,7 +3792,8 @@ def While_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'While' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3780,7 +3810,8 @@ def While_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'While' object has no attribute 'orelse'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3825,7 +3856,8 @@ def If_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'If' object has no attribute 'test'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'test'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -3835,7 +3867,8 @@ def If_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'If' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3852,7 +3885,8 @@ def If_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'If' object has no attribute 'orelse'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -3897,7 +3931,8 @@ def 
With_get_context_expr(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'With' object has no attribute 'context_expr'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'context_expr'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.context_expr) @@ -3907,7 +3942,8 @@ def With_get_optional_vars(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'With' object has no attribute 'optional_vars'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'optional_vars'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.optional_vars) @@ -3917,7 +3953,8 @@ def With_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'With' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -3961,7 +3998,8 @@ def Raise_get_type(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Raise' object has no attribute 'type'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'type'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.type) @@ -3971,7 +4009,8 @@ def Raise_get_inst(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Raise' object has no attribute 'inst'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'inst'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.inst) @@ -3981,7 +4020,8 @@ def Raise_get_tback(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Raise' object has 
no attribute 'tback'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'tback'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.tback) @@ -4017,7 +4057,8 @@ def TryExcept_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'TryExcept' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -4034,7 +4075,8 @@ def TryExcept_get_handlers(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'TryExcept' object has no attribute 'handlers'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'handlers'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_handlers is None: if w_self.handlers is None: @@ -4051,7 +4093,8 @@ def TryExcept_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'TryExcept' object has no attribute 'orelse'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_orelse is None: if w_self.orelse is None: @@ -4097,7 +4140,8 @@ def TryFinally_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'TryFinally' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -4114,7 +4158,8 @@ def TryFinally_get_finalbody(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'TryFinally' object has no attribute 'finalbody'") + 
typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'finalbody'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_finalbody is None: if w_self.finalbody is None: @@ -4158,7 +4203,8 @@ def Assert_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Assert' object has no attribute 'test'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'test'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -4168,7 +4214,8 @@ def Assert_get_msg(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Assert' object has no attribute 'msg'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'msg'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.msg) @@ -4203,7 +4250,8 @@ def Import_get_names(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Import' object has no attribute 'names'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'names'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4245,7 +4293,8 @@ def ImportFrom_get_module(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'ImportFrom' object has no attribute 'module'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'module'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.module) @@ -4258,7 +4307,8 @@ def ImportFrom_get_names(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'ImportFrom' object has no attribute 'names'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'names'" 
% typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4275,7 +4325,8 @@ def ImportFrom_get_level(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'ImportFrom' object has no attribute 'level'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'level'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.level) @@ -4312,7 +4363,8 @@ def Exec_get_body(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Exec' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4322,7 +4374,8 @@ def Exec_get_globals(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Exec' object has no attribute 'globals'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'globals'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.globals) @@ -4332,7 +4385,8 @@ def Exec_get_locals(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Exec' object has no attribute 'locals'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'locals'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.locals) @@ -4368,7 +4422,8 @@ def Global_get_names(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Global' object has no attribute 'names'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'names'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_names is None: if w_self.names is None: @@ -4410,7 +4465,8 @@ 
def Expr_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Expr' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -4495,7 +4551,8 @@ def expr_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("'expr' object has no attribute 'lineno'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -4505,7 +4562,8 @@ def expr_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("'expr' object has no attribute 'col_offset'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -4523,7 +4581,8 @@ def BoolOp_get_op(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'BoolOp' object has no attribute 'op'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'op'" % typename) raise OperationError(space.w_AttributeError, w_err) return boolop_to_class[w_self.op - 1]() @@ -4534,7 +4593,8 @@ def BoolOp_get_values(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'BoolOp' object has no attribute 'values'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'values'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -4577,7 +4637,8 @@ def BinOp_get_left(space, w_self): if not w_self.initialization_state & 1: - w_err = 
space.wrap("'BinOp' object has no attribute 'left'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'left'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.left) @@ -4587,7 +4648,8 @@ def BinOp_get_op(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'BinOp' object has no attribute 'op'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'op'" % typename) raise OperationError(space.w_AttributeError, w_err) return operator_to_class[w_self.op - 1]() @@ -4598,7 +4660,8 @@ def BinOp_get_right(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'BinOp' object has no attribute 'right'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'right'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.right) @@ -4634,7 +4697,8 @@ def UnaryOp_get_op(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'UnaryOp' object has no attribute 'op'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'op'" % typename) raise OperationError(space.w_AttributeError, w_err) return unaryop_to_class[w_self.op - 1]() @@ -4645,7 +4709,8 @@ def UnaryOp_get_operand(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'UnaryOp' object has no attribute 'operand'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'operand'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.operand) @@ -4680,7 +4745,8 @@ def Lambda_get_args(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Lambda' object has no attribute 'args'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'args'" 
% typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.args) @@ -4690,7 +4756,8 @@ def Lambda_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Lambda' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4725,7 +4792,8 @@ def IfExp_get_test(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'IfExp' object has no attribute 'test'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'test'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.test) @@ -4735,7 +4803,8 @@ def IfExp_get_body(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'IfExp' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.body) @@ -4745,7 +4814,8 @@ def IfExp_get_orelse(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'IfExp' object has no attribute 'orelse'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.orelse) @@ -4781,7 +4851,8 @@ def Dict_get_keys(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Dict' object has no attribute 'keys'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'keys'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_keys is None: if w_self.keys is None: @@ -4798,7 +4869,8 @@ def Dict_get_values(space, w_self): if not 
w_self.initialization_state & 2: - w_err = space.wrap("'Dict' object has no attribute 'values'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'values'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_values is None: if w_self.values is None: @@ -4842,7 +4914,8 @@ def Set_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Set' object has no attribute 'elts'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elts'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_elts is None: if w_self.elts is None: @@ -4884,7 +4957,8 @@ def ListComp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'ListComp' object has no attribute 'elt'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -4894,7 +4968,8 @@ def ListComp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'ListComp' object has no attribute 'generators'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'generators'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -4937,7 +5012,8 @@ def SetComp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'SetComp' object has no attribute 'elt'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -4947,7 +5023,8 @@ def SetComp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'SetComp' object has no attribute 
'generators'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'generators'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -4990,7 +5067,8 @@ def DictComp_get_key(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'DictComp' object has no attribute 'key'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'key'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.key) @@ -5000,7 +5078,8 @@ def DictComp_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'DictComp' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5010,7 +5089,8 @@ def DictComp_get_generators(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'DictComp' object has no attribute 'generators'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'generators'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -5054,7 +5134,8 @@ def GeneratorExp_get_elt(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'GeneratorExp' object has no attribute 'elt'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.elt) @@ -5064,7 +5145,8 @@ def GeneratorExp_get_generators(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'GeneratorExp' object has no attribute 'generators'") + typename = 
space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'generators'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_generators is None: if w_self.generators is None: @@ -5107,7 +5189,8 @@ def Yield_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Yield' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5141,7 +5224,8 @@ def Compare_get_left(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Compare' object has no attribute 'left'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'left'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.left) @@ -5151,7 +5235,8 @@ def Compare_get_ops(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Compare' object has no attribute 'ops'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ops'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_ops is None: if w_self.ops is None: @@ -5168,7 +5253,8 @@ def Compare_get_comparators(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Compare' object has no attribute 'comparators'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'comparators'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_comparators is None: if w_self.comparators is None: @@ -5213,7 +5299,8 @@ def Call_get_func(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Call' object has no attribute 'func'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no 
attribute 'func'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.func) @@ -5223,7 +5310,8 @@ def Call_get_args(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Call' object has no attribute 'args'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'args'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_args is None: if w_self.args is None: @@ -5240,7 +5328,8 @@ def Call_get_keywords(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Call' object has no attribute 'keywords'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'keywords'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_keywords is None: if w_self.keywords is None: @@ -5257,7 +5346,8 @@ def Call_get_starargs(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("'Call' object has no attribute 'starargs'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'starargs'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.starargs) @@ -5267,7 +5357,8 @@ def Call_get_kwargs(space, w_self): if not w_self.initialization_state & 16: - w_err = space.wrap("'Call' object has no attribute 'kwargs'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'kwargs'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.kwargs) @@ -5307,7 +5398,8 @@ def Repr_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Repr' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ 
-5341,7 +5433,8 @@ def Num_get_n(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Num' object has no attribute 'n'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'n'" % typename) raise OperationError(space.w_AttributeError, w_err) return w_self.n @@ -5375,7 +5468,8 @@ def Str_get_s(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Str' object has no attribute 's'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 's'" % typename) raise OperationError(space.w_AttributeError, w_err) return w_self.s @@ -5412,7 +5506,8 @@ def Attribute_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Attribute' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5422,7 +5517,8 @@ def Attribute_get_attr(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Attribute' object has no attribute 'attr'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'attr'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.attr) @@ -5432,7 +5528,8 @@ def Attribute_get_ctx(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Attribute' object has no attribute 'ctx'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5469,7 +5566,8 @@ def Subscript_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Subscript' object has no attribute 'value'") + typename = 
space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -5479,7 +5577,8 @@ def Subscript_get_slice(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Subscript' object has no attribute 'slice'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'slice'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.slice) @@ -5489,7 +5588,8 @@ def Subscript_get_ctx(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Subscript' object has no attribute 'ctx'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5526,7 +5626,8 @@ def Name_get_id(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Name' object has no attribute 'id'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'id'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.id) @@ -5536,7 +5637,8 @@ def Name_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Name' object has no attribute 'ctx'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5572,7 +5674,8 @@ def List_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'List' object has no attribute 'elts'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elts'" % typename) raise OperationError(space.w_AttributeError, w_err) if 
w_self.w_elts is None: if w_self.elts is None: @@ -5589,7 +5692,8 @@ def List_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'List' object has no attribute 'ctx'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5626,7 +5730,8 @@ def Tuple_get_elts(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Tuple' object has no attribute 'elts'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'elts'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_elts is None: if w_self.elts is None: @@ -5643,7 +5748,8 @@ def Tuple_get_ctx(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'Tuple' object has no attribute 'ctx'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) raise OperationError(space.w_AttributeError, w_err) return expr_context_to_class[w_self.ctx - 1]() @@ -5680,7 +5786,8 @@ def Const_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Const' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return w_self.value @@ -5785,7 +5892,8 @@ def Slice_get_lower(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Slice' object has no attribute 'lower'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'lower'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lower) @@ -5795,7 +5903,8 @@ def Slice_get_upper(space, w_self): if not w_self.initialization_state & 2: - w_err = 
space.wrap("'Slice' object has no attribute 'upper'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'upper'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.upper) @@ -5805,7 +5914,8 @@ def Slice_get_step(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'Slice' object has no attribute 'step'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'step'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.step) @@ -5841,7 +5951,8 @@ def ExtSlice_get_dims(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'ExtSlice' object has no attribute 'dims'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'dims'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_dims is None: if w_self.dims is None: @@ -5883,7 +5994,8 @@ def Index_get_value(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'Index' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -6137,7 +6249,8 @@ def comprehension_get_target(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'comprehension' object has no attribute 'target'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'target'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.target) @@ -6147,7 +6260,8 @@ def comprehension_get_iter(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'comprehension' object has no attribute 'iter'") + typename = space.type(w_self).getname(space) + w_err = 
space.wrap("'%s' object has no attribute 'iter'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.iter) @@ -6157,7 +6271,8 @@ def comprehension_get_ifs(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'comprehension' object has no attribute 'ifs'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'ifs'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_ifs is None: if w_self.ifs is None: @@ -6201,7 +6316,8 @@ def excepthandler_get_lineno(space, w_self): if not w_self.initialization_state & w_self._lineno_mask: - w_err = space.wrap("'excepthandler' object has no attribute 'lineno'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.lineno) @@ -6211,7 +6327,8 @@ def excepthandler_get_col_offset(space, w_self): if not w_self.initialization_state & w_self._col_offset_mask: - w_err = space.wrap("'excepthandler' object has no attribute 'col_offset'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.col_offset) @@ -6229,7 +6346,8 @@ def ExceptHandler_get_type(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'ExceptHandler' object has no attribute 'type'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'type'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.type) @@ -6239,7 +6357,8 @@ def ExceptHandler_get_name(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'ExceptHandler' object has no attribute 'name'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has 
no attribute 'name'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -6249,7 +6368,8 @@ def ExceptHandler_get_body(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'ExceptHandler' object has no attribute 'body'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'body'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_body is None: if w_self.body is None: @@ -6293,7 +6413,8 @@ def arguments_get_args(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'arguments' object has no attribute 'args'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'args'" % typename) raise OperationError(space.w_AttributeError, w_err) if w_self.w_args is None: if w_self.args is None: @@ -6310,7 +6431,8 @@ def arguments_get_vararg(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'arguments' object has no attribute 'vararg'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'vararg'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.vararg) @@ -6323,7 +6445,8 @@ def arguments_get_kwarg(space, w_self): if not w_self.initialization_state & 4: - w_err = space.wrap("'arguments' object has no attribute 'kwarg'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'kwarg'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.kwarg) @@ -6336,7 +6459,8 @@ def arguments_get_defaults(space, w_self): if not w_self.initialization_state & 8: - w_err = space.wrap("'arguments' object has no attribute 'defaults'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'defaults'" % typename) raise OperationError(space.w_AttributeError, w_err) 
if w_self.w_defaults is None: if w_self.defaults is None: @@ -6382,7 +6506,8 @@ def keyword_get_arg(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'keyword' object has no attribute 'arg'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'arg'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.arg) @@ -6392,7 +6517,8 @@ def keyword_get_value(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'keyword' object has no attribute 'value'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'value'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.value) @@ -6427,7 +6553,8 @@ def alias_get_name(space, w_self): if not w_self.initialization_state & 1: - w_err = space.wrap("'alias' object has no attribute 'name'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'name'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.name) @@ -6437,7 +6564,8 @@ def alias_get_asname(space, w_self): if not w_self.initialization_state & 2: - w_err = space.wrap("'alias' object has no attribute 'asname'") + typename = space.type(w_self).getname(space) + w_err = space.wrap("'%s' object has no attribute 'asname'" % typename) raise OperationError(space.w_AttributeError, w_err) return space.wrap(w_self.asname) diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -205,29 +205,19 @@ exc = raises(AttributeError, getattr, x, 'n') assert exc.value.args[0] == "'Num' object has no attribute 'n'" + x = ast.Num(42) + assert x.n == 42 + exc = raises(AttributeError, getattr, x, 'lineno') + assert exc.value.args[0] == "'Num' object has no attribute 'lineno'" + skip("WIP") - x = 
ast.Num(42) - self.assertEquals(x.n, 42) - try: - x.lineno - except AttributeError, e: - self.assertEquals(e.args[0], - "'Num' object has no attribute 'lineno'") - else: - self.assert_(False) - y = ast.Num() x.lineno = y - self.assertEquals(x.lineno, y) + assert x.lineno == y - try: - x.foobar - except AttributeError, e: - self.assertEquals(e.args[0], - "'Num' object has no attribute 'foobar'") - else: - self.assert_(False) + exc = raises(AttributeError, getattr, x, 'foobar') + assert exc.value.args[0] == "'Num' object has no attribute 'foobar'" x = ast.Num(lineno=2) self.assertEquals(x.lineno, 2) From commits-noreply at bitbucket.org Fri Mar 11 18:21:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 18:21:01 +0100 (CET) Subject: [pypy-svn] pypy default: Allow any type to be stored in ast attributes. Message-ID: <20110311172101.DC032282BAA@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42522:16399dbda102 Date: 2011-03-11 18:12 +0100 http://bitbucket.org/pypy/pypy/changeset/16399dbda102/ Log: Allow any type to be stored in ast attributes. 
If the type does not match the expectation, the object is stored in the __dict__ diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -428,6 +428,11 @@ flag = "w_self._%s_mask" % (field.name,) else: flag = self.data.field_masks[field] + if not field.seq: + self.emit("if getattr(w_self, 'w_dict', None):", 1) + self.emit(" w_obj = w_self.getdictvalue(space, '%s')" % (field.name,), 1) + self.emit(" if w_obj is not None:", 1) + self.emit(" return w_obj", 1) self.emit("if not w_self.initialization_state & %s:" % (flag,), 1) self.emit("typename = space.type(w_self).getname(space)", 2) self.emit("w_err = space.wrap(\"'%%s' object has no attribute '%s'\" %% typename)" % @@ -460,38 +465,38 @@ self.emit(func) if field.seq: self.emit("w_self.w_%s = w_new_value" % (field.name,), 1) - elif field.type.value not in asdl.builtin_types: - # These are always other AST nodes. - if field.type.value in self.data.simple_types: - self.emit("obj = space.interp_w(%s, w_new_value)" % \ - (field.type,), 1) - self.emit("w_self.%s = obj.to_simple_int(space)" % - (field.name,), 1) + else: + self.emit("try:", 1) + if field.type.value not in asdl.builtin_types: + # These are always other AST nodes. 
+ if field.type.value in self.data.simple_types: + self.emit("obj = space.interp_w(%s, w_new_value)" % \ + (field.type,), 2) + self.emit("w_self.%s = obj.to_simple_int(space)" % + (field.name,), 2) + else: + config = (field.name, field.type, repr(field.opt)) + self.emit("w_self.%s = space.interp_w(%s, w_new_value, %s)" % + config, 2) else: - config = (field.name, field.type, repr(field.opt)) - self.emit("w_self.%s = space.interp_w(%s, w_new_value, %s)" % - config, 1) - else: - level = 1 - if field.opt and field.type.value != "int": - self.emit("if space.is_w(w_new_value, space.w_None):", 1) - self.emit("w_self.%s = None" % (field.name,), 2) - level += 1 - self.emit("else:", 1) - if field.type.value == "object": - self.emit("w_self.%s = w_new_value" % (field.name,), level) - elif field.type.value == "string": - self.emit("if not space.is_true(space.isinstance(" \ - "w_new_value, space.w_basestring)):", level) - line = "w_err = space.wrap(\"some kind of string required\")" - self.emit(line, level + 1) - self.emit("raise OperationError(space.w_TypeError, w_err)", - level + 1) - self.emit("w_self.%s = w_new_value" % (field.name,), level) - else: - space_method = asdl_type_map[field.type.value] - config = (field.name, space_method) - self.emit("w_self.%s = space.%s(w_new_value)" % config, level) + level = 2 + if field.opt and field.type.value != "int": + self.emit("if space.is_w(w_new_value, space.w_None):", 2) + self.emit("w_self.%s = None" % (field.name,), 3) + level += 1 + self.emit("else:", 2) + if field.type.value in ("object", "string"): + self.emit("w_self.%s = w_new_value" % (field.name,), level) + else: + space_method = asdl_type_map[field.type.value] + config = (field.name, space_method) + self.emit("w_self.%s = space.%s(w_new_value)" % config, level) + self.emit("except OperationError, e:", 1) + self.emit(" if not e.match(space, space.w_TypeError):", 1) + self.emit(" raise", 1) + self.emit(" w_self.setdictvalue(space, '%s', w_new_value)" + % (field.name,), 1) 
+ self.emit(" return", 1) self.emit("w_self.initialization_state |= %s" % (flag,), 1) self.emit("") diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -3143,6 +3143,10 @@ Interactive.typedef.acceptable_as_base_class = False def Expression_get_body(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'body') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'body'" % typename) @@ -3150,7 +3154,13 @@ return space.wrap(w_self.body) def Expression_set_body(space, w_self, w_new_value): - w_self.body = space.interp_w(expr, w_new_value, False) + try: + w_self.body = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'body', w_new_value) + return w_self.initialization_state |= 1 _Expression_field_unroller = unrolling_iterable(['body']) @@ -3221,6 +3231,10 @@ Suite.typedef.acceptable_as_base_class = False def stmt_get_lineno(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'lineno') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._lineno_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) @@ -3228,10 +3242,20 @@ return space.wrap(w_self.lineno) def stmt_set_lineno(space, w_self, w_new_value): - w_self.lineno = space.int_w(w_new_value) + try: + w_self.lineno = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'lineno', w_new_value) + return w_self.initialization_state |= w_self._lineno_mask def stmt_get_col_offset(space, w_self): + if getattr(w_self, 'w_dict', None): + 
w_obj = w_self.getdictvalue(space, 'col_offset') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._col_offset_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) @@ -3239,7 +3263,13 @@ return space.wrap(w_self.col_offset) def stmt_set_col_offset(space, w_self, w_new_value): - w_self.col_offset = space.int_w(w_new_value) + try: + w_self.col_offset = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'col_offset', w_new_value) + return w_self.initialization_state |= w_self._col_offset_mask stmt.typedef = typedef.TypeDef("stmt", @@ -3251,6 +3281,10 @@ stmt.typedef.acceptable_as_base_class = False def FunctionDef_get_name(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'name') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'name'" % typename) @@ -3258,10 +3292,20 @@ return space.wrap(w_self.name) def FunctionDef_set_name(space, w_self, w_new_value): - w_self.name = space.str_w(w_new_value) + try: + w_self.name = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'name', w_new_value) + return w_self.initialization_state |= 1 def FunctionDef_get_args(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'args') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'args'" % typename) @@ -3269,7 +3313,13 @@ return space.wrap(w_self.args) def FunctionDef_set_args(space, w_self, w_new_value): - w_self.args = space.interp_w(arguments, w_new_value, False) + try: + w_self.args = 
space.interp_w(arguments, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'args', w_new_value) + return w_self.initialization_state |= 2 def FunctionDef_get_body(space, w_self): @@ -3338,6 +3388,10 @@ FunctionDef.typedef.acceptable_as_base_class = False def ClassDef_get_name(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'name') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'name'" % typename) @@ -3345,7 +3399,13 @@ return space.wrap(w_self.name) def ClassDef_set_name(space, w_self, w_new_value): - w_self.name = space.str_w(w_new_value) + try: + w_self.name = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'name', w_new_value) + return w_self.initialization_state |= 1 def ClassDef_get_bases(space, w_self): @@ -3433,6 +3493,10 @@ ClassDef.typedef.acceptable_as_base_class = False def Return_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -3440,7 +3504,13 @@ return space.wrap(w_self.value) def Return_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, True) + try: + w_self.value = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Return_field_unroller = unrolling_iterable(['value']) @@ -3529,6 +3599,10 @@ w_self.initialization_state |= 1 def Assign_get_value(space, w_self): + if 
getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -3536,7 +3610,13 @@ return space.wrap(w_self.value) def Assign_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 2 _Assign_field_unroller = unrolling_iterable(['targets', 'value']) @@ -3566,6 +3646,10 @@ Assign.typedef.acceptable_as_base_class = False def AugAssign_get_target(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'target') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'target'" % typename) @@ -3573,10 +3657,20 @@ return space.wrap(w_self.target) def AugAssign_set_target(space, w_self, w_new_value): - w_self.target = space.interp_w(expr, w_new_value, False) + try: + w_self.target = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'target', w_new_value) + return w_self.initialization_state |= 1 def AugAssign_get_op(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'op') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'op'" % typename) @@ -3584,11 +3678,21 @@ return operator_to_class[w_self.op - 1]() def AugAssign_set_op(space, w_self, w_new_value): - obj = space.interp_w(operator, 
w_new_value) - w_self.op = obj.to_simple_int(space) + try: + obj = space.interp_w(operator, w_new_value) + w_self.op = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'op', w_new_value) + return w_self.initialization_state |= 2 def AugAssign_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -3596,7 +3700,13 @@ return space.wrap(w_self.value) def AugAssign_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 4 _AugAssign_field_unroller = unrolling_iterable(['target', 'op', 'value']) @@ -3626,6 +3736,10 @@ AugAssign.typedef.acceptable_as_base_class = False def Print_get_dest(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'dest') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'dest'" % typename) @@ -3633,7 +3747,13 @@ return space.wrap(w_self.dest) def Print_set_dest(space, w_self, w_new_value): - w_self.dest = space.interp_w(expr, w_new_value, True) + try: + w_self.dest = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'dest', w_new_value) + return w_self.initialization_state |= 1 def Print_get_values(space, w_self): @@ -3655,6 +3775,10 @@ w_self.initialization_state |= 2 def 
Print_get_nl(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'nl') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'nl'" % typename) @@ -3662,7 +3786,13 @@ return space.wrap(w_self.nl) def Print_set_nl(space, w_self, w_new_value): - w_self.nl = space.bool_w(w_new_value) + try: + w_self.nl = space.bool_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'nl', w_new_value) + return w_self.initialization_state |= 4 _Print_field_unroller = unrolling_iterable(['dest', 'values', 'nl']) @@ -3693,6 +3823,10 @@ Print.typedef.acceptable_as_base_class = False def For_get_target(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'target') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'target'" % typename) @@ -3700,10 +3834,20 @@ return space.wrap(w_self.target) def For_set_target(space, w_self, w_new_value): - w_self.target = space.interp_w(expr, w_new_value, False) + try: + w_self.target = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'target', w_new_value) + return w_self.initialization_state |= 1 def For_get_iter(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'iter') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'iter'" % typename) @@ -3711,7 +3855,13 @@ return space.wrap(w_self.iter) def For_set_iter(space, w_self, w_new_value): - w_self.iter = space.interp_w(expr, w_new_value, False) + try: + 
w_self.iter = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'iter', w_new_value) + return w_self.initialization_state |= 2 def For_get_body(space, w_self): @@ -3780,6 +3930,10 @@ For.typedef.acceptable_as_base_class = False def While_get_test(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'test') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'test'" % typename) @@ -3787,7 +3941,13 @@ return space.wrap(w_self.test) def While_set_test(space, w_self, w_new_value): - w_self.test = space.interp_w(expr, w_new_value, False) + try: + w_self.test = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'test', w_new_value) + return w_self.initialization_state |= 1 def While_get_body(space, w_self): @@ -3855,6 +4015,10 @@ While.typedef.acceptable_as_base_class = False def If_get_test(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'test') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'test'" % typename) @@ -3862,7 +4026,13 @@ return space.wrap(w_self.test) def If_set_test(space, w_self, w_new_value): - w_self.test = space.interp_w(expr, w_new_value, False) + try: + w_self.test = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'test', w_new_value) + return w_self.initialization_state |= 1 def If_get_body(space, w_self): @@ -3930,6 +4100,10 @@ If.typedef.acceptable_as_base_class = False def With_get_context_expr(space, w_self): + if 
getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'context_expr') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'context_expr'" % typename) @@ -3937,10 +4111,20 @@ return space.wrap(w_self.context_expr) def With_set_context_expr(space, w_self, w_new_value): - w_self.context_expr = space.interp_w(expr, w_new_value, False) + try: + w_self.context_expr = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'context_expr', w_new_value) + return w_self.initialization_state |= 1 def With_get_optional_vars(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'optional_vars') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'optional_vars'" % typename) @@ -3948,7 +4132,13 @@ return space.wrap(w_self.optional_vars) def With_set_optional_vars(space, w_self, w_new_value): - w_self.optional_vars = space.interp_w(expr, w_new_value, True) + try: + w_self.optional_vars = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'optional_vars', w_new_value) + return w_self.initialization_state |= 2 def With_get_body(space, w_self): @@ -3997,6 +4187,10 @@ With.typedef.acceptable_as_base_class = False def Raise_get_type(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'type') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'type'" % typename) @@ -4004,10 +4198,20 @@ return space.wrap(w_self.type) def Raise_set_type(space, w_self, 
w_new_value): - w_self.type = space.interp_w(expr, w_new_value, True) + try: + w_self.type = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'type', w_new_value) + return w_self.initialization_state |= 1 def Raise_get_inst(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'inst') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'inst'" % typename) @@ -4015,10 +4219,20 @@ return space.wrap(w_self.inst) def Raise_set_inst(space, w_self, w_new_value): - w_self.inst = space.interp_w(expr, w_new_value, True) + try: + w_self.inst = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'inst', w_new_value) + return w_self.initialization_state |= 2 def Raise_get_tback(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'tback') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'tback'" % typename) @@ -4026,7 +4240,13 @@ return space.wrap(w_self.tback) def Raise_set_tback(space, w_self, w_new_value): - w_self.tback = space.interp_w(expr, w_new_value, True) + try: + w_self.tback = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'tback', w_new_value) + return w_self.initialization_state |= 4 _Raise_field_unroller = unrolling_iterable(['type', 'inst', 'tback']) @@ -4202,6 +4422,10 @@ TryFinally.typedef.acceptable_as_base_class = False def Assert_get_test(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'test') + if w_obj is 
not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'test'" % typename) @@ -4209,10 +4433,20 @@ return space.wrap(w_self.test) def Assert_set_test(space, w_self, w_new_value): - w_self.test = space.interp_w(expr, w_new_value, False) + try: + w_self.test = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'test', w_new_value) + return w_self.initialization_state |= 1 def Assert_get_msg(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'msg') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'msg'" % typename) @@ -4220,7 +4454,13 @@ return space.wrap(w_self.msg) def Assert_set_msg(space, w_self, w_new_value): - w_self.msg = space.interp_w(expr, w_new_value, True) + try: + w_self.msg = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'msg', w_new_value) + return w_self.initialization_state |= 2 _Assert_field_unroller = unrolling_iterable(['test', 'msg']) @@ -4292,6 +4532,10 @@ Import.typedef.acceptable_as_base_class = False def ImportFrom_get_module(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'module') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'module'" % typename) @@ -4299,10 +4543,16 @@ return space.wrap(w_self.module) def ImportFrom_set_module(space, w_self, w_new_value): - if space.is_w(w_new_value, space.w_None): - w_self.module = None - else: - w_self.module = space.str_w(w_new_value) + try: + if space.is_w(w_new_value, 
space.w_None): + w_self.module = None + else: + w_self.module = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'module', w_new_value) + return w_self.initialization_state |= 1 def ImportFrom_get_names(space, w_self): @@ -4324,6 +4574,10 @@ w_self.initialization_state |= 2 def ImportFrom_get_level(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'level') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'level'" % typename) @@ -4331,7 +4585,13 @@ return space.wrap(w_self.level) def ImportFrom_set_level(space, w_self, w_new_value): - w_self.level = space.int_w(w_new_value) + try: + w_self.level = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'level', w_new_value) + return w_self.initialization_state |= 4 _ImportFrom_field_unroller = unrolling_iterable(['module', 'names', 'level']) @@ -4362,6 +4622,10 @@ ImportFrom.typedef.acceptable_as_base_class = False def Exec_get_body(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'body') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'body'" % typename) @@ -4369,10 +4633,20 @@ return space.wrap(w_self.body) def Exec_set_body(space, w_self, w_new_value): - w_self.body = space.interp_w(expr, w_new_value, False) + try: + w_self.body = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'body', w_new_value) + return w_self.initialization_state |= 1 def Exec_get_globals(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = 
w_self.getdictvalue(space, 'globals') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'globals'" % typename) @@ -4380,10 +4654,20 @@ return space.wrap(w_self.globals) def Exec_set_globals(space, w_self, w_new_value): - w_self.globals = space.interp_w(expr, w_new_value, True) + try: + w_self.globals = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'globals', w_new_value) + return w_self.initialization_state |= 2 def Exec_get_locals(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'locals') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'locals'" % typename) @@ -4391,7 +4675,13 @@ return space.wrap(w_self.locals) def Exec_set_locals(space, w_self, w_new_value): - w_self.locals = space.interp_w(expr, w_new_value, True) + try: + w_self.locals = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'locals', w_new_value) + return w_self.initialization_state |= 4 _Exec_field_unroller = unrolling_iterable(['body', 'globals', 'locals']) @@ -4464,6 +4754,10 @@ Global.typedef.acceptable_as_base_class = False def Expr_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -4471,7 +4765,13 @@ return space.wrap(w_self.value) def Expr_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value 
= space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Expr_field_unroller = unrolling_iterable(['value']) @@ -4550,6 +4850,10 @@ Continue.typedef.acceptable_as_base_class = False def expr_get_lineno(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'lineno') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._lineno_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) @@ -4557,10 +4861,20 @@ return space.wrap(w_self.lineno) def expr_set_lineno(space, w_self, w_new_value): - w_self.lineno = space.int_w(w_new_value) + try: + w_self.lineno = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'lineno', w_new_value) + return w_self.initialization_state |= w_self._lineno_mask def expr_get_col_offset(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'col_offset') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._col_offset_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) @@ -4568,7 +4882,13 @@ return space.wrap(w_self.col_offset) def expr_set_col_offset(space, w_self, w_new_value): - w_self.col_offset = space.int_w(w_new_value) + try: + w_self.col_offset = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'col_offset', w_new_value) + return w_self.initialization_state |= w_self._col_offset_mask expr.typedef = typedef.TypeDef("expr", @@ -4580,6 +4900,10 @@ expr.typedef.acceptable_as_base_class = False def BoolOp_get_op(space, w_self): + if getattr(w_self, 
'w_dict', None): + w_obj = w_self.getdictvalue(space, 'op') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'op'" % typename) @@ -4587,8 +4911,14 @@ return boolop_to_class[w_self.op - 1]() def BoolOp_set_op(space, w_self, w_new_value): - obj = space.interp_w(boolop, w_new_value) - w_self.op = obj.to_simple_int(space) + try: + obj = space.interp_w(boolop, w_new_value) + w_self.op = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'op', w_new_value) + return w_self.initialization_state |= 1 def BoolOp_get_values(space, w_self): @@ -4636,6 +4966,10 @@ BoolOp.typedef.acceptable_as_base_class = False def BinOp_get_left(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'left') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'left'" % typename) @@ -4643,10 +4977,20 @@ return space.wrap(w_self.left) def BinOp_set_left(space, w_self, w_new_value): - w_self.left = space.interp_w(expr, w_new_value, False) + try: + w_self.left = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'left', w_new_value) + return w_self.initialization_state |= 1 def BinOp_get_op(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'op') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'op'" % typename) @@ -4654,11 +4998,21 @@ return operator_to_class[w_self.op - 1]() def BinOp_set_op(space, w_self, w_new_value): - obj = space.interp_w(operator, w_new_value) - w_self.op = 
obj.to_simple_int(space) + try: + obj = space.interp_w(operator, w_new_value) + w_self.op = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'op', w_new_value) + return w_self.initialization_state |= 2 def BinOp_get_right(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'right') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'right'" % typename) @@ -4666,7 +5020,13 @@ return space.wrap(w_self.right) def BinOp_set_right(space, w_self, w_new_value): - w_self.right = space.interp_w(expr, w_new_value, False) + try: + w_self.right = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'right', w_new_value) + return w_self.initialization_state |= 4 _BinOp_field_unroller = unrolling_iterable(['left', 'op', 'right']) @@ -4696,6 +5056,10 @@ BinOp.typedef.acceptable_as_base_class = False def UnaryOp_get_op(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'op') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'op'" % typename) @@ -4703,11 +5067,21 @@ return unaryop_to_class[w_self.op - 1]() def UnaryOp_set_op(space, w_self, w_new_value): - obj = space.interp_w(unaryop, w_new_value) - w_self.op = obj.to_simple_int(space) + try: + obj = space.interp_w(unaryop, w_new_value) + w_self.op = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'op', w_new_value) + return w_self.initialization_state |= 1 def UnaryOp_get_operand(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = 
w_self.getdictvalue(space, 'operand') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'operand'" % typename) @@ -4715,7 +5089,13 @@ return space.wrap(w_self.operand) def UnaryOp_set_operand(space, w_self, w_new_value): - w_self.operand = space.interp_w(expr, w_new_value, False) + try: + w_self.operand = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'operand', w_new_value) + return w_self.initialization_state |= 2 _UnaryOp_field_unroller = unrolling_iterable(['op', 'operand']) @@ -4744,6 +5124,10 @@ UnaryOp.typedef.acceptable_as_base_class = False def Lambda_get_args(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'args') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'args'" % typename) @@ -4751,10 +5135,20 @@ return space.wrap(w_self.args) def Lambda_set_args(space, w_self, w_new_value): - w_self.args = space.interp_w(arguments, w_new_value, False) + try: + w_self.args = space.interp_w(arguments, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'args', w_new_value) + return w_self.initialization_state |= 1 def Lambda_get_body(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'body') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'body'" % typename) @@ -4762,7 +5156,13 @@ return space.wrap(w_self.body) def Lambda_set_body(space, w_self, w_new_value): - w_self.body = space.interp_w(expr, w_new_value, False) + try: + w_self.body = 
space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'body', w_new_value) + return w_self.initialization_state |= 2 _Lambda_field_unroller = unrolling_iterable(['args', 'body']) @@ -4791,6 +5191,10 @@ Lambda.typedef.acceptable_as_base_class = False def IfExp_get_test(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'test') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'test'" % typename) @@ -4798,10 +5202,20 @@ return space.wrap(w_self.test) def IfExp_set_test(space, w_self, w_new_value): - w_self.test = space.interp_w(expr, w_new_value, False) + try: + w_self.test = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'test', w_new_value) + return w_self.initialization_state |= 1 def IfExp_get_body(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'body') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'body'" % typename) @@ -4809,10 +5223,20 @@ return space.wrap(w_self.body) def IfExp_set_body(space, w_self, w_new_value): - w_self.body = space.interp_w(expr, w_new_value, False) + try: + w_self.body = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'body', w_new_value) + return w_self.initialization_state |= 2 def IfExp_get_orelse(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'orelse') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = 
space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'orelse'" % typename) @@ -4820,7 +5244,13 @@ return space.wrap(w_self.orelse) def IfExp_set_orelse(space, w_self, w_new_value): - w_self.orelse = space.interp_w(expr, w_new_value, False) + try: + w_self.orelse = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'orelse', w_new_value) + return w_self.initialization_state |= 4 _IfExp_field_unroller = unrolling_iterable(['test', 'body', 'orelse']) @@ -4956,6 +5386,10 @@ Set.typedef.acceptable_as_base_class = False def ListComp_get_elt(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'elt') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) @@ -4963,7 +5397,13 @@ return space.wrap(w_self.elt) def ListComp_set_elt(space, w_self, w_new_value): - w_self.elt = space.interp_w(expr, w_new_value, False) + try: + w_self.elt = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'elt', w_new_value) + return w_self.initialization_state |= 1 def ListComp_get_generators(space, w_self): @@ -5011,6 +5451,10 @@ ListComp.typedef.acceptable_as_base_class = False def SetComp_get_elt(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'elt') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) @@ -5018,7 +5462,13 @@ return space.wrap(w_self.elt) def SetComp_set_elt(space, w_self, w_new_value): - w_self.elt = space.interp_w(expr, w_new_value, False) + try: + w_self.elt = space.interp_w(expr, w_new_value, 
False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'elt', w_new_value) + return w_self.initialization_state |= 1 def SetComp_get_generators(space, w_self): @@ -5066,6 +5516,10 @@ SetComp.typedef.acceptable_as_base_class = False def DictComp_get_key(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'key') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'key'" % typename) @@ -5073,10 +5527,20 @@ return space.wrap(w_self.key) def DictComp_set_key(space, w_self, w_new_value): - w_self.key = space.interp_w(expr, w_new_value, False) + try: + w_self.key = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'key', w_new_value) + return w_self.initialization_state |= 1 def DictComp_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5084,7 +5548,13 @@ return space.wrap(w_self.value) def DictComp_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 2 def DictComp_get_generators(space, w_self): @@ -5133,6 +5603,10 @@ DictComp.typedef.acceptable_as_base_class = False def GeneratorExp_get_elt(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'elt') + if w_obj is not None: + return w_obj if 
not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'elt'" % typename) @@ -5140,7 +5614,13 @@ return space.wrap(w_self.elt) def GeneratorExp_set_elt(space, w_self, w_new_value): - w_self.elt = space.interp_w(expr, w_new_value, False) + try: + w_self.elt = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'elt', w_new_value) + return w_self.initialization_state |= 1 def GeneratorExp_get_generators(space, w_self): @@ -5188,6 +5668,10 @@ GeneratorExp.typedef.acceptable_as_base_class = False def Yield_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5195,7 +5679,13 @@ return space.wrap(w_self.value) def Yield_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, True) + try: + w_self.value = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Yield_field_unroller = unrolling_iterable(['value']) @@ -5223,6 +5713,10 @@ Yield.typedef.acceptable_as_base_class = False def Compare_get_left(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'left') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'left'" % typename) @@ -5230,7 +5724,13 @@ return space.wrap(w_self.left) def Compare_set_left(space, w_self, w_new_value): - w_self.left = space.interp_w(expr, w_new_value, False) + try: + 
w_self.left = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'left', w_new_value) + return w_self.initialization_state |= 1 def Compare_get_ops(space, w_self): @@ -5298,6 +5798,10 @@ Compare.typedef.acceptable_as_base_class = False def Call_get_func(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'func') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'func'" % typename) @@ -5305,7 +5809,13 @@ return space.wrap(w_self.func) def Call_set_func(space, w_self, w_new_value): - w_self.func = space.interp_w(expr, w_new_value, False) + try: + w_self.func = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'func', w_new_value) + return w_self.initialization_state |= 1 def Call_get_args(space, w_self): @@ -5345,6 +5855,10 @@ w_self.initialization_state |= 4 def Call_get_starargs(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'starargs') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 8: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'starargs'" % typename) @@ -5352,10 +5866,20 @@ return space.wrap(w_self.starargs) def Call_set_starargs(space, w_self, w_new_value): - w_self.starargs = space.interp_w(expr, w_new_value, True) + try: + w_self.starargs = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'starargs', w_new_value) + return w_self.initialization_state |= 8 def Call_get_kwargs(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'kwargs') + if w_obj is 
not None: + return w_obj if not w_self.initialization_state & 16: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'kwargs'" % typename) @@ -5363,7 +5887,13 @@ return space.wrap(w_self.kwargs) def Call_set_kwargs(space, w_self, w_new_value): - w_self.kwargs = space.interp_w(expr, w_new_value, True) + try: + w_self.kwargs = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'kwargs', w_new_value) + return w_self.initialization_state |= 16 _Call_field_unroller = unrolling_iterable(['func', 'args', 'keywords', 'starargs', 'kwargs']) @@ -5397,6 +5927,10 @@ Call.typedef.acceptable_as_base_class = False def Repr_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5404,7 +5938,13 @@ return space.wrap(w_self.value) def Repr_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Repr_field_unroller = unrolling_iterable(['value']) @@ -5432,6 +5972,10 @@ Repr.typedef.acceptable_as_base_class = False def Num_get_n(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'n') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'n'" % typename) @@ -5439,7 +5983,13 @@ return w_self.n def Num_set_n(space, w_self, w_new_value): - w_self.n = w_new_value + try: + 
w_self.n = w_new_value + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'n', w_new_value) + return w_self.initialization_state |= 1 _Num_field_unroller = unrolling_iterable(['n']) @@ -5467,6 +6017,10 @@ Num.typedef.acceptable_as_base_class = False def Str_get_s(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 's') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 's'" % typename) @@ -5474,10 +6028,13 @@ return w_self.s def Str_set_s(space, w_self, w_new_value): - if not space.is_true(space.isinstance(w_new_value, space.w_basestring)): - w_err = space.wrap("some kind of string required") - raise OperationError(space.w_TypeError, w_err) - w_self.s = w_new_value + try: + w_self.s = w_new_value + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 's', w_new_value) + return w_self.initialization_state |= 1 _Str_field_unroller = unrolling_iterable(['s']) @@ -5505,6 +6062,10 @@ Str.typedef.acceptable_as_base_class = False def Attribute_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5512,10 +6073,20 @@ return space.wrap(w_self.value) def Attribute_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 def Attribute_get_attr(space, w_self): + if getattr(w_self, 
'w_dict', None): + w_obj = w_self.getdictvalue(space, 'attr') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'attr'" % typename) @@ -5523,10 +6094,20 @@ return space.wrap(w_self.attr) def Attribute_set_attr(space, w_self, w_new_value): - w_self.attr = space.str_w(w_new_value) + try: + w_self.attr = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'attr', w_new_value) + return w_self.initialization_state |= 2 def Attribute_get_ctx(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'ctx') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) @@ -5534,8 +6115,14 @@ return expr_context_to_class[w_self.ctx - 1]() def Attribute_set_ctx(space, w_self, w_new_value): - obj = space.interp_w(expr_context, w_new_value) - w_self.ctx = obj.to_simple_int(space) + try: + obj = space.interp_w(expr_context, w_new_value) + w_self.ctx = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'ctx', w_new_value) + return w_self.initialization_state |= 4 _Attribute_field_unroller = unrolling_iterable(['value', 'attr', 'ctx']) @@ -5565,6 +6152,10 @@ Attribute.typedef.acceptable_as_base_class = False def Subscript_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5572,10 +6163,20 @@ return space.wrap(w_self.value) def Subscript_set_value(space, w_self, w_new_value): - 
w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 def Subscript_get_slice(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'slice') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'slice'" % typename) @@ -5583,10 +6184,20 @@ return space.wrap(w_self.slice) def Subscript_set_slice(space, w_self, w_new_value): - w_self.slice = space.interp_w(slice, w_new_value, False) + try: + w_self.slice = space.interp_w(slice, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'slice', w_new_value) + return w_self.initialization_state |= 2 def Subscript_get_ctx(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'ctx') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) @@ -5594,8 +6205,14 @@ return expr_context_to_class[w_self.ctx - 1]() def Subscript_set_ctx(space, w_self, w_new_value): - obj = space.interp_w(expr_context, w_new_value) - w_self.ctx = obj.to_simple_int(space) + try: + obj = space.interp_w(expr_context, w_new_value) + w_self.ctx = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'ctx', w_new_value) + return w_self.initialization_state |= 4 _Subscript_field_unroller = unrolling_iterable(['value', 'slice', 'ctx']) @@ -5625,6 +6242,10 @@ Subscript.typedef.acceptable_as_base_class = False def Name_get_id(space, w_self): + if 
getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'id') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'id'" % typename) @@ -5632,10 +6253,20 @@ return space.wrap(w_self.id) def Name_set_id(space, w_self, w_new_value): - w_self.id = space.str_w(w_new_value) + try: + w_self.id = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'id', w_new_value) + return w_self.initialization_state |= 1 def Name_get_ctx(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'ctx') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) @@ -5643,8 +6274,14 @@ return expr_context_to_class[w_self.ctx - 1]() def Name_set_ctx(space, w_self, w_new_value): - obj = space.interp_w(expr_context, w_new_value) - w_self.ctx = obj.to_simple_int(space) + try: + obj = space.interp_w(expr_context, w_new_value) + w_self.ctx = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'ctx', w_new_value) + return w_self.initialization_state |= 2 _Name_field_unroller = unrolling_iterable(['id', 'ctx']) @@ -5691,6 +6328,10 @@ w_self.initialization_state |= 1 def List_get_ctx(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'ctx') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) @@ -5698,8 +6339,14 @@ return expr_context_to_class[w_self.ctx - 1]() def List_set_ctx(space, w_self, w_new_value): - obj = space.interp_w(expr_context, w_new_value) - w_self.ctx 
= obj.to_simple_int(space) + try: + obj = space.interp_w(expr_context, w_new_value) + w_self.ctx = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'ctx', w_new_value) + return w_self.initialization_state |= 2 _List_field_unroller = unrolling_iterable(['elts', 'ctx']) @@ -5747,6 +6394,10 @@ w_self.initialization_state |= 1 def Tuple_get_ctx(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'ctx') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'ctx'" % typename) @@ -5754,8 +6405,14 @@ return expr_context_to_class[w_self.ctx - 1]() def Tuple_set_ctx(space, w_self, w_new_value): - obj = space.interp_w(expr_context, w_new_value) - w_self.ctx = obj.to_simple_int(space) + try: + obj = space.interp_w(expr_context, w_new_value) + w_self.ctx = obj.to_simple_int(space) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'ctx', w_new_value) + return w_self.initialization_state |= 2 _Tuple_field_unroller = unrolling_iterable(['elts', 'ctx']) @@ -5785,6 +6442,10 @@ Tuple.typedef.acceptable_as_base_class = False def Const_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -5792,7 +6453,13 @@ return w_self.value def Const_set_value(space, w_self, w_new_value): - w_self.value = w_new_value + try: + w_self.value = w_new_value + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Const_field_unroller = 
unrolling_iterable(['value']) @@ -5891,6 +6558,10 @@ Ellipsis.typedef.acceptable_as_base_class = False def Slice_get_lower(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'lower') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'lower'" % typename) @@ -5898,10 +6569,20 @@ return space.wrap(w_self.lower) def Slice_set_lower(space, w_self, w_new_value): - w_self.lower = space.interp_w(expr, w_new_value, True) + try: + w_self.lower = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'lower', w_new_value) + return w_self.initialization_state |= 1 def Slice_get_upper(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'upper') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'upper'" % typename) @@ -5909,10 +6590,20 @@ return space.wrap(w_self.upper) def Slice_set_upper(space, w_self, w_new_value): - w_self.upper = space.interp_w(expr, w_new_value, True) + try: + w_self.upper = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'upper', w_new_value) + return w_self.initialization_state |= 2 def Slice_get_step(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'step') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'step'" % typename) @@ -5920,7 +6611,13 @@ return space.wrap(w_self.step) def Slice_set_step(space, w_self, w_new_value): - w_self.step = space.interp_w(expr, w_new_value, True) + 
try: + w_self.step = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'step', w_new_value) + return w_self.initialization_state |= 4 _Slice_field_unroller = unrolling_iterable(['lower', 'upper', 'step']) @@ -5993,6 +6690,10 @@ ExtSlice.typedef.acceptable_as_base_class = False def Index_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -6000,7 +6701,13 @@ return space.wrap(w_self.value) def Index_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 1 _Index_field_unroller = unrolling_iterable(['value']) @@ -6248,6 +6955,10 @@ _NotIn.typedef.acceptable_as_base_class = False def comprehension_get_target(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'target') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'target'" % typename) @@ -6255,10 +6966,20 @@ return space.wrap(w_self.target) def comprehension_set_target(space, w_self, w_new_value): - w_self.target = space.interp_w(expr, w_new_value, False) + try: + w_self.target = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'target', w_new_value) + return w_self.initialization_state |= 1 def comprehension_get_iter(space, w_self): + 
if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'iter') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'iter'" % typename) @@ -6266,7 +6987,13 @@ return space.wrap(w_self.iter) def comprehension_set_iter(space, w_self, w_new_value): - w_self.iter = space.interp_w(expr, w_new_value, False) + try: + w_self.iter = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'iter', w_new_value) + return w_self.initialization_state |= 2 def comprehension_get_ifs(space, w_self): @@ -6315,6 +7042,10 @@ comprehension.typedef.acceptable_as_base_class = False def excepthandler_get_lineno(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'lineno') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._lineno_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'lineno'" % typename) @@ -6322,10 +7053,20 @@ return space.wrap(w_self.lineno) def excepthandler_set_lineno(space, w_self, w_new_value): - w_self.lineno = space.int_w(w_new_value) + try: + w_self.lineno = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'lineno', w_new_value) + return w_self.initialization_state |= w_self._lineno_mask def excepthandler_get_col_offset(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'col_offset') + if w_obj is not None: + return w_obj if not w_self.initialization_state & w_self._col_offset_mask: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'col_offset'" % typename) @@ -6333,7 +7074,13 @@ return space.wrap(w_self.col_offset) def excepthandler_set_col_offset(space, 
w_self, w_new_value): - w_self.col_offset = space.int_w(w_new_value) + try: + w_self.col_offset = space.int_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'col_offset', w_new_value) + return w_self.initialization_state |= w_self._col_offset_mask excepthandler.typedef = typedef.TypeDef("excepthandler", @@ -6345,6 +7092,10 @@ excepthandler.typedef.acceptable_as_base_class = False def ExceptHandler_get_type(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'type') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'type'" % typename) @@ -6352,10 +7103,20 @@ return space.wrap(w_self.type) def ExceptHandler_set_type(space, w_self, w_new_value): - w_self.type = space.interp_w(expr, w_new_value, True) + try: + w_self.type = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'type', w_new_value) + return w_self.initialization_state |= 1 def ExceptHandler_get_name(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'name') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'name'" % typename) @@ -6363,7 +7124,13 @@ return space.wrap(w_self.name) def ExceptHandler_set_name(space, w_self, w_new_value): - w_self.name = space.interp_w(expr, w_new_value, True) + try: + w_self.name = space.interp_w(expr, w_new_value, True) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'name', w_new_value) + return w_self.initialization_state |= 2 def ExceptHandler_get_body(space, w_self): @@ -6430,6 +7197,10 @@ w_self.initialization_state |= 1 
def arguments_get_vararg(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'vararg') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'vararg'" % typename) @@ -6437,13 +7208,23 @@ return space.wrap(w_self.vararg) def arguments_set_vararg(space, w_self, w_new_value): - if space.is_w(w_new_value, space.w_None): - w_self.vararg = None - else: - w_self.vararg = space.str_w(w_new_value) + try: + if space.is_w(w_new_value, space.w_None): + w_self.vararg = None + else: + w_self.vararg = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'vararg', w_new_value) + return w_self.initialization_state |= 2 def arguments_get_kwarg(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'kwarg') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 4: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'kwarg'" % typename) @@ -6451,10 +7232,16 @@ return space.wrap(w_self.kwarg) def arguments_set_kwarg(space, w_self, w_new_value): - if space.is_w(w_new_value, space.w_None): - w_self.kwarg = None - else: - w_self.kwarg = space.str_w(w_new_value) + try: + if space.is_w(w_new_value, space.w_None): + w_self.kwarg = None + else: + w_self.kwarg = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'kwarg', w_new_value) + return w_self.initialization_state |= 4 def arguments_get_defaults(space, w_self): @@ -6505,6 +7292,10 @@ arguments.typedef.acceptable_as_base_class = False def keyword_get_arg(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'arg') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 
1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'arg'" % typename) @@ -6512,10 +7303,20 @@ return space.wrap(w_self.arg) def keyword_set_arg(space, w_self, w_new_value): - w_self.arg = space.str_w(w_new_value) + try: + w_self.arg = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'arg', w_new_value) + return w_self.initialization_state |= 1 def keyword_get_value(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'value') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'value'" % typename) @@ -6523,7 +7324,13 @@ return space.wrap(w_self.value) def keyword_set_value(space, w_self, w_new_value): - w_self.value = space.interp_w(expr, w_new_value, False) + try: + w_self.value = space.interp_w(expr, w_new_value, False) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'value', w_new_value) + return w_self.initialization_state |= 2 _keyword_field_unroller = unrolling_iterable(['arg', 'value']) @@ -6552,6 +7359,10 @@ keyword.typedef.acceptable_as_base_class = False def alias_get_name(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'name') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 1: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'name'" % typename) @@ -6559,10 +7370,20 @@ return space.wrap(w_self.name) def alias_set_name(space, w_self, w_new_value): - w_self.name = space.str_w(w_new_value) + try: + w_self.name = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'name', w_new_value) + return w_self.initialization_state 
|= 1 def alias_get_asname(space, w_self): + if getattr(w_self, 'w_dict', None): + w_obj = w_self.getdictvalue(space, 'asname') + if w_obj is not None: + return w_obj if not w_self.initialization_state & 2: typename = space.type(w_self).getname(space) w_err = space.wrap("'%s' object has no attribute 'asname'" % typename) @@ -6570,10 +7391,16 @@ return space.wrap(w_self.asname) def alias_set_asname(space, w_self, w_new_value): - if space.is_w(w_new_value, space.w_None): - w_self.asname = None - else: - w_self.asname = space.str_w(w_new_value) + try: + if space.is_w(w_new_value, space.w_None): + w_self.asname = None + else: + w_self.asname = space.str_w(w_new_value) + except OperationError, e: + if not e.match(space, space.w_TypeError): + raise + w_self.setdictvalue(space, 'asname', w_new_value) + return w_self.initialization_state |= 2 _alias_field_unroller = unrolling_iterable(['name', 'asname']) diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -50,8 +50,8 @@ s = mod.body assert s.s == "hi" s.s = "pypy" - raises(TypeError, setattr, s, "s", 43) - assert eval(compile(mod, "", "eval")) == "pypy" + s.s = 43 + assert eval(compile(mod, "", "eval")) == 43 def test_empty_initialization(self): ast = self.ast @@ -82,7 +82,6 @@ assert name.id == "name_word" name.id = "hi" assert name.id == "hi" - raises(TypeError, setattr, name, "id", 32) def test_bool(self): ast = self.ast @@ -92,6 +91,7 @@ pr.nl = True assert pr.nl + @py.test.mark.skipif("py.test.config.option.runappdirect") def test_object(self): ast = self.ast const = ast.Const(4) @@ -131,7 +131,7 @@ def test_ast_types(self): ast = self.ast expr = ast.Expr() - raises(TypeError, setattr, expr, "value", ast.Lt()) + expr.value = ast.Lt() def test_abstract_ast_types(self): ast = self.ast @@ -210,8 +210,6 @@ exc = raises(AttributeError, getattr, x, 'lineno') assert exc.value.args[0] == "'Num' object has no 
attribute 'lineno'" - skip("WIP") - y = ast.Num() x.lineno = y assert x.lineno == y @@ -220,12 +218,12 @@ assert exc.value.args[0] == "'Num' object has no attribute 'foobar'" x = ast.Num(lineno=2) - self.assertEquals(x.lineno, 2) + assert x.lineno == 2 x = ast.Num(42, lineno=0) - self.assertEquals(x.lineno, 0) - self.assertEquals(x._fields, ('n',)) - self.assertEquals(x.n, 42) + assert x.lineno == 0 + assert x._fields == ('n',) + assert x.n == 42 - self.assertRaises(TypeError, ast.Num, 1, 2) - self.assertRaises(TypeError, ast.Num, 1, 2, lineno=0) + raises(TypeError, ast.Num, 1, 2) + raises(TypeError, ast.Num, 1, 2, lineno=0) From commits-noreply at bitbucket.org Fri Mar 11 19:39:22 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 19:39:22 +0100 (CET) Subject: [pypy-svn] pypy default: Try to fix translation Message-ID: <20110311183922.5728A36C210@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42523:d0aeef07b06a Date: 2011-03-11 19:39 +0100 http://bitbucket.org/pypy/pypy/changeset/d0aeef07b06a/ Log: Try to fix translation diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -429,7 +429,7 @@ else: flag = self.data.field_masks[field] if not field.seq: - self.emit("if getattr(w_self, 'w_dict', None):", 1) + self.emit("if w_self.w_dict is not None:", 1) self.emit(" w_obj = w_self.getdictvalue(space, '%s')" % (field.name,), 1) self.emit(" if w_obj is not None:", 1) self.emit(" return w_obj", 1) @@ -561,7 +561,7 @@ class AST(Wrappable): - __slots__ = ("initialization_state", "w_dict") + w_dict = None __metaclass__ = extendabletype @@ -575,7 +575,7 @@ raise NotImplementedError def getdict(self, space): - if not hasattr(self, 'w_dict'): + if self.w_dict is None: self.w_dict = space.newdict(instance=True) return self.w_dict diff --git a/pypy/interpreter/astcompiler/ast.py 
b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -10,10 +10,13 @@ class AST(Wrappable): - __slots__ = ("initialization_state", "w_dict") + w_dict = None __metaclass__ = extendabletype + def __init__(self): + self.w_dict = None + def walkabout(self, visitor): raise AssertionError("walkabout() implementation not provided") @@ -24,7 +27,7 @@ raise NotImplementedError def getdict(self, space): - if not hasattr(self, 'w_dict'): + if self.w_dict is None: self.w_dict = space.newdict(instance=True) return self.w_dict @@ -209,6 +212,7 @@ __slots__ = ('lineno', 'col_offset') def __init__(self, lineno, col_offset): + AST.__init__(self) self.lineno = lineno self.col_offset = col_offset @@ -1188,6 +1192,7 @@ __slots__ = ('lineno', 'col_offset') def __init__(self, lineno, col_offset): + AST.__init__(self) self.lineno = lineno self.col_offset = col_offset @@ -2085,6 +2090,7 @@ def __init__(self): + AST.__init__(self) self.initialization_state = 0 def walkabout(self, visitor): @@ -2480,6 +2486,7 @@ __slots__ = ('lineno', 'col_offset') def __init__(self, lineno, col_offset): + AST.__init__(self) self.lineno = lineno self.col_offset = col_offset @@ -3143,7 +3150,7 @@ Interactive.typedef.acceptable_as_base_class = False def Expression_get_body(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'body') if w_obj is not None: return w_obj @@ -3231,7 +3238,7 @@ Suite.typedef.acceptable_as_base_class = False def stmt_get_lineno(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'lineno') if w_obj is not None: return w_obj @@ -3252,7 +3259,7 @@ w_self.initialization_state |= w_self._lineno_mask def stmt_get_col_offset(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'col_offset') if w_obj is not None: 
return w_obj @@ -3281,7 +3288,7 @@ stmt.typedef.acceptable_as_base_class = False def FunctionDef_get_name(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'name') if w_obj is not None: return w_obj @@ -3302,7 +3309,7 @@ w_self.initialization_state |= 1 def FunctionDef_get_args(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'args') if w_obj is not None: return w_obj @@ -3388,7 +3395,7 @@ FunctionDef.typedef.acceptable_as_base_class = False def ClassDef_get_name(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'name') if w_obj is not None: return w_obj @@ -3493,7 +3500,7 @@ ClassDef.typedef.acceptable_as_base_class = False def Return_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -3599,7 +3606,7 @@ w_self.initialization_state |= 1 def Assign_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -3646,7 +3653,7 @@ Assign.typedef.acceptable_as_base_class = False def AugAssign_get_target(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'target') if w_obj is not None: return w_obj @@ -3667,7 +3674,7 @@ w_self.initialization_state |= 1 def AugAssign_get_op(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'op') if w_obj is not None: return w_obj @@ -3689,7 +3696,7 @@ w_self.initialization_state |= 2 def AugAssign_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not 
None: return w_obj @@ -3736,7 +3743,7 @@ AugAssign.typedef.acceptable_as_base_class = False def Print_get_dest(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'dest') if w_obj is not None: return w_obj @@ -3775,7 +3782,7 @@ w_self.initialization_state |= 2 def Print_get_nl(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'nl') if w_obj is not None: return w_obj @@ -3823,7 +3830,7 @@ Print.typedef.acceptable_as_base_class = False def For_get_target(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'target') if w_obj is not None: return w_obj @@ -3844,7 +3851,7 @@ w_self.initialization_state |= 1 def For_get_iter(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'iter') if w_obj is not None: return w_obj @@ -3930,7 +3937,7 @@ For.typedef.acceptable_as_base_class = False def While_get_test(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'test') if w_obj is not None: return w_obj @@ -4015,7 +4022,7 @@ While.typedef.acceptable_as_base_class = False def If_get_test(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'test') if w_obj is not None: return w_obj @@ -4100,7 +4107,7 @@ If.typedef.acceptable_as_base_class = False def With_get_context_expr(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'context_expr') if w_obj is not None: return w_obj @@ -4121,7 +4128,7 @@ w_self.initialization_state |= 1 def With_get_optional_vars(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'optional_vars') if w_obj is not 
None: return w_obj @@ -4187,7 +4194,7 @@ With.typedef.acceptable_as_base_class = False def Raise_get_type(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'type') if w_obj is not None: return w_obj @@ -4208,7 +4215,7 @@ w_self.initialization_state |= 1 def Raise_get_inst(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'inst') if w_obj is not None: return w_obj @@ -4229,7 +4236,7 @@ w_self.initialization_state |= 2 def Raise_get_tback(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'tback') if w_obj is not None: return w_obj @@ -4422,7 +4429,7 @@ TryFinally.typedef.acceptable_as_base_class = False def Assert_get_test(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'test') if w_obj is not None: return w_obj @@ -4443,7 +4450,7 @@ w_self.initialization_state |= 1 def Assert_get_msg(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'msg') if w_obj is not None: return w_obj @@ -4532,7 +4539,7 @@ Import.typedef.acceptable_as_base_class = False def ImportFrom_get_module(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'module') if w_obj is not None: return w_obj @@ -4574,7 +4581,7 @@ w_self.initialization_state |= 2 def ImportFrom_get_level(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'level') if w_obj is not None: return w_obj @@ -4622,7 +4629,7 @@ ImportFrom.typedef.acceptable_as_base_class = False def Exec_get_body(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'body') if w_obj is not None: return 
w_obj @@ -4643,7 +4650,7 @@ w_self.initialization_state |= 1 def Exec_get_globals(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'globals') if w_obj is not None: return w_obj @@ -4664,7 +4671,7 @@ w_self.initialization_state |= 2 def Exec_get_locals(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'locals') if w_obj is not None: return w_obj @@ -4754,7 +4761,7 @@ Global.typedef.acceptable_as_base_class = False def Expr_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -4850,7 +4857,7 @@ Continue.typedef.acceptable_as_base_class = False def expr_get_lineno(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'lineno') if w_obj is not None: return w_obj @@ -4871,7 +4878,7 @@ w_self.initialization_state |= w_self._lineno_mask def expr_get_col_offset(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'col_offset') if w_obj is not None: return w_obj @@ -4900,7 +4907,7 @@ expr.typedef.acceptable_as_base_class = False def BoolOp_get_op(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'op') if w_obj is not None: return w_obj @@ -4966,7 +4973,7 @@ BoolOp.typedef.acceptable_as_base_class = False def BinOp_get_left(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'left') if w_obj is not None: return w_obj @@ -4987,7 +4994,7 @@ w_self.initialization_state |= 1 def BinOp_get_op(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'op') if w_obj is not None: return 
w_obj @@ -5009,7 +5016,7 @@ w_self.initialization_state |= 2 def BinOp_get_right(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'right') if w_obj is not None: return w_obj @@ -5056,7 +5063,7 @@ BinOp.typedef.acceptable_as_base_class = False def UnaryOp_get_op(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'op') if w_obj is not None: return w_obj @@ -5078,7 +5085,7 @@ w_self.initialization_state |= 1 def UnaryOp_get_operand(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'operand') if w_obj is not None: return w_obj @@ -5124,7 +5131,7 @@ UnaryOp.typedef.acceptable_as_base_class = False def Lambda_get_args(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'args') if w_obj is not None: return w_obj @@ -5145,7 +5152,7 @@ w_self.initialization_state |= 1 def Lambda_get_body(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'body') if w_obj is not None: return w_obj @@ -5191,7 +5198,7 @@ Lambda.typedef.acceptable_as_base_class = False def IfExp_get_test(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'test') if w_obj is not None: return w_obj @@ -5212,7 +5219,7 @@ w_self.initialization_state |= 1 def IfExp_get_body(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'body') if w_obj is not None: return w_obj @@ -5233,7 +5240,7 @@ w_self.initialization_state |= 2 def IfExp_get_orelse(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'orelse') if w_obj is not None: return w_obj @@ -5386,7 +5393,7 @@ 
Set.typedef.acceptable_as_base_class = False def ListComp_get_elt(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'elt') if w_obj is not None: return w_obj @@ -5451,7 +5458,7 @@ ListComp.typedef.acceptable_as_base_class = False def SetComp_get_elt(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'elt') if w_obj is not None: return w_obj @@ -5516,7 +5523,7 @@ SetComp.typedef.acceptable_as_base_class = False def DictComp_get_key(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'key') if w_obj is not None: return w_obj @@ -5537,7 +5544,7 @@ w_self.initialization_state |= 1 def DictComp_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -5603,7 +5610,7 @@ DictComp.typedef.acceptable_as_base_class = False def GeneratorExp_get_elt(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'elt') if w_obj is not None: return w_obj @@ -5668,7 +5675,7 @@ GeneratorExp.typedef.acceptable_as_base_class = False def Yield_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -5713,7 +5720,7 @@ Yield.typedef.acceptable_as_base_class = False def Compare_get_left(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'left') if w_obj is not None: return w_obj @@ -5798,7 +5805,7 @@ Compare.typedef.acceptable_as_base_class = False def Call_get_func(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'func') if w_obj is not None: 
return w_obj @@ -5855,7 +5862,7 @@ w_self.initialization_state |= 4 def Call_get_starargs(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'starargs') if w_obj is not None: return w_obj @@ -5876,7 +5883,7 @@ w_self.initialization_state |= 8 def Call_get_kwargs(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'kwargs') if w_obj is not None: return w_obj @@ -5927,7 +5934,7 @@ Call.typedef.acceptable_as_base_class = False def Repr_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -5972,7 +5979,7 @@ Repr.typedef.acceptable_as_base_class = False def Num_get_n(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'n') if w_obj is not None: return w_obj @@ -6017,7 +6024,7 @@ Num.typedef.acceptable_as_base_class = False def Str_get_s(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 's') if w_obj is not None: return w_obj @@ -6062,7 +6069,7 @@ Str.typedef.acceptable_as_base_class = False def Attribute_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -6083,7 +6090,7 @@ w_self.initialization_state |= 1 def Attribute_get_attr(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'attr') if w_obj is not None: return w_obj @@ -6104,7 +6111,7 @@ w_self.initialization_state |= 2 def Attribute_get_ctx(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'ctx') if w_obj is not None: return w_obj @@ -6152,7 +6159,7 @@ 
Attribute.typedef.acceptable_as_base_class = False def Subscript_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -6173,7 +6180,7 @@ w_self.initialization_state |= 1 def Subscript_get_slice(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'slice') if w_obj is not None: return w_obj @@ -6194,7 +6201,7 @@ w_self.initialization_state |= 2 def Subscript_get_ctx(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'ctx') if w_obj is not None: return w_obj @@ -6242,7 +6249,7 @@ Subscript.typedef.acceptable_as_base_class = False def Name_get_id(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'id') if w_obj is not None: return w_obj @@ -6263,7 +6270,7 @@ w_self.initialization_state |= 1 def Name_get_ctx(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'ctx') if w_obj is not None: return w_obj @@ -6328,7 +6335,7 @@ w_self.initialization_state |= 1 def List_get_ctx(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'ctx') if w_obj is not None: return w_obj @@ -6394,7 +6401,7 @@ w_self.initialization_state |= 1 def Tuple_get_ctx(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'ctx') if w_obj is not None: return w_obj @@ -6442,7 +6449,7 @@ Tuple.typedef.acceptable_as_base_class = False def Const_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -6558,7 +6565,7 @@ Ellipsis.typedef.acceptable_as_base_class 
= False def Slice_get_lower(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'lower') if w_obj is not None: return w_obj @@ -6579,7 +6586,7 @@ w_self.initialization_state |= 1 def Slice_get_upper(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'upper') if w_obj is not None: return w_obj @@ -6600,7 +6607,7 @@ w_self.initialization_state |= 2 def Slice_get_step(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'step') if w_obj is not None: return w_obj @@ -6690,7 +6697,7 @@ ExtSlice.typedef.acceptable_as_base_class = False def Index_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -6955,7 +6962,7 @@ _NotIn.typedef.acceptable_as_base_class = False def comprehension_get_target(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'target') if w_obj is not None: return w_obj @@ -6976,7 +6983,7 @@ w_self.initialization_state |= 1 def comprehension_get_iter(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'iter') if w_obj is not None: return w_obj @@ -7042,7 +7049,7 @@ comprehension.typedef.acceptable_as_base_class = False def excepthandler_get_lineno(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'lineno') if w_obj is not None: return w_obj @@ -7063,7 +7070,7 @@ w_self.initialization_state |= w_self._lineno_mask def excepthandler_get_col_offset(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'col_offset') if w_obj is not None: return w_obj @@ -7092,7 
+7099,7 @@ excepthandler.typedef.acceptable_as_base_class = False def ExceptHandler_get_type(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'type') if w_obj is not None: return w_obj @@ -7113,7 +7120,7 @@ w_self.initialization_state |= 1 def ExceptHandler_get_name(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'name') if w_obj is not None: return w_obj @@ -7197,7 +7204,7 @@ w_self.initialization_state |= 1 def arguments_get_vararg(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'vararg') if w_obj is not None: return w_obj @@ -7221,7 +7228,7 @@ w_self.initialization_state |= 2 def arguments_get_kwarg(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'kwarg') if w_obj is not None: return w_obj @@ -7292,7 +7299,7 @@ arguments.typedef.acceptable_as_base_class = False def keyword_get_arg(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'arg') if w_obj is not None: return w_obj @@ -7313,7 +7320,7 @@ w_self.initialization_state |= 1 def keyword_get_value(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'value') if w_obj is not None: return w_obj @@ -7359,7 +7366,7 @@ keyword.typedef.acceptable_as_base_class = False def alias_get_name(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'name') if w_obj is not None: return w_obj @@ -7380,7 +7387,7 @@ w_self.initialization_state |= 1 def alias_get_asname(space, w_self): - if getattr(w_self, 'w_dict', None): + if w_self.w_dict is not None: w_obj = w_self.getdictvalue(space, 'asname') if w_obj is not None: return w_obj From fijall 
at gmail.com Fri Mar 11 19:55:58 2011 From: fijall at gmail.com (Maciej Fijalkowski) Date: Fri, 11 Mar 2011 13:55:58 -0500 Subject: [pypy-svn] pypy default: Try to fix translation In-Reply-To: <20110311183922.5728A36C210@codespeak.net> References: <20110311183922.5728A36C210@codespeak.net> Message-ID: Why this removed __slots__? On Fri, Mar 11, 2011 at 1:39 PM, amauryfa wrote: > Author: Amaury Forgeot d'Arc > Branch: > Changeset: r42523:d0aeef07b06a > Date: 2011-03-11 19:39 +0100 > http://bitbucket.org/pypy/pypy/changeset/d0aeef07b06a/ > > Log: ? ?Try to fix translation > > diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py > --- a/pypy/interpreter/astcompiler/tools/asdl_py.py > +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py > @@ -429,7 +429,7 @@ > ? ? ? ? else: > ? ? ? ? ? ? flag = self.data.field_masks[field] > ? ? ? ? if not field.seq: > - ? ? ? ? ? ?self.emit("if getattr(w_self, 'w_dict', None):", 1) > + ? ? ? ? ? ?self.emit("if w_self.w_dict is not None:", 1) > ? ? ? ? ? ? self.emit(" ? ?w_obj = w_self.getdictvalue(space, '%s')" % (field.name,), 1) > ? ? ? ? ? ? self.emit(" ? ?if w_obj is not None:", 1) > ? ? ? ? ? ? self.emit(" ? ? ? ?return w_obj", 1) > @@ -561,7 +561,7 @@ > > ?class AST(Wrappable): > > - ? ?__slots__ = ("initialization_state", "w_dict") > + ? ?w_dict = None > > ? ? __metaclass__ = extendabletype > > @@ -575,7 +575,7 @@ > ? ? ? ? raise NotImplementedError > > ? ? def getdict(self, space): > - ? ? ? ?if not hasattr(self, 'w_dict'): > + ? ? ? ?if self.w_dict is None: > ? ? ? ? ? ? self.w_dict = space.newdict(instance=True) > ? ? ? ? return self.w_dict > > > diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py > --- a/pypy/interpreter/astcompiler/ast.py > +++ b/pypy/interpreter/astcompiler/ast.py > @@ -10,10 +10,13 @@ > > ?class AST(Wrappable): > > - ? ?__slots__ = ("initialization_state", "w_dict") > + ? ?w_dict = None > > ? ? 
__metaclass__ = extendabletype > > + ? ?def __init__(self): > + ? ? ? ?self.w_dict = None > + > ? ? def walkabout(self, visitor): > ? ? ? ? raise AssertionError("walkabout() implementation not provided") > > @@ -24,7 +27,7 @@ > ? ? ? ? raise NotImplementedError > > ? ? def getdict(self, space): > - ? ? ? ?if not hasattr(self, 'w_dict'): > + ? ? ? ?if self.w_dict is None: > ? ? ? ? ? ? self.w_dict = space.newdict(instance=True) > ? ? ? ? return self.w_dict > > @@ -209,6 +212,7 @@ > ? ? __slots__ = ('lineno', 'col_offset') > > ? ? def __init__(self, lineno, col_offset): > + ? ? ? ?AST.__init__(self) > ? ? ? ? self.lineno = lineno > ? ? ? ? self.col_offset = col_offset > > @@ -1188,6 +1192,7 @@ > ? ? __slots__ = ('lineno', 'col_offset') > > ? ? def __init__(self, lineno, col_offset): > + ? ? ? ?AST.__init__(self) > ? ? ? ? self.lineno = lineno > ? ? ? ? self.col_offset = col_offset > > @@ -2085,6 +2090,7 @@ > > > ? ? def __init__(self): > + ? ? ? ?AST.__init__(self) > ? ? ? ? self.initialization_state = 0 > > ? ? def walkabout(self, visitor): > @@ -2480,6 +2486,7 @@ > ? ? __slots__ = ('lineno', 'col_offset') > > ? ? def __init__(self, lineno, col_offset): > + ? ? ? ?AST.__init__(self) > ? ? ? ? self.lineno = lineno > ? ? ? ? self.col_offset = col_offset > > @@ -3143,7 +3150,7 @@ > ?Interactive.typedef.acceptable_as_base_class = False > > ?def Expression_get_body(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'body') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3231,7 +3238,7 @@ > ?Suite.typedef.acceptable_as_base_class = False > > ?def stmt_get_lineno(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'lineno') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3252,7 +3259,7 @@ > ? ? 
w_self.initialization_state |= w_self._lineno_mask > > ?def stmt_get_col_offset(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'col_offset') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3281,7 +3288,7 @@ > ?stmt.typedef.acceptable_as_base_class = False > > ?def FunctionDef_get_name(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'name') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3302,7 +3309,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def FunctionDef_get_args(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'args') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3388,7 +3395,7 @@ > ?FunctionDef.typedef.acceptable_as_base_class = False > > ?def ClassDef_get_name(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'name') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3493,7 +3500,7 @@ > ?ClassDef.typedef.acceptable_as_base_class = False > > ?def Return_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3599,7 +3606,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Assign_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3646,7 +3653,7 @@ > ?Assign.typedef.acceptable_as_base_class = False > > ?def AugAssign_get_target(space, w_self): > - ? 
?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'target') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3667,7 +3674,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def AugAssign_get_op(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'op') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3689,7 +3696,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def AugAssign_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3736,7 +3743,7 @@ > ?AugAssign.typedef.acceptable_as_base_class = False > > ?def Print_get_dest(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'dest') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3775,7 +3782,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Print_get_nl(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'nl') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3823,7 +3830,7 @@ > ?Print.typedef.acceptable_as_base_class = False > > ?def For_get_target(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'target') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -3844,7 +3851,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def For_get_iter(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'iter') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? 
return w_obj > @@ -3930,7 +3937,7 @@ > ?For.typedef.acceptable_as_base_class = False > > ?def While_get_test(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'test') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4015,7 +4022,7 @@ > ?While.typedef.acceptable_as_base_class = False > > ?def If_get_test(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'test') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4100,7 +4107,7 @@ > ?If.typedef.acceptable_as_base_class = False > > ?def With_get_context_expr(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'context_expr') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4121,7 +4128,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def With_get_optional_vars(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'optional_vars') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4187,7 +4194,7 @@ > ?With.typedef.acceptable_as_base_class = False > > ?def Raise_get_type(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'type') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4208,7 +4215,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Raise_get_inst(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'inst') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4229,7 +4236,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Raise_get_tback(space, w_self): > - ? 
?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'tback') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4422,7 +4429,7 @@ > ?TryFinally.typedef.acceptable_as_base_class = False > > ?def Assert_get_test(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'test') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4443,7 +4450,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Assert_get_msg(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'msg') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4532,7 +4539,7 @@ > ?Import.typedef.acceptable_as_base_class = False > > ?def ImportFrom_get_module(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'module') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4574,7 +4581,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def ImportFrom_get_level(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'level') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4622,7 +4629,7 @@ > ?ImportFrom.typedef.acceptable_as_base_class = False > > ?def Exec_get_body(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'body') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4643,7 +4650,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Exec_get_globals(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'globals') > ? ? ? ? if w_obj is not None: > ? ? 
? ? ? ? return w_obj > @@ -4664,7 +4671,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Exec_get_locals(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'locals') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4754,7 +4761,7 @@ > ?Global.typedef.acceptable_as_base_class = False > > ?def Expr_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4850,7 +4857,7 @@ > ?Continue.typedef.acceptable_as_base_class = False > > ?def expr_get_lineno(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'lineno') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4871,7 +4878,7 @@ > ? ? w_self.initialization_state |= w_self._lineno_mask > > ?def expr_get_col_offset(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'col_offset') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4900,7 +4907,7 @@ > ?expr.typedef.acceptable_as_base_class = False > > ?def BoolOp_get_op(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'op') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4966,7 +4973,7 @@ > ?BoolOp.typedef.acceptable_as_base_class = False > > ?def BinOp_get_left(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'left') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -4987,7 +4994,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def BinOp_get_op(space, w_self): > - ? 
?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'op') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5009,7 +5016,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def BinOp_get_right(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'right') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5056,7 +5063,7 @@ > ?BinOp.typedef.acceptable_as_base_class = False > > ?def UnaryOp_get_op(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'op') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5078,7 +5085,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def UnaryOp_get_operand(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'operand') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5124,7 +5131,7 @@ > ?UnaryOp.typedef.acceptable_as_base_class = False > > ?def Lambda_get_args(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'args') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5145,7 +5152,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Lambda_get_body(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'body') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5191,7 +5198,7 @@ > ?Lambda.typedef.acceptable_as_base_class = False > > ?def IfExp_get_test(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'test') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? 
return w_obj > @@ -5212,7 +5219,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def IfExp_get_body(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'body') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5233,7 +5240,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def IfExp_get_orelse(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'orelse') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5386,7 +5393,7 @@ > ?Set.typedef.acceptable_as_base_class = False > > ?def ListComp_get_elt(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'elt') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5451,7 +5458,7 @@ > ?ListComp.typedef.acceptable_as_base_class = False > > ?def SetComp_get_elt(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'elt') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5516,7 +5523,7 @@ > ?SetComp.typedef.acceptable_as_base_class = False > > ?def DictComp_get_key(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'key') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5537,7 +5544,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def DictComp_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5603,7 +5610,7 @@ > ?DictComp.typedef.acceptable_as_base_class = False > > ?def GeneratorExp_get_elt(space, w_self): > - ? 
?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'elt') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5668,7 +5675,7 @@ > ?GeneratorExp.typedef.acceptable_as_base_class = False > > ?def Yield_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5713,7 +5720,7 @@ > ?Yield.typedef.acceptable_as_base_class = False > > ?def Compare_get_left(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'left') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5798,7 +5805,7 @@ > ?Compare.typedef.acceptable_as_base_class = False > > ?def Call_get_func(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'func') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5855,7 +5862,7 @@ > ? ? w_self.initialization_state |= 4 > > ?def Call_get_starargs(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'starargs') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5876,7 +5883,7 @@ > ? ? w_self.initialization_state |= 8 > > ?def Call_get_kwargs(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'kwargs') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -5927,7 +5934,7 @@ > ?Call.typedef.acceptable_as_base_class = False > > ?def Repr_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? 
? ? ? ? return w_obj > @@ -5972,7 +5979,7 @@ > ?Repr.typedef.acceptable_as_base_class = False > > ?def Num_get_n(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'n') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6017,7 +6024,7 @@ > ?Num.typedef.acceptable_as_base_class = False > > ?def Str_get_s(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 's') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6062,7 +6069,7 @@ > ?Str.typedef.acceptable_as_base_class = False > > ?def Attribute_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6083,7 +6090,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Attribute_get_attr(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'attr') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6104,7 +6111,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Attribute_get_ctx(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'ctx') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6152,7 +6159,7 @@ > ?Attribute.typedef.acceptable_as_base_class = False > > ?def Subscript_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6173,7 +6180,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Subscript_get_slice(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? 
?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'slice') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6194,7 +6201,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Subscript_get_ctx(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'ctx') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6242,7 +6249,7 @@ > ?Subscript.typedef.acceptable_as_base_class = False > > ?def Name_get_id(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'id') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6263,7 +6270,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Name_get_ctx(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'ctx') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6328,7 +6335,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def List_get_ctx(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'ctx') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6394,7 +6401,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Tuple_get_ctx(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'ctx') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6442,7 +6449,7 @@ > ?Tuple.typedef.acceptable_as_base_class = False > > ?def Const_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? 
return w_obj > @@ -6558,7 +6565,7 @@ > ?Ellipsis.typedef.acceptable_as_base_class = False > > ?def Slice_get_lower(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'lower') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6579,7 +6586,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def Slice_get_upper(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'upper') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6600,7 +6607,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def Slice_get_step(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'step') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6690,7 +6697,7 @@ > ?ExtSlice.typedef.acceptable_as_base_class = False > > ?def Index_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6955,7 +6962,7 @@ > ?_NotIn.typedef.acceptable_as_base_class = False > > ?def comprehension_get_target(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'target') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -6976,7 +6983,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def comprehension_get_iter(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'iter') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7042,7 +7049,7 @@ > ?comprehension.typedef.acceptable_as_base_class = False > > ?def excepthandler_get_lineno(space, w_self): > - ? 
?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'lineno') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7063,7 +7070,7 @@ > ? ? w_self.initialization_state |= w_self._lineno_mask > > ?def excepthandler_get_col_offset(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'col_offset') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7092,7 +7099,7 @@ > ?excepthandler.typedef.acceptable_as_base_class = False > > ?def ExceptHandler_get_type(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'type') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7113,7 +7120,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def ExceptHandler_get_name(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'name') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7197,7 +7204,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def arguments_get_vararg(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'vararg') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7221,7 +7228,7 @@ > ? ? w_self.initialization_state |= 2 > > ?def arguments_get_kwarg(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'kwarg') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7292,7 +7299,7 @@ > ?arguments.typedef.acceptable_as_base_class = False > > ?def keyword_get_arg(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? 
w_obj = w_self.getdictvalue(space, 'arg') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7313,7 +7320,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def keyword_get_value(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'value') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7359,7 +7366,7 @@ > ?keyword.typedef.acceptable_as_base_class = False > > ?def alias_get_name(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'name') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > @@ -7380,7 +7387,7 @@ > ? ? w_self.initialization_state |= 1 > > ?def alias_get_asname(space, w_self): > - ? ?if getattr(w_self, 'w_dict', None): > + ? ?if w_self.w_dict is not None: > ? ? ? ? w_obj = w_self.getdictvalue(space, 'asname') > ? ? ? ? if w_obj is not None: > ? ? ? ? ? ? return w_obj > _______________________________________________ > pypy-svn mailing list > pypy-svn at codespeak.net > http://codespeak.net/mailman/listinfo/pypy-svn > From commits-noreply at bitbucket.org Fri Mar 11 20:31:30 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 20:31:30 +0100 (CET) Subject: [pypy-svn] pypy default: Finally remove all __slots__ and fix translation Message-ID: <20110311193130.04D12282B8B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42524:3a915c30fca2 Date: 2011-03-11 20:31 +0100 http://bitbucket.org/pypy/pypy/changeset/3a915c30fca2/ Log: Finally remove all __slots__ and fix translation diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -78,16 +78,15 @@ self.emit("") else: self.emit("class %s(AST):" % (base,)) - self.emit("") - slots = ", 
".join(repr(attr.name.value) for attr in sum.attributes) - self.emit("__slots__ = (%s)" % (slots,), 1) - self.emit("") if sum.attributes: args = ", ".join(attr.name.value for attr in sum.attributes) self.emit("def __init__(self, %s):" % (args,), 1) for attr in sum.attributes: self.visit(attr) self.emit("") + else: + self.emit("pass", 1) + self.emit("") for cons in sum.types: self.visit(cons, base, sum.attributes) self.emit("") @@ -95,9 +94,6 @@ def visitProduct(self, product, name): self.emit("class %s(AST):" % (name,)) self.emit("") - slots = self.make_slots(product.fields) - self.emit("__slots__ = (%s)" % (slots,), 1) - self.emit("") self.make_constructor(product.fields, product) self.emit("") self.make_mutate_over(product, name) @@ -106,15 +102,6 @@ self.emit("") self.make_var_syncer(product.fields, product, name) - def make_slots(self, fields): - slots = [] - for field in fields: - name = repr(field.name.value) - slots.append(name) - if field.seq: - slots.append("'w_%s'" % (field.name,)) - return ", ".join(slots) - def make_var_syncer(self, fields, node, name): self.emit("def sync_app_attrs(self, space):", 1) config = (self.data.optional_masks[node], @@ -208,9 +195,6 @@ def visitConstructor(self, cons, base, extra_attributes): self.emit("class %s(%s):" % (cons.name, base)) self.emit("") - slots = self.make_slots(cons.fields) - self.emit("__slots__ = (%s)" % (slots,), 1) - self.emit("") for field in self.data.cons_attributes[cons]: subst = (field.name, self.data.field_masks[field]) self.emit("_%s_mask = %i" % subst, 1) diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -14,9 +14,6 @@ __metaclass__ = extendabletype - def __init__(self): - self.w_dict = None - def walkabout(self, visitor): raise AssertionError("walkabout() implementation not provided") @@ -75,13 +72,10 @@ class mod(AST): - - __slots__ = () + pass class Module(mod): - __slots__ 
= ('body', 'w_body') - def __init__(self, body): self.body = body @@ -115,8 +109,6 @@ class Interactive(mod): - __slots__ = ('body', 'w_body') - def __init__(self, body): self.body = body @@ -150,8 +142,6 @@ class Expression(mod): - __slots__ = ('body') - def __init__(self, body): self.body = body @@ -174,8 +164,6 @@ class Suite(mod): - __slots__ = ('body', 'w_body') - def __init__(self, body): self.body = body @@ -208,18 +196,12 @@ class stmt(AST): - - __slots__ = ('lineno', 'col_offset') - def __init__(self, lineno, col_offset): - AST.__init__(self) self.lineno = lineno self.col_offset = col_offset class FunctionDef(stmt): - __slots__ = ('name', 'args', 'body', 'w_body', 'decorator_list', 'w_decorator_list') - _lineno_mask = 16 _col_offset_mask = 32 @@ -274,8 +256,6 @@ class ClassDef(stmt): - __slots__ = ('name', 'bases', 'w_bases', 'body', 'w_body', 'decorator_list', 'w_decorator_list') - _lineno_mask = 16 _col_offset_mask = 32 @@ -341,8 +321,6 @@ class Return(stmt): - __slots__ = ('value') - _lineno_mask = 2 _col_offset_mask = 4 @@ -371,8 +349,6 @@ class Delete(stmt): - __slots__ = ('targets', 'w_targets') - _lineno_mask = 2 _col_offset_mask = 4 @@ -409,8 +385,6 @@ class Assign(stmt): - __slots__ = ('targets', 'w_targets', 'value') - _lineno_mask = 4 _col_offset_mask = 8 @@ -450,8 +424,6 @@ class AugAssign(stmt): - __slots__ = ('target', 'op', 'value') - _lineno_mask = 8 _col_offset_mask = 16 @@ -481,8 +453,6 @@ class Print(stmt): - __slots__ = ('dest', 'values', 'w_values', 'nl') - _lineno_mask = 8 _col_offset_mask = 16 @@ -526,8 +496,6 @@ class For(stmt): - __slots__ = ('target', 'iter', 'body', 'w_body', 'orelse', 'w_orelse') - _lineno_mask = 16 _col_offset_mask = 32 @@ -584,8 +552,6 @@ class While(stmt): - __slots__ = ('test', 'body', 'w_body', 'orelse', 'w_orelse') - _lineno_mask = 8 _col_offset_mask = 16 @@ -639,8 +605,6 @@ class If(stmt): - __slots__ = ('test', 'body', 'w_body', 'orelse', 'w_orelse') - _lineno_mask = 8 _col_offset_mask = 16 @@ -694,8 
+658,6 @@ class With(stmt): - __slots__ = ('context_expr', 'optional_vars', 'body', 'w_body') - _lineno_mask = 8 _col_offset_mask = 16 @@ -741,8 +703,6 @@ class Raise(stmt): - __slots__ = ('type', 'inst', 'tback') - _lineno_mask = 8 _col_offset_mask = 16 @@ -785,8 +745,6 @@ class TryExcept(stmt): - __slots__ = ('body', 'w_body', 'handlers', 'w_handlers', 'orelse', 'w_orelse') - _lineno_mask = 8 _col_offset_mask = 16 @@ -851,8 +809,6 @@ class TryFinally(stmt): - __slots__ = ('body', 'w_body', 'finalbody', 'w_finalbody') - _lineno_mask = 4 _col_offset_mask = 8 @@ -903,8 +859,6 @@ class Assert(stmt): - __slots__ = ('test', 'msg') - _lineno_mask = 4 _col_offset_mask = 8 @@ -936,8 +890,6 @@ class Import(stmt): - __slots__ = ('names', 'w_names') - _lineno_mask = 2 _col_offset_mask = 4 @@ -974,8 +926,6 @@ class ImportFrom(stmt): - __slots__ = ('module', 'names', 'w_names', 'level') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1017,8 +967,6 @@ class Exec(stmt): - __slots__ = ('body', 'globals', 'locals') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1057,8 +1005,6 @@ class Global(stmt): - __slots__ = ('names', 'w_names') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1090,8 +1036,6 @@ class Expr(stmt): - __slots__ = ('value') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1117,8 +1061,6 @@ class Pass(stmt): - __slots__ = () - _lineno_mask = 1 _col_offset_mask = 2 @@ -1141,8 +1083,6 @@ class Break(stmt): - __slots__ = () - _lineno_mask = 1 _col_offset_mask = 2 @@ -1165,8 +1105,6 @@ class Continue(stmt): - __slots__ = () - _lineno_mask = 1 _col_offset_mask = 2 @@ -1188,18 +1126,12 @@ class expr(AST): - - __slots__ = ('lineno', 'col_offset') - def __init__(self, lineno, col_offset): - AST.__init__(self) self.lineno = lineno self.col_offset = col_offset class BoolOp(expr): - __slots__ = ('op', 'values', 'w_values') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1237,8 +1169,6 @@ class BinOp(expr): - __slots__ = ('left', 'op', 'right') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1268,8 
+1198,6 @@ class UnaryOp(expr): - __slots__ = ('op', 'operand') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1296,8 +1224,6 @@ class Lambda(expr): - __slots__ = ('args', 'body') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1326,8 +1252,6 @@ class IfExp(expr): - __slots__ = ('test', 'body', 'orelse') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1359,8 +1283,6 @@ class Dict(expr): - __slots__ = ('keys', 'w_keys', 'values', 'w_values') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1411,8 +1333,6 @@ class Set(expr): - __slots__ = ('elts', 'w_elts') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1449,8 +1369,6 @@ class ListComp(expr): - __slots__ = ('elt', 'generators', 'w_generators') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1490,8 +1408,6 @@ class SetComp(expr): - __slots__ = ('elt', 'generators', 'w_generators') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1531,8 +1447,6 @@ class DictComp(expr): - __slots__ = ('key', 'value', 'generators', 'w_generators') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1575,8 +1489,6 @@ class GeneratorExp(expr): - __slots__ = ('elt', 'generators', 'w_generators') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1616,8 +1528,6 @@ class Yield(expr): - __slots__ = ('value') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1646,8 +1556,6 @@ class Compare(expr): - __slots__ = ('left', 'ops', 'w_ops', 'comparators', 'w_comparators') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1696,8 +1604,6 @@ class Call(expr): - __slots__ = ('func', 'args', 'w_args', 'keywords', 'w_keywords', 'starargs', 'kwargs') - _lineno_mask = 32 _col_offset_mask = 64 @@ -1764,8 +1670,6 @@ class Repr(expr): - __slots__ = ('value') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1791,8 +1695,6 @@ class Num(expr): - __slots__ = ('n') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1816,8 +1718,6 @@ class Str(expr): - __slots__ = ('s') - _lineno_mask = 2 _col_offset_mask = 4 @@ -1841,8 +1741,6 @@ class Attribute(expr): - __slots__ = ('value', 'attr', 'ctx') - _lineno_mask = 8 _col_offset_mask = 
16 @@ -1870,8 +1768,6 @@ class Subscript(expr): - __slots__ = ('value', 'slice', 'ctx') - _lineno_mask = 8 _col_offset_mask = 16 @@ -1901,8 +1797,6 @@ class Name(expr): - __slots__ = ('id', 'ctx') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1927,8 +1821,6 @@ class List(expr): - __slots__ = ('elts', 'w_elts', 'ctx') - _lineno_mask = 4 _col_offset_mask = 8 @@ -1966,8 +1858,6 @@ class Tuple(expr): - __slots__ = ('elts', 'w_elts', 'ctx') - _lineno_mask = 4 _col_offset_mask = 8 @@ -2005,8 +1895,6 @@ class Const(expr): - __slots__ = ('value') - _lineno_mask = 2 _col_offset_mask = 4 @@ -2081,16 +1969,12 @@ ] class slice(AST): - - __slots__ = () + pass class Ellipsis(slice): - __slots__ = () - def __init__(self): - AST.__init__(self) self.initialization_state = 0 def walkabout(self, visitor): @@ -2108,8 +1992,6 @@ class Slice(slice): - __slots__ = ('lower', 'upper', 'step') - def __init__(self, lower, upper, step): self.lower = lower @@ -2149,8 +2031,6 @@ class ExtSlice(slice): - __slots__ = ('dims', 'w_dims') - def __init__(self, dims): self.dims = dims @@ -2184,8 +2064,6 @@ class Index(slice): - __slots__ = ('value') - def __init__(self, value): self.value = value @@ -2444,8 +2322,6 @@ class comprehension(AST): - __slots__ = ('target', 'iter', 'ifs', 'w_ifs') - def __init__(self, target, iter, ifs): self.target = target self.iter = iter @@ -2482,18 +2358,12 @@ node.sync_app_attrs(space) class excepthandler(AST): - - __slots__ = ('lineno', 'col_offset') - def __init__(self, lineno, col_offset): - AST.__init__(self) self.lineno = lineno self.col_offset = col_offset class ExceptHandler(excepthandler): - __slots__ = ('type', 'name', 'body', 'w_body') - _lineno_mask = 8 _col_offset_mask = 16 @@ -2543,8 +2413,6 @@ class arguments(AST): - __slots__ = ('args', 'w_args', 'vararg', 'kwarg', 'defaults', 'w_defaults') - def __init__(self, args, vararg, kwarg, defaults): self.args = args self.w_args = None @@ -2595,8 +2463,6 @@ class keyword(AST): - __slots__ = ('arg', 'value') - def 
__init__(self, arg, value): self.arg = arg self.value = value @@ -2618,8 +2484,6 @@ class alias(AST): - __slots__ = ('name', 'asname') - def __init__(self, name, asname): self.name = name self.asname = asname From commits-noreply at bitbucket.org Fri Mar 11 22:59:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 11 Mar 2011 22:59:01 +0100 (CET) Subject: [pypy-svn] pypy default: Add pickle support to ast objects Message-ID: <20110311215901.41FD736C204@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42525:4604f1b75a11 Date: 2011-03-11 22:58 +0100 http://bitbucket.org/pypy/pypy/changeset/4604f1b75a11/ Log: Add pickle support to ast objects diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -341,6 +341,7 @@ display_name = name self.emit("%s.typedef = typedef.TypeDef(\"%s\"," % (name, display_name)) self.emit("%s.typedef," % (base,), 1) + self.emit("__module__='_ast',", 1) comma_fields = ", ".join(repr(field.name.value) for field in fields) self.emit("%s=_FieldsWrapper([%s])," % (fields_name, comma_fields), 1) for field in fields: @@ -563,6 +564,29 @@ self.w_dict = space.newdict(instance=True) return self.w_dict + def reduce_w(self, space): + w_dict = self.w_dict + if w_dict is None: + w_dict = space.newdict() + w_type = space.type(self) + w_fields = w_type.getdictvalue(space, "_fields") + for w_name in space.fixedview(w_fields): + space.setitem(w_dict, w_name, + space.getattr(self, w_name)) + w_attrs = space.findattr(w_type, space.wrap("_attributes")) + if w_attrs: + for w_name in space.fixedview(w_attrs): + space.setitem(w_dict, w_name, + space.getattr(self, w_name)) + return space.newtuple([space.type(self), + space.newtuple([]), + w_dict]) + + def setstate_w(self, space, w_state): + for w_name in space.unpackiterable(w_state): + space.setattr(self, w_name, + 
space.getitem(w_state, w_name)) + class NodeVisitorNotImplemented(Exception): pass @@ -589,6 +613,11 @@ AST.typedef = typedef.TypeDef("AST", _fields=_FieldsWrapper([]), _attributes=_FieldsWrapper([]), + __module__='_ast', + __reduce__=interp2app(AST.reduce_w), + __setstate__=interp2app(AST.setstate_w), + __dict__ = typedef.GetSetProperty(typedef.descr_get_dict, + typedef.descr_set_dict, cls=AST), ) AST.typedef.acceptable_as_base_class = False diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -28,6 +28,29 @@ self.w_dict = space.newdict(instance=True) return self.w_dict + def reduce_w(self, space): + w_dict = self.w_dict + if w_dict is None: + w_dict = space.newdict() + w_type = space.type(self) + w_fields = w_type.getdictvalue(space, "_fields") + for w_name in space.fixedview(w_fields): + space.setitem(w_dict, w_name, + space.getattr(self, w_name)) + w_attrs = space.findattr(w_type, space.wrap("_attributes")) + if w_attrs: + for w_name in space.fixedview(w_attrs): + space.setitem(w_dict, w_name, + space.getattr(self, w_name)) + return space.newtuple([space.type(self), + space.newtuple([]), + w_dict]) + + def setstate_w(self, space, w_state): + for w_name in space.unpackiterable(w_state): + space.setattr(self, w_name, + space.getitem(w_state, w_name)) + class NodeVisitorNotImplemented(Exception): pass @@ -54,6 +77,11 @@ AST.typedef = typedef.TypeDef("AST", _fields=_FieldsWrapper([]), _attributes=_FieldsWrapper([]), + __module__='_ast', + __reduce__=interp2app(AST.reduce_w), + __setstate__=interp2app(AST.setstate_w), + __dict__ = typedef.GetSetProperty(typedef.descr_get_dict, + typedef.descr_set_dict, cls=AST), ) AST.typedef.acceptable_as_base_class = False @@ -2923,6 +2951,7 @@ mod.typedef = typedef.TypeDef("mod", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) mod.typedef.acceptable_as_base_class = False @@ -2963,6 +2992,7 
@@ Module.typedef = typedef.TypeDef("Module", mod.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body']), body=typedef.GetSetProperty(Module_get_body, Module_set_body, cls=Module), __new__=interp2app(get_AST_new(Module)), @@ -3006,6 +3036,7 @@ Interactive.typedef = typedef.TypeDef("Interactive", mod.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body']), body=typedef.GetSetProperty(Interactive_get_body, Interactive_set_body, cls=Interactive), __new__=interp2app(get_AST_new(Interactive)), @@ -3051,6 +3082,7 @@ Expression.typedef = typedef.TypeDef("Expression", mod.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body']), body=typedef.GetSetProperty(Expression_get_body, Expression_set_body, cls=Expression), __new__=interp2app(get_AST_new(Expression)), @@ -3094,6 +3126,7 @@ Suite.typedef = typedef.TypeDef("Suite", mod.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body']), body=typedef.GetSetProperty(Suite_get_body, Suite_set_body, cls=Suite), __new__=interp2app(get_AST_new(Suite)), @@ -3145,6 +3178,7 @@ stmt.typedef = typedef.TypeDef("stmt", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(stmt_get_lineno, stmt_set_lineno, cls=stmt), col_offset=typedef.GetSetProperty(stmt_get_col_offset, stmt_set_col_offset, cls=stmt), @@ -3248,6 +3282,7 @@ FunctionDef.typedef = typedef.TypeDef("FunctionDef", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['name', 'args', 'body', 'decorator_list']), name=typedef.GetSetProperty(FunctionDef_get_name, FunctionDef_set_name, cls=FunctionDef), args=typedef.GetSetProperty(FunctionDef_get_args, FunctionDef_set_args, cls=FunctionDef), @@ -3353,6 +3388,7 @@ ClassDef.typedef = typedef.TypeDef("ClassDef", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['name', 'bases', 'body', 'decorator_list']), name=typedef.GetSetProperty(ClassDef_get_name, ClassDef_set_name, cls=ClassDef), bases=typedef.GetSetProperty(ClassDef_get_bases, 
ClassDef_set_bases, cls=ClassDef), @@ -3401,6 +3437,7 @@ Return.typedef = typedef.TypeDef("Return", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Return_get_value, Return_set_value, cls=Return), __new__=interp2app(get_AST_new(Return)), @@ -3444,6 +3481,7 @@ Delete.typedef = typedef.TypeDef("Delete", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['targets']), targets=typedef.GetSetProperty(Delete_get_targets, Delete_set_targets, cls=Delete), __new__=interp2app(get_AST_new(Delete)), @@ -3508,6 +3546,7 @@ Assign.typedef = typedef.TypeDef("Assign", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['targets', 'value']), targets=typedef.GetSetProperty(Assign_get_targets, Assign_set_targets, cls=Assign), value=typedef.GetSetProperty(Assign_get_value, Assign_set_value, cls=Assign), @@ -3597,6 +3636,7 @@ AugAssign.typedef = typedef.TypeDef("AugAssign", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['target', 'op', 'value']), target=typedef.GetSetProperty(AugAssign_get_target, AugAssign_set_target, cls=AugAssign), op=typedef.GetSetProperty(AugAssign_get_op, AugAssign_set_op, cls=AugAssign), @@ -3684,6 +3724,7 @@ Print.typedef = typedef.TypeDef("Print", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['dest', 'values', 'nl']), dest=typedef.GetSetProperty(Print_get_dest, Print_set_dest, cls=Print), values=typedef.GetSetProperty(Print_get_values, Print_set_values, cls=Print), @@ -3790,6 +3831,7 @@ For.typedef = typedef.TypeDef("For", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['target', 'iter', 'body', 'orelse']), target=typedef.GetSetProperty(For_get_target, For_set_target, cls=For), iter=typedef.GetSetProperty(For_get_iter, For_set_iter, cls=For), @@ -3876,6 +3918,7 @@ While.typedef = typedef.TypeDef("While", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['test', 'body', 'orelse']), test=typedef.GetSetProperty(While_get_test, While_set_test, cls=While), 
body=typedef.GetSetProperty(While_get_body, While_set_body, cls=While), @@ -3961,6 +4004,7 @@ If.typedef = typedef.TypeDef("If", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['test', 'body', 'orelse']), test=typedef.GetSetProperty(If_get_test, If_set_test, cls=If), body=typedef.GetSetProperty(If_get_body, If_set_body, cls=If), @@ -4048,6 +4092,7 @@ With.typedef = typedef.TypeDef("With", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['context_expr', 'optional_vars', 'body']), context_expr=typedef.GetSetProperty(With_get_context_expr, With_set_context_expr, cls=With), optional_vars=typedef.GetSetProperty(With_get_optional_vars, With_set_optional_vars, cls=With), @@ -4137,6 +4182,7 @@ Raise.typedef = typedef.TypeDef("Raise", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['type', 'inst', 'tback']), type=typedef.GetSetProperty(Raise_get_type, Raise_set_type, cls=Raise), inst=typedef.GetSetProperty(Raise_get_inst, Raise_set_inst, cls=Raise), @@ -4220,6 +4266,7 @@ TryExcept.typedef = typedef.TypeDef("TryExcept", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body', 'handlers', 'orelse']), body=typedef.GetSetProperty(TryExcept_get_body, TryExcept_set_body, cls=TryExcept), handlers=typedef.GetSetProperty(TryExcept_get_handlers, TryExcept_set_handlers, cls=TryExcept), @@ -4284,6 +4331,7 @@ TryFinally.typedef = typedef.TypeDef("TryFinally", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body', 'finalbody']), body=typedef.GetSetProperty(TryFinally_get_body, TryFinally_set_body, cls=TryFinally), finalbody=typedef.GetSetProperty(TryFinally_get_finalbody, TryFinally_set_finalbody, cls=TryFinally), @@ -4351,6 +4399,7 @@ Assert.typedef = typedef.TypeDef("Assert", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['test', 'msg']), test=typedef.GetSetProperty(Assert_get_test, Assert_set_test, cls=Assert), msg=typedef.GetSetProperty(Assert_get_msg, Assert_set_msg, cls=Assert), @@ -4395,6 +4444,7 @@ Import.typedef = 
typedef.TypeDef("Import", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['names']), names=typedef.GetSetProperty(Import_get_names, Import_set_names, cls=Import), __new__=interp2app(get_AST_new(Import)), @@ -4483,6 +4533,7 @@ ImportFrom.typedef = typedef.TypeDef("ImportFrom", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['module', 'names', 'level']), module=typedef.GetSetProperty(ImportFrom_get_module, ImportFrom_set_module, cls=ImportFrom), names=typedef.GetSetProperty(ImportFrom_get_names, ImportFrom_set_names, cls=ImportFrom), @@ -4572,6 +4623,7 @@ Exec.typedef = typedef.TypeDef("Exec", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['body', 'globals', 'locals']), body=typedef.GetSetProperty(Exec_get_body, Exec_set_body, cls=Exec), globals=typedef.GetSetProperty(Exec_get_globals, Exec_set_globals, cls=Exec), @@ -4617,6 +4669,7 @@ Global.typedef = typedef.TypeDef("Global", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['names']), names=typedef.GetSetProperty(Global_get_names, Global_set_names, cls=Global), __new__=interp2app(get_AST_new(Global)), @@ -4662,6 +4715,7 @@ Expr.typedef = typedef.TypeDef("Expr", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Expr_get_value, Expr_set_value, cls=Expr), __new__=interp2app(get_AST_new(Expr)), @@ -4680,6 +4734,7 @@ Pass.typedef = typedef.TypeDef("Pass", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(Pass)), __init__=interp2app(Pass_init), @@ -4697,6 +4752,7 @@ Break.typedef = typedef.TypeDef("Break", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(Break)), __init__=interp2app(Break_init), @@ -4714,6 +4770,7 @@ Continue.typedef = typedef.TypeDef("Continue", stmt.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(Continue)), __init__=interp2app(Continue_init), @@ -4764,6 +4821,7 @@ expr.typedef = 
typedef.TypeDef("expr", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(expr_get_lineno, expr_set_lineno, cls=expr), col_offset=typedef.GetSetProperty(expr_get_col_offset, expr_set_col_offset, cls=expr), @@ -4828,6 +4886,7 @@ BoolOp.typedef = typedef.TypeDef("BoolOp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['op', 'values']), op=typedef.GetSetProperty(BoolOp_get_op, BoolOp_set_op, cls=BoolOp), values=typedef.GetSetProperty(BoolOp_get_values, BoolOp_set_values, cls=BoolOp), @@ -4917,6 +4976,7 @@ BinOp.typedef = typedef.TypeDef("BinOp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['left', 'op', 'right']), left=typedef.GetSetProperty(BinOp_get_left, BinOp_set_left, cls=BinOp), op=typedef.GetSetProperty(BinOp_get_op, BinOp_set_op, cls=BinOp), @@ -4986,6 +5046,7 @@ UnaryOp.typedef = typedef.TypeDef("UnaryOp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['op', 'operand']), op=typedef.GetSetProperty(UnaryOp_get_op, UnaryOp_set_op, cls=UnaryOp), operand=typedef.GetSetProperty(UnaryOp_get_operand, UnaryOp_set_operand, cls=UnaryOp), @@ -5053,6 +5114,7 @@ Lambda.typedef = typedef.TypeDef("Lambda", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['args', 'body']), args=typedef.GetSetProperty(Lambda_get_args, Lambda_set_args, cls=Lambda), body=typedef.GetSetProperty(Lambda_get_body, Lambda_set_body, cls=Lambda), @@ -5141,6 +5203,7 @@ IfExp.typedef = typedef.TypeDef("IfExp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['test', 'body', 'orelse']), test=typedef.GetSetProperty(IfExp_get_test, IfExp_set_test, cls=IfExp), body=typedef.GetSetProperty(IfExp_get_body, IfExp_set_body, cls=IfExp), @@ -5205,6 +5268,7 @@ Dict.typedef = typedef.TypeDef("Dict", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['keys', 'values']), keys=typedef.GetSetProperty(Dict_get_keys, Dict_set_keys, cls=Dict), values=typedef.GetSetProperty(Dict_get_values, 
Dict_set_values, cls=Dict), @@ -5249,6 +5313,7 @@ Set.typedef = typedef.TypeDef("Set", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elts']), elts=typedef.GetSetProperty(Set_get_elts, Set_set_elts, cls=Set), __new__=interp2app(get_AST_new(Set)), @@ -5313,6 +5378,7 @@ ListComp.typedef = typedef.TypeDef("ListComp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elt', 'generators']), elt=typedef.GetSetProperty(ListComp_get_elt, ListComp_set_elt, cls=ListComp), generators=typedef.GetSetProperty(ListComp_get_generators, ListComp_set_generators, cls=ListComp), @@ -5378,6 +5444,7 @@ SetComp.typedef = typedef.TypeDef("SetComp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elt', 'generators']), elt=typedef.GetSetProperty(SetComp_get_elt, SetComp_set_elt, cls=SetComp), generators=typedef.GetSetProperty(SetComp_get_generators, SetComp_set_generators, cls=SetComp), @@ -5464,6 +5531,7 @@ DictComp.typedef = typedef.TypeDef("DictComp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['key', 'value', 'generators']), key=typedef.GetSetProperty(DictComp_get_key, DictComp_set_key, cls=DictComp), value=typedef.GetSetProperty(DictComp_get_value, DictComp_set_value, cls=DictComp), @@ -5530,6 +5598,7 @@ GeneratorExp.typedef = typedef.TypeDef("GeneratorExp", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elt', 'generators']), elt=typedef.GetSetProperty(GeneratorExp_get_elt, GeneratorExp_set_elt, cls=GeneratorExp), generators=typedef.GetSetProperty(GeneratorExp_get_generators, GeneratorExp_set_generators, cls=GeneratorExp), @@ -5576,6 +5645,7 @@ Yield.typedef = typedef.TypeDef("Yield", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Yield_get_value, Yield_set_value, cls=Yield), __new__=interp2app(get_AST_new(Yield)), @@ -5659,6 +5729,7 @@ Compare.typedef = typedef.TypeDef("Compare", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['left', 'ops', 'comparators']), 
left=typedef.GetSetProperty(Compare_get_left, Compare_set_left, cls=Compare), ops=typedef.GetSetProperty(Compare_get_ops, Compare_set_ops, cls=Compare), @@ -5786,6 +5857,7 @@ Call.typedef = typedef.TypeDef("Call", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['func', 'args', 'keywords', 'starargs', 'kwargs']), func=typedef.GetSetProperty(Call_get_func, Call_set_func, cls=Call), args=typedef.GetSetProperty(Call_get_args, Call_set_args, cls=Call), @@ -5835,6 +5907,7 @@ Repr.typedef = typedef.TypeDef("Repr", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Repr_get_value, Repr_set_value, cls=Repr), __new__=interp2app(get_AST_new(Repr)), @@ -5880,6 +5953,7 @@ Num.typedef = typedef.TypeDef("Num", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['n']), n=typedef.GetSetProperty(Num_get_n, Num_set_n, cls=Num), __new__=interp2app(get_AST_new(Num)), @@ -5925,6 +5999,7 @@ Str.typedef = typedef.TypeDef("Str", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['s']), s=typedef.GetSetProperty(Str_get_s, Str_set_s, cls=Str), __new__=interp2app(get_AST_new(Str)), @@ -6013,6 +6088,7 @@ Attribute.typedef = typedef.TypeDef("Attribute", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value', 'attr', 'ctx']), value=typedef.GetSetProperty(Attribute_get_value, Attribute_set_value, cls=Attribute), attr=typedef.GetSetProperty(Attribute_get_attr, Attribute_set_attr, cls=Attribute), @@ -6103,6 +6179,7 @@ Subscript.typedef = typedef.TypeDef("Subscript", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value', 'slice', 'ctx']), value=typedef.GetSetProperty(Subscript_get_value, Subscript_set_value, cls=Subscript), slice=typedef.GetSetProperty(Subscript_get_slice, Subscript_set_slice, cls=Subscript), @@ -6172,6 +6249,7 @@ Name.typedef = typedef.TypeDef("Name", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['id', 'ctx']), id=typedef.GetSetProperty(Name_get_id, Name_set_id, 
cls=Name), ctx=typedef.GetSetProperty(Name_get_ctx, Name_set_ctx, cls=Name), @@ -6238,6 +6316,7 @@ List.typedef = typedef.TypeDef("List", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elts', 'ctx']), elts=typedef.GetSetProperty(List_get_elts, List_set_elts, cls=List), ctx=typedef.GetSetProperty(List_get_ctx, List_set_ctx, cls=List), @@ -6304,6 +6383,7 @@ Tuple.typedef = typedef.TypeDef("Tuple", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['elts', 'ctx']), elts=typedef.GetSetProperty(Tuple_get_elts, Tuple_set_elts, cls=Tuple), ctx=typedef.GetSetProperty(Tuple_get_ctx, Tuple_set_ctx, cls=Tuple), @@ -6350,6 +6430,7 @@ Const.typedef = typedef.TypeDef("Const", expr.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Const_get_value, Const_set_value, cls=Const), __new__=interp2app(get_AST_new(Const)), @@ -6359,12 +6440,14 @@ expr_context.typedef = typedef.TypeDef("expr_context", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) expr_context.typedef.acceptable_as_base_class = False _Load.typedef = typedef.TypeDef("Load", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Load)), ) @@ -6372,6 +6455,7 @@ _Store.typedef = typedef.TypeDef("Store", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Store)), ) @@ -6379,6 +6463,7 @@ _Del.typedef = typedef.TypeDef("Del", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Del)), ) @@ -6386,6 +6471,7 @@ _AugLoad.typedef = typedef.TypeDef("AugLoad", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_AugLoad)), ) @@ -6393,6 +6479,7 @@ _AugStore.typedef = typedef.TypeDef("AugStore", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_AugStore)), ) @@ -6400,6 +6487,7 @@ _Param.typedef = 
typedef.TypeDef("Param", expr_context.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Param)), ) @@ -6407,6 +6495,7 @@ slice.typedef = typedef.TypeDef("slice", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) slice.typedef.acceptable_as_base_class = False @@ -6422,6 +6511,7 @@ Ellipsis.typedef = typedef.TypeDef("Ellipsis", slice.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(Ellipsis)), __init__=interp2app(Ellipsis_init), @@ -6508,6 +6598,7 @@ Slice.typedef = typedef.TypeDef("Slice", slice.typedef, + __module__='_ast', _fields=_FieldsWrapper(['lower', 'upper', 'step']), lower=typedef.GetSetProperty(Slice_get_lower, Slice_set_lower, cls=Slice), upper=typedef.GetSetProperty(Slice_get_upper, Slice_set_upper, cls=Slice), @@ -6553,6 +6644,7 @@ ExtSlice.typedef = typedef.TypeDef("ExtSlice", slice.typedef, + __module__='_ast', _fields=_FieldsWrapper(['dims']), dims=typedef.GetSetProperty(ExtSlice_get_dims, ExtSlice_set_dims, cls=ExtSlice), __new__=interp2app(get_AST_new(ExtSlice)), @@ -6598,6 +6690,7 @@ Index.typedef = typedef.TypeDef("Index", slice.typedef, + __module__='_ast', _fields=_FieldsWrapper(['value']), value=typedef.GetSetProperty(Index_get_value, Index_set_value, cls=Index), __new__=interp2app(get_AST_new(Index)), @@ -6607,12 +6700,14 @@ boolop.typedef = typedef.TypeDef("boolop", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) boolop.typedef.acceptable_as_base_class = False _And.typedef = typedef.TypeDef("And", boolop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_And)), ) @@ -6620,6 +6715,7 @@ _Or.typedef = typedef.TypeDef("Or", boolop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Or)), ) @@ -6627,12 +6723,14 @@ operator.typedef = typedef.TypeDef("operator", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) 
operator.typedef.acceptable_as_base_class = False _Add.typedef = typedef.TypeDef("Add", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Add)), ) @@ -6640,6 +6738,7 @@ _Sub.typedef = typedef.TypeDef("Sub", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Sub)), ) @@ -6647,6 +6746,7 @@ _Mult.typedef = typedef.TypeDef("Mult", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Mult)), ) @@ -6654,6 +6754,7 @@ _Div.typedef = typedef.TypeDef("Div", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Div)), ) @@ -6661,6 +6762,7 @@ _Mod.typedef = typedef.TypeDef("Mod", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Mod)), ) @@ -6668,6 +6770,7 @@ _Pow.typedef = typedef.TypeDef("Pow", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Pow)), ) @@ -6675,6 +6778,7 @@ _LShift.typedef = typedef.TypeDef("LShift", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_LShift)), ) @@ -6682,6 +6786,7 @@ _RShift.typedef = typedef.TypeDef("RShift", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_RShift)), ) @@ -6689,6 +6794,7 @@ _BitOr.typedef = typedef.TypeDef("BitOr", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitOr)), ) @@ -6696,6 +6802,7 @@ _BitXor.typedef = typedef.TypeDef("BitXor", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitXor)), ) @@ -6703,6 +6810,7 @@ _BitAnd.typedef = typedef.TypeDef("BitAnd", operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitAnd)), ) @@ -6710,6 +6818,7 @@ _FloorDiv.typedef = typedef.TypeDef("FloorDiv", 
operator.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_FloorDiv)), ) @@ -6717,12 +6826,14 @@ unaryop.typedef = typedef.TypeDef("unaryop", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) unaryop.typedef.acceptable_as_base_class = False _Invert.typedef = typedef.TypeDef("Invert", unaryop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Invert)), ) @@ -6730,6 +6841,7 @@ _Not.typedef = typedef.TypeDef("Not", unaryop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Not)), ) @@ -6737,6 +6849,7 @@ _UAdd.typedef = typedef.TypeDef("UAdd", unaryop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_UAdd)), ) @@ -6744,6 +6857,7 @@ _USub.typedef = typedef.TypeDef("USub", unaryop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_USub)), ) @@ -6751,12 +6865,14 @@ cmpop.typedef = typedef.TypeDef("cmpop", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper([]), ) cmpop.typedef.acceptable_as_base_class = False _Eq.typedef = typedef.TypeDef("Eq", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Eq)), ) @@ -6764,6 +6880,7 @@ _NotEq.typedef = typedef.TypeDef("NotEq", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_NotEq)), ) @@ -6771,6 +6888,7 @@ _Lt.typedef = typedef.TypeDef("Lt", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Lt)), ) @@ -6778,6 +6896,7 @@ _LtE.typedef = typedef.TypeDef("LtE", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_LtE)), ) @@ -6785,6 +6904,7 @@ _Gt.typedef = typedef.TypeDef("Gt", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Gt)), ) @@ -6792,6 +6912,7 @@ _GtE.typedef = typedef.TypeDef("GtE", 
cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_GtE)), ) @@ -6799,6 +6920,7 @@ _Is.typedef = typedef.TypeDef("Is", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Is)), ) @@ -6806,6 +6928,7 @@ _IsNot.typedef = typedef.TypeDef("IsNot", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_IsNot)), ) @@ -6813,6 +6936,7 @@ _In.typedef = typedef.TypeDef("In", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_In)), ) @@ -6820,6 +6944,7 @@ _NotIn.typedef = typedef.TypeDef("NotIn", cmpop.typedef, + __module__='_ast', _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_NotIn)), ) @@ -6903,6 +7028,7 @@ comprehension.typedef = typedef.TypeDef("comprehension", AST.typedef, + __module__='_ast', _fields=_FieldsWrapper(['target', 'iter', 'ifs']), target=typedef.GetSetProperty(comprehension_get_target, comprehension_set_target, cls=comprehension), iter=typedef.GetSetProperty(comprehension_get_iter, comprehension_set_iter, cls=comprehension), @@ -6956,6 +7082,7 @@ excepthandler.typedef = typedef.TypeDef("excepthandler", AST.typedef, + __module__='_ast', _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(excepthandler_get_lineno, excepthandler_set_lineno, cls=excepthandler), col_offset=typedef.GetSetProperty(excepthandler_get_col_offset, excepthandler_set_col_offset, cls=excepthandler), @@ -7040,6 +7167,7 @@ ExceptHandler.typedef = typedef.TypeDef("ExceptHandler", excepthandler.typedef, + __module__='_ast', _fields=_FieldsWrapper(['type', 'name', 'body']), type=typedef.GetSetProperty(ExceptHandler_get_type, ExceptHandler_set_type, cls=ExceptHandler), name=typedef.GetSetProperty(ExceptHandler_get_name, ExceptHandler_set_name, cls=ExceptHandler), @@ -7152,6 +7280,7 @@ arguments.typedef = typedef.TypeDef("arguments", AST.typedef, + __module__='_ast', 
_fields=_FieldsWrapper(['args', 'vararg', 'kwarg', 'defaults']), args=typedef.GetSetProperty(arguments_get_args, arguments_set_args, cls=arguments), vararg=typedef.GetSetProperty(arguments_get_vararg, arguments_set_vararg, cls=arguments), @@ -7221,6 +7350,7 @@ keyword.typedef = typedef.TypeDef("keyword", AST.typedef, + __module__='_ast', _fields=_FieldsWrapper(['arg', 'value']), arg=typedef.GetSetProperty(keyword_get_arg, keyword_set_arg, cls=keyword), value=typedef.GetSetProperty(keyword_get_value, keyword_set_value, cls=keyword), @@ -7291,6 +7421,7 @@ alias.typedef = typedef.TypeDef("alias", AST.typedef, + __module__='_ast', _fields=_FieldsWrapper(['name', 'asname']), name=typedef.GetSetProperty(alias_get_name, alias_set_name, cls=alias), asname=typedef.GetSetProperty(alias_get_asname, alias_set_asname, cls=alias), diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -186,7 +186,6 @@ assert x._fields == 666 def test_pickle(self): - skip("XXX implement me") import pickle mod = self.get_ast("if y: x = 4") co = compile(mod, "", "exec") From commits-noreply at bitbucket.org Sat Mar 12 15:55:38 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 15:55:38 +0100 (CET) Subject: [pypy-svn] jitviewer default: be explicit that pypy should be on your sys.path Message-ID: <20110312145538.781C436C204@codespeak.net> Author: Alex Gaynor Branch: Changeset: r102:7b3850b099e4 Date: 2011-03-12 09:54 -0500 http://bitbucket.org/pypy/jitviewer/changeset/7b3850b099e4/ Log: be explicit that pypy should be on your sys.path diff --git a/README b/README --- a/README +++ b/README @@ -12,7 +12,8 @@ python setup.py develop -It also requires pypy to be importable (as in source code). +It also requires pypy to be importable (as in source code), you can do this +by setting your ``PYTHONPATH`` enviromental variable. 
Finally, run it: From commits-noreply at bitbucket.org Sat Mar 12 15:55:38 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 15:55:38 +0100 (CET) Subject: [pypy-svn] jitviewer default: merged upstream Message-ID: <20110312145538.BD59B282BA1@codespeak.net> Author: Alex Gaynor Branch: Changeset: r103:0d30e91bc625 Date: 2011-03-12 09:55 -0500 http://bitbucket.org/pypy/jitviewer/changeset/0d30e91bc625/ Log: merged upstream From commits-noreply at bitbucket.org Sat Mar 12 17:32:24 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 17:32:24 +0100 (CET) Subject: [pypy-svn] pypy default: Reduce register pressure on llong_from_int, using the PSRAD instruction Message-ID: <20110312163224.79B8036C20B@codespeak.net> Author: Armin Rigo Branch: Changeset: r42526:0d2a71cb22e1 Date: 2011-03-12 11:31 -0500 http://bitbucket.org/pypy/pypy/changeset/0d2a71cb22e1/ Log: Reduce register pressure on llong_from_int, using the PSRAD instruction to do the equivalent of CDQ but within the xmm registers. 
diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -552,6 +552,8 @@ MOVD_rx = xmminsn('\x66', rex_w, '\x0F\x7E', register(2, 8), register(1), '\xC0') MOVD_xr = xmminsn('\x66', rex_w, '\x0F\x6E', register(1, 8), register(2), '\xC0') + PSRAD_xi = xmminsn('\x66', rex_nw, '\x0F\x72', register(1), '\xE0', immediate(2, 'b')) + # ------------------------------------------------------------ Conditions = { diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -1138,17 +1138,17 @@ not_implemented("llong_to_int: %s" % (loc,)) def genop_llong_from_int(self, op, arglocs, resloc): - loc = arglocs[0] - if isinstance(loc, ConstFloatLoc): - self.mc.MOVSD(resloc, loc) + loc1, loc2 = arglocs + if isinstance(loc1, ConstFloatLoc): + assert loc2 is None + self.mc.MOVSD(resloc, loc1) else: - assert loc is eax + assert isinstance(loc1, RegLoc) + assert isinstance(loc2, RegLoc) assert isinstance(resloc, RegLoc) - loc2 = arglocs[1] - assert isinstance(loc2, RegLoc) - self.mc.CDQ() # eax -> eax:edx - self.mc.MOVD_xr(resloc.value, eax.value) - self.mc.MOVD_xr(loc2.value, edx.value) + self.mc.MOVD_xr(loc2.value, loc1.value) + self.mc.PSRAD_xi(loc2.value, 31) # -> 0 or -1 + self.mc.MOVD_xr(resloc.value, loc1.value) self.mc.PUNPCKLDQ_xx(resloc.value, loc2.value) def genop_llong_from_uint(self, op, arglocs, resloc): diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -716,11 +716,7 @@ loc1 = self._loc_of_const_longlong(r_longlong(box.value)) loc2 = None # unused else: - # requires the argument to be in eax, and trash edx. 
- loc1 = self.rm.make_sure_var_in_reg(box, selected_reg=eax) - tmpvar = TempBox() - self.rm.force_allocate_reg(tmpvar, [box], selected_reg=edx) - self.rm.possibly_free_var(tmpvar) + loc1 = self.rm.make_sure_var_in_reg(box) tmpxvar = TempBox() loc2 = self.xrm.force_allocate_reg(tmpxvar, [op.result]) self.xrm.possibly_free_var(tmpxvar) diff --git a/pypy/jit/backend/x86/test/test_rx86_32_auto_encoding.py b/pypy/jit/backend/x86/test/test_rx86_32_auto_encoding.py --- a/pypy/jit/backend/x86/test/test_rx86_32_auto_encoding.py +++ b/pypy/jit/backend/x86/test/test_rx86_32_auto_encoding.py @@ -216,6 +216,8 @@ m = modes[0] lst = tests[m]() random.shuffle(lst) + if methname == 'PSRAD_xi' and m == 'i': + lst = [x for x in lst if 0 <= x <= 31] result = [] for v in lst: result += self.make_all_tests(methname, modes[1:], args+[v]) From commits-noreply at bitbucket.org Sat Mar 12 17:51:52 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sat, 12 Mar 2011 17:51:52 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: add simplify Message-ID: <20110312165152.5891036C201@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42527:ea37f19f93a8 Date: 2011-03-12 11:51 -0500 http://bitbucket.org/pypy/pypy/changeset/ea37f19f93a8/ Log: add simplify diff --git a/pypy/jit/metainterp/optimizeopt/simplify.py b/pypy/jit/metainterp/optimizeopt/simplify.py new file mode 100644 --- /dev/null +++ b/pypy/jit/metainterp/optimizeopt/simplify.py @@ -0,0 +1,32 @@ + +from pypy.jit.metainterp.resoperation import ResOperation, rop +from pypy.jit.metainterp.optimizeopt.optimizer import Optimization +from pypy.jit.metainterp.optimizeutil import _findall + +class OptSimplify(Optimization): + def optimize_CALL_PURE(self, op): + args = op.getarglist()[1:] + self.emit_operation(ResOperation(rop.CALL, args, op.result, + op.getdescr())) + + def optimize_CALL_LOOPINVARIANT(self, op): + op = op.copy_and_change(rop.CALL) + self.emit_operation(op) + + def optimize_VIRTUAL_REF_FINISH(self, op): + 
pass + + def optimize_VIRTUAL_REF(self, op): + op = ResOperation(rop.SAME_AS, [op.getarg(0)], op.result) + self.emit_operation(op) + + def propagate_forward(self, op): + opnum = op.getopnum() + for value, func in optimize_ops: + if opnum == value: + func(self, op) + break + else: + self.emit_operation(op) + +optimize_ops = _findall(OptSimplify, 'optimize_') From commits-noreply at bitbucket.org Sat Mar 12 19:09:26 2011 From: commits-noreply at bitbucket.org (tav) Date: Sat, 12 Mar 2011 19:09:26 +0100 (CET) Subject: [pypy-svn] pypy default: Improved ctypes.CFUNCTYPE prototype and paramflags handling. Message-ID: <20110312180926.E9CD736C208@codespeak.net> Author: tav Branch: Changeset: r42528:04456b424578 Date: 2011-03-12 18:09 +0000 http://bitbucket.org/pypy/pypy/changeset/04456b424578/ Log: Improved ctypes.CFUNCTYPE prototype and paramflags handling. diff --git a/lib-python/modified-2.7.0/ctypes/__init__.py b/lib-python/modified-2.7.0/ctypes/__init__.py --- a/lib-python/modified-2.7.0/ctypes/__init__.py +++ b/lib-python/modified-2.7.0/ctypes/__init__.py @@ -355,11 +355,12 @@ self._handle = handle def __repr__(self): - return "<%s '%s', handle %x at %x>" % \ + return "<%s '%s', handle %r at %x>" % \ (self.__class__.__name__, self._name, - (self._handle & (_sys.maxint*2 + 1)), + (self._handle), id(self) & (_sys.maxint*2 + 1)) + def __getattr__(self, name): if name.startswith('__') and name.endswith('__'): raise AttributeError(name) diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -110,53 +110,73 @@ self.name = None self._objects = {keepalive_key(0):self} self._needs_free = True - argument = None - if len(args) == 1: - argument = args[0] - if isinstance(argument, (int, long)): - # direct construction from raw address + # Empty function object -- this is needed for casts + if not args: + self._buffer = _rawffi.Array('P')(1) + return + + args = list(args) + argument = 
args.pop(0) + + # Direct construction from raw address + if isinstance(argument, (int, long)) and not args: ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) - self._ptr = _rawffi.FuncPtr(argument, ffiargs, ffires, - self._flags_) + self._ptr = _rawffi.FuncPtr(argument, ffiargs, ffires, self._flags_) self._buffer = self._ptr.byptr() - elif callable(argument): - # A callback into python + return + + # A callback into python + if callable(argument) and not args: self.callable = argument ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) if self._restype_ is None: ffires = None - self._ptr = _rawffi.CallbackPtr(self._wrap_callable(argument, - self.argtypes), - ffiargs, ffires, self._flags_) + self._ptr = _rawffi.CallbackPtr(self._wrap_callable( + argument, self.argtypes + ), ffiargs, ffires, self._flags_) self._buffer = self._ptr.byptr() - elif isinstance(argument, tuple) and len(argument) == 2: - # function exported from a shared library + return + + # Function exported from a shared library + if isinstance(argument, tuple) and len(argument) == 2: import ctypes - self.name, self.dll = argument - if isinstance(self.dll, str): - self.dll = ctypes.CDLL(self.dll) + name, dll = argument + # XXX Implement support for foreign function ordinal + if not isinstance(name, basestring): + raise NotImplementedError( + "Support for foreign functions exported by ordinal " + "hasn't been implemented yet." 
+ ) + self.name = name + if isinstance(dll, str): + self.dll = ctypes.CDLL(dll) + else: + self.dll = dll + if args: + self._paramflags = args.pop(0) + if args: + raise TypeError("Unknown constructor %s" % (args,)) # we need to check dll anyway ptr = self._getfuncptr([], ctypes.c_int) self._buffer = ptr.byptr() + return - elif (sys.platform == 'win32' and - len(args) >= 2 and isinstance(args[0], (int, long))): - # A COM function call, by index + # A COM function call, by index + if (sys.platform == 'win32' and isinstance(argument, (int, long)) + and args): ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) - self._com_index = args[0] + 0x1000 - self.name = args[1] - if len(args) > 2: - self._paramflags = args[2] - # XXX ignored iid = args[3] + self._com_index = argument + 0x1000 + self.name = args.pop(0) + if args: + self._paramflags = args.pop(0) + if args: + raise TypeError("Unknown constructor %s" % (args,)) + # XXX Implement support for the optional ``iid`` pointer to the + # interface identifier used in extended error reporting. + return - elif len(args) == 0: - # Empty function object. 
- # this is needed for casts - self._buffer = _rawffi.Array('P')(1) - return - else: - raise TypeError("Unknown constructor %s" % (args,)) + raise TypeError("Unknown constructor %s" % (args,)) def _wrap_callable(self, to_call, argtypes): def f(*args): @@ -166,7 +186,7 @@ return to_call(*args) return f - def __call__(self, *args): + def __call__(self, *args, **kwargs): if self.callable is not None: if len(args) == len(self._argtypes_): pass @@ -214,7 +234,7 @@ if argtypes is None: argtypes = [] - args = self._convert_args(argtypes, args) + args, output_values = self._convert_args(argtypes, args, kwargs) argtypes = [type(arg) for arg in args] restype = self._restype_ @@ -244,6 +264,11 @@ if v is not args: result = v + if output_values: + if len(output_values) == 1: + return output_values[0] + return tuple(output_values) + return result @@ -289,7 +314,7 @@ raise @staticmethod - def _conv_param(argtype, arg, index): + def _conv_param(argtype, arg): from ctypes import c_char_p, c_wchar_p, c_void_p, c_int if argtype is not None: arg = argtype.from_param(arg) @@ -312,20 +337,24 @@ return cobj - def _convert_args(self, argtypes, args): + def _convert_args(self, argtypes, args, kwargs): wrapped_args = [] + output_values = [] consumed = 0 + # XXX Implement support for kwargs/name for i, argtype in enumerate(argtypes): defaultvalue = None - if i > 0 and self._paramflags is not None: - paramflag = self._paramflags[i-1] - if len(paramflag) == 2: + if self._paramflags is not None: + paramflag = self._paramflags[i] + paramlen = len(paramflag) + name = None + if paramlen == 1: + idlflag = paramflag[0] + elif paramlen == 2: idlflag, name = paramflag - elif len(paramflag) == 3: + elif paramlen == 3: idlflag, name, defaultvalue = paramflag - else: - idlflag = 0 idlflag &= (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID) if idlflag in (0, PARAMFLAG_FIN): @@ -333,8 +362,8 @@ elif idlflag == PARAMFLAG_FOUT: import ctypes val = argtype._type_() - wrapped = (val, ctypes.byref(val)) - 
wrapped_args.append(wrapped) + output_values.append(val) + wrapped_args.append(ctypes.byref(val)) continue elif idlflag == PARAMFLAG_FIN | PARAMFLAG_FLCID: # Always taken from defaultvalue if given, @@ -342,12 +371,12 @@ val = defaultvalue if val is None: val = 0 - wrapped = self._conv_param(argtype, val, consumed) + wrapped = self._conv_param(argtype, val) wrapped_args.append(wrapped) continue else: raise NotImplementedError( - "paramflags = %s" % (self._paramflags[i-1],)) + "paramflags = %s" % (self._paramflags[i],)) if consumed < len(args): arg = args[consumed] @@ -357,7 +386,7 @@ raise TypeError("Not enough arguments") try: - wrapped = self._conv_param(argtype, arg, consumed) + wrapped = self._conv_param(argtype, arg) except (UnicodeError, TypeError, ValueError), e: raise ArgumentError(str(e)) wrapped_args.append(wrapped) @@ -368,11 +397,11 @@ argtypes = list(argtypes) for i, arg in enumerate(extra): try: - wrapped = self._conv_param(None, arg, i) + wrapped = self._conv_param(None, arg) except (UnicodeError, TypeError, ValueError), e: raise ArgumentError(str(e)) wrapped_args.append(wrapped) - return wrapped_args + return wrapped_args, output_values def _build_result(self, restype, resbuffer, argtypes, argsandobjs): """Build the function result: From commits-noreply at bitbucket.org Sat Mar 12 19:40:29 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sat, 12 Mar 2011 19:40:29 +0100 (CET) Subject: [pypy-svn] pypy default: fix test runs directly addressing files in ctypes_tests Message-ID: <20110312184029.7DBCC36C20C@codespeak.net> Author: holger krekel Branch: Changeset: r42529:76d5dc2b3501 Date: 2011-03-12 19:39 +0100 http://bitbucket.org/pypy/pypy/changeset/76d5dc2b3501/ Log: fix test runs directly addressing files in ctypes_tests diff --git a/pypy/module/test_lib_pypy/ctypes_tests/conftest.py b/pypy/module/test_lib_pypy/ctypes_tests/conftest.py --- a/pypy/module/test_lib_pypy/ctypes_tests/conftest.py +++ 
b/pypy/module/test_lib_pypy/ctypes_tests/conftest.py @@ -1,4 +1,4 @@ -import py +import py, pytest import sys def pytest_ignore_collect(path): @@ -8,7 +8,7 @@ def compile_so_file(): from pypy.translator.platform import platform from pypy.translator.tool.cbuild import ExternalCompilationInfo - udir = py.test.ensuretemp('_ctypes_test') + udir = pytest.ensuretemp('_ctypes_test') cfile = py.path.local(__file__).dirpath().join("_ctypes_test.c") if sys.platform == 'win32': @@ -20,6 +20,8 @@ return platform.compile([cfile], eci, str(udir.join('_ctypes_test')), standalone=False) +# we need to run after the "tmpdir" plugin which installs pytest.ensuretemp + at pytest.mark.trylast def pytest_configure(config): global sofile sofile = compile_so_file() From commits-noreply at bitbucket.org Sat Mar 12 20:12:04 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 20:12:04 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Improve the test to make sure that external() actually reads Message-ID: <20110312191204.DEF8636C20C@codespeak.net> Author: Armin Rigo Branch: enable-opts Changeset: r42530:07f7e085ed08 Date: 2011-03-12 14:11 -0500 http://bitbucket.org/pypy/pypy/changeset/07f7e085ed08/ Log: Improve the test to make sure that external() actually reads '_frame', to prevent optimizations from delaying the allocation of X(). It used to work fine because this particular test used to run without optimizations. 
diff --git a/pypy/jit/metainterp/test/test_virtualref.py b/pypy/jit/metainterp/test/test_virtualref.py --- a/pypy/jit/metainterp/test/test_virtualref.py +++ b/pypy/jit/metainterp/test/test_virtualref.py @@ -39,15 +39,19 @@ if not isinstance(self, TestLLtype): py.test.skip("purely frontend test") # + class FooBarError(Exception): + pass class X: def __init__(self, n): self.n = n class ExCtx: - pass + _frame = None exctx = ExCtx() # @dont_look_inside def external(n): + if exctx._frame is None: + raise FooBarError if n > 100: return exctx.topframeref().n return n From commits-noreply at bitbucket.org Sat Mar 12 20:14:11 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sat, 12 Mar 2011 20:14:11 +0100 (CET) Subject: [pypy-svn] pypy default: use pytest-2.0.3.dev1 which lazy-imports zlib and others in genscript plugin Message-ID: <20110312191411.C845A36C20C@codespeak.net> Author: holger krekel Branch: Changeset: r42531:d2b00d622f59 Date: 2011-03-12 20:13 +0100 http://bitbucket.org/pypy/pypy/changeset/d2b00d622f59/ Log: use pytest-2.0.3.dev1 which lazy-imports zlib and others in genscript plugin diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -3,7 +3,7 @@ (pypy version of startup script) see http://pytest.org for details. 
""" -__version__ = '2.0.3.dev0' # base pytest version +__version__ = '2.0.3.dev1' # base pytest version __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins diff --git a/_pytest/genscript.py b/_pytest/genscript.py --- a/_pytest/genscript.py +++ b/_pytest/genscript.py @@ -1,8 +1,5 @@ """ generate a single-file self-contained version of py.test """ import py -import pickle -import zlib -import base64 def find_toplevel(name): for syspath in py.std.sys.path: @@ -31,9 +28,9 @@ return name2src def compress_mapping(mapping): - data = pickle.dumps(mapping, 2) - data = zlib.compress(data, 9) - data = base64.encodestring(data) + data = py.std.pickle.dumps(mapping, 2) + data = py.std.zlib.compress(data, 9) + data = py.std.base64.encodestring(data) data = data.decode('ascii') return data @@ -44,7 +41,6 @@ mapping.update(pkg_to_mapping(name)) return compress_mapping(mapping) - def generate_script(entry, packages): data = compress_packages(packages) tmpl = py.path.local(__file__).dirpath().join('standalonetemplate.py') From commits-noreply at bitbucket.org Sat Mar 12 20:22:27 2011 From: commits-noreply at bitbucket.org (tav) Date: Sat, 12 Mar 2011 20:22:27 +0100 (CET) Subject: [pypy-svn] pypy default: Extended .hg/.gitgnore for ctypes test generated files. Message-ID: <20110312192227.073D136C204@codespeak.net> Author: tav Branch: Changeset: r42532:84b280d6e013 Date: 2011-03-12 19:21 +0000 http://bitbucket.org/pypy/pypy/changeset/84b280d6e013/ Log: Extended .hg/.gitgnore for ctypes test generated files. 
diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -13,6 +13,7 @@ ^pypy/module/cpyext/test/.+\.o$ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ +^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ ^pypy/doc/.+\.html$ ^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,7 @@ pypy/doc/*.html pypy/doc/config/*.html pypy/doc/discussion/*.html +pypy/module/test_lib_pypy/ctypes_tests/*.o pypy/translator/c/src/dtoa.o pypy/translator/goal/pypy-c pypy/translator/goal/target*-c From commits-noreply at bitbucket.org Sat Mar 12 20:22:32 2011 From: commits-noreply at bitbucket.org (tav) Date: Sat, 12 Mar 2011 20:22:32 +0100 (CET) Subject: [pypy-svn] pypy default: Updated ctypes test to match new _conv_param signature. Message-ID: <20110312192232.8747036C205@codespeak.net> Author: tav Branch: Changeset: r42533:fc64e7a76ac7 Date: 2011-03-12 19:22 +0000 http://bitbucket.org/pypy/pypy/changeset/fc64e7a76ac7/ Log: Updated ctypes test to match new _conv_param signature. 
diff --git a/pypy/module/test_lib_pypy/ctypes_tests/test_guess_argtypes.py b/pypy/module/test_lib_pypy/ctypes_tests/test_guess_argtypes.py --- a/pypy/module/test_lib_pypy/ctypes_tests/test_guess_argtypes.py +++ b/pypy/module/test_lib_pypy/ctypes_tests/test_guess_argtypes.py @@ -12,7 +12,7 @@ from _ctypes.function import CFuncPtr def guess(value): - cobj = CFuncPtr._conv_param(None, value, 0) + cobj = CFuncPtr._conv_param(None, value) return type(cobj) assert guess(13) == c_int From commits-noreply at bitbucket.org Sat Mar 12 20:22:49 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sat, 12 Mar 2011 20:22:49 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Fix test_compile Message-ID: <20110312192249.CACBF36C204@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42534:dc0f57fef14d Date: 2011-03-12 14:22 -0500 http://bitbucket.org/pypy/pypy/changeset/dc0f57fef14d/ Log: Fix test_compile diff --git a/pypy/jit/metainterp/test/test_compile.py b/pypy/jit/metainterp/test/test_compile.py --- a/pypy/jit/metainterp/test/test_compile.py +++ b/pypy/jit/metainterp/test/test_compile.py @@ -4,7 +4,7 @@ from pypy.jit.metainterp.compile import ResumeGuardDescr from pypy.jit.metainterp.compile import ResumeGuardCountersInt from pypy.jit.metainterp.compile import compile_tmp_callback -from pypy.jit.metainterp import nounroll_optimize, jitprof, typesystem, compile +from pypy.jit.metainterp import jitprof, typesystem, compile from pypy.jit.metainterp.test.test_optimizeutil import LLtypeMixin from pypy.jit.tool.oparser import parse from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT @@ -26,28 +26,28 @@ assert lst == [tok1, tok2, tok3] -class FakeCPU: +class FakeCPU(object): ts = typesystem.llhelper def __init__(self): self.seen = [] def compile_loop(self, inputargs, operations, token): self.seen.append((inputargs, operations, token)) -class FakeLogger: +class FakeLogger(object): def log_loop(self, inputargs, operations, number=0, type=None): pass 
-class FakeState: - optimize_loop = staticmethod(nounroll_optimize.optimize_loop) - enable_opts = {} +class FakeState(object): + enable_opts = ALL_OPTS_DICT.copy() + enable_opts.pop('unroll') def attach_unoptimized_bridge_from_interp(*args): pass -class FakeGlobalData: +class FakeGlobalData(object): loopnumbering = 0 -class FakeMetaInterpStaticData: +class FakeMetaInterpStaticData(object): logger_noopt = FakeLogger() logger_ops = FakeLogger() From commits-noreply at bitbucket.org Sat Mar 12 20:26:55 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 20:26:55 +0100 (CET) Subject: [pypy-svn] pypy default: Add a small equivalent to the old py.cleanup. Message-ID: <20110312192655.3793C36C205@codespeak.net> Author: Armin Rigo Branch: Changeset: r42535:da95ea609720 Date: 2011-03-12 14:26 -0500 http://bitbucket.org/pypy/pypy/changeset/da95ea609720/ Log: Add a small equivalent to the old py.cleanup. diff --git a/pypy/tool/py.cleanup b/pypy/tool/py.cleanup new file mode 100755 --- /dev/null +++ b/pypy/tool/py.cleanup @@ -0,0 +1,16 @@ +#!/usr/bin/env python +import py, sys + +def shouldremove(p): + return p.ext == '.pyc' + +count = 0 + +for arg in sys.argv[1:] or ['.']: + path = py.path.local(arg) + print "cleaning path", path, "of .pyc files" + for x in path.visit(shouldremove, lambda x: x.check(dotfile=0, link=0)): + x.remove() + count += 1 + +print "%d files removed" % (count,) From commits-noreply at bitbucket.org Sat Mar 12 22:29:37 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 22:29:37 +0100 (CET) Subject: [pypy-svn] pypy default: inline _sre methods Message-ID: <20110312212937.6869C36C20E@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42536:77fe06f486e0 Date: 2011-03-12 16:29 -0500 http://bitbucket.org/pypy/pypy/changeset/77fe06f486e0/ Log: inline _sre methods diff --git a/pypy/module/pypyjit/policy.py b/pypy/module/pypyjit/policy.py --- a/pypy/module/pypyjit/policy.py +++ 
b/pypy/module/pypyjit/policy.py @@ -14,7 +14,7 @@ modname, _ = modname.split('.', 1) if modname in ['pypyjit', 'signal', 'micronumpy', 'math', 'exceptions', 'imp', 'sys', 'array', '_ffi', 'itertools', 'operator', - '_socket']: + '_socket', '_sre']: return True return False From commits-noreply at bitbucket.org Sat Mar 12 22:35:22 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 22:35:22 +0100 (CET) Subject: [pypy-svn] pypy default: GetSetProperty's functions are immutable. Message-ID: <20110312213522.A7D6936C20E@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42537:462546281356 Date: 2011-03-12 16:35 -0500 http://bitbucket.org/pypy/pypy/changeset/462546281356/ Log: GetSetProperty's functions are immutable. diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -383,6 +383,8 @@ return res class GetSetProperty(Wrappable): + _immutable_fields_ = ["fget", "fset", "fdel"] + @specialize.arg(7) def __init__(self, fget, fset=None, fdel=None, doc=None, cls=None, use_closure=False, tag=None): From commits-noreply at bitbucket.org Sat Mar 12 22:36:42 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 22:36:42 +0100 (CET) Subject: [pypy-svn] pypy default: (fijal, arigo) Message-ID: <20110312213642.8A05A282BA1@codespeak.net> Author: Armin Rigo Branch: Changeset: r42538:84f09467bad9 Date: 2011-03-12 15:03 -0500 http://bitbucket.org/pypy/pypy/changeset/84f09467bad9/ Log: (fijal, arigo) Start an optimization step that helps the JIT: when writing fields to a fresh new structure, don't record them in the WriteAnalyzer. 
diff --git a/pypy/translator/backendopt/canraise.py b/pypy/translator/backendopt/canraise.py --- a/pypy/translator/backendopt/canraise.py +++ b/pypy/translator/backendopt/canraise.py @@ -10,7 +10,7 @@ py.log.setconsumer("canraise", ansi_log) class RaiseAnalyzer(graphanalyze.BoolGraphAnalyzer): - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): try: return bool(LL_OPERATIONS[op.opname].canraise) except KeyError: diff --git a/pypy/jit/codewriter/effectinfo.py b/pypy/jit/codewriter/effectinfo.py --- a/pypy/jit/codewriter/effectinfo.py +++ b/pypy/jit/codewriter/effectinfo.py @@ -169,7 +169,7 @@ # ____________________________________________________________ class VirtualizableAnalyzer(BoolGraphAnalyzer): - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): return op.opname in ('jit_force_virtualizable', 'jit_force_virtual') diff --git a/pypy/translator/backendopt/test/test_writeanalyze.py b/pypy/translator/backendopt/test/test_writeanalyze.py --- a/pypy/translator/backendopt/test/test_writeanalyze.py +++ b/pypy/translator/backendopt/test/test_writeanalyze.py @@ -51,6 +51,18 @@ result = wa.analyze(ggraph.startblock.operations[-1]) assert not result + def test_write_to_new_struct(self): + class A(object): + pass + def f(x): + a = A() + a.baz = x # writes to a fresh new struct are ignored + return a + t, wa = self.translate(f, [int]) + fgraph = graphof(t, f) + result = wa.analyze_direct_call(fgraph) + assert not result + def test_method(self): class A(object): def f(self): @@ -314,14 +326,14 @@ S = lltype.GcStruct('S', ('x', lltype.Signed), adtmeths = {'yep': True, 'callme': ll_callme}) - def g(x, y, z): - p = lltype.malloc(S) + def g(p, x, y, z): p.x = x if p.yep: z *= p.callme(y) return z def f(x, y, z): - return g(x, y, z) + p = lltype.malloc(S) + return g(p, x, y, z) t, wa = self.translate(f, [int, int, int]) fgraph = graphof(t, f) diff --git 
a/pypy/translator/backendopt/writeanalyze.py b/pypy/translator/backendopt/writeanalyze.py --- a/pypy/translator/backendopt/writeanalyze.py +++ b/pypy/translator/backendopt/writeanalyze.py @@ -26,7 +26,9 @@ def is_top_result(result): return result is top_set - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): + if graphinfo and op.args[0] in graphinfo: + return empty_set if op.opname in ("setfield", "oosetfield"): return frozenset([ ("struct", op.args[0].concretetype, op.args[1].value)]) @@ -46,14 +48,26 @@ return self.bottom_result() return graphanalyze.GraphAnalyzer.analyze_external_method(self, op, TYPE, meth) + def compute_graph_info(self, graph): + newstructs = set() + for block in graph.iterblocks(): + for op in block.operations: + if (op.opname == 'malloc' or op.opname == 'malloc_varsize' + or op.opname == 'new'): + newstructs.add(op.result) + elif op.opname in ('cast_pointer', 'same_as'): + if op.args[0] in newstructs: + newstructs.add(op.result) + return newstructs + class ReadWriteAnalyzer(WriteAnalyzer): - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): if op.opname == "getfield": return frozenset([ ("readstruct", op.args[0].concretetype, op.args[1].value)]) elif op.opname == "getarrayitem": return frozenset([ ("readarray", op.args[0].concretetype)]) - return WriteAnalyzer.analyze_simple_operation(self, op) + return WriteAnalyzer.analyze_simple_operation(self, op, graphinfo) diff --git a/pypy/translator/backendopt/graphanalyze.py b/pypy/translator/backendopt/graphanalyze.py --- a/pypy/translator/backendopt/graphanalyze.py +++ b/pypy/translator/backendopt/graphanalyze.py @@ -27,7 +27,7 @@ # only an optimization, safe to always return False return False - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo=None): raise NotImplementedError("abstract base class") # some sensible default methods, can also be overridden @@ -64,7 +64,10 
@@ result = self.join_two_results(result, sub) return result - def analyze(self, op, seen=None): + def compute_graph_info(self, graph): + return None + + def analyze(self, op, seen=None, graphinfo=None): if op.opname == "direct_call": graph = get_graph(op.args[0], self.translator) if graph is None: @@ -82,7 +85,7 @@ if graph is None: return self.analyze_external_method(op, TYPE, meth) return self.analyze_oosend(TYPE, name, seen) - return self.analyze_simple_operation(op) + return self.analyze_simple_operation(op, graphinfo) def analyze_direct_call(self, graph, seen=None): if graph in self.analyzed_calls: @@ -98,6 +101,7 @@ started_here = False seen.add(graph) result = self.bottom_result() + graphinfo = self.compute_graph_info(graph) for block in graph.iterblocks(): if block is graph.startblock: result = self.join_two_results( @@ -107,7 +111,7 @@ result, self.analyze_exceptblock(block, seen)) for op in block.operations: result = self.join_two_results( - result, self.analyze(op, seen)) + result, self.analyze(op, seen, graphinfo)) for exit in block.exits: result = self.join_two_results( result, self.analyze_link(exit, seen)) From commits-noreply at bitbucket.org Sat Mar 12 22:36:43 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 22:36:43 +0100 (CET) Subject: [pypy-svn] pypy default: (fijal, arigo) Message-ID: <20110312213643.500F9282BA1@codespeak.net> Author: Armin Rigo Branch: Changeset: r42539:0646aa086da7 Date: 2011-03-12 15:52 -0500 http://bitbucket.org/pypy/pypy/changeset/0646aa086da7/ Log: (fijal, arigo) Improve on the previous check-in to detect fresh mallocs as long as they are in the same graph. That's probably good enough. 
diff --git a/pypy/translator/backendopt/test/test_writeanalyze.py b/pypy/translator/backendopt/test/test_writeanalyze.py --- a/pypy/translator/backendopt/test/test_writeanalyze.py +++ b/pypy/translator/backendopt/test/test_writeanalyze.py @@ -63,6 +63,69 @@ result = wa.analyze_direct_call(fgraph) assert not result + def test_write_to_new_struct_2(self): + class A(object): + pass + def f(x): + a = A() + # a few extra blocks + i = 10 + while i > 0: + i -= 1 + # done + a.baz = x # writes to a fresh new struct are ignored + return a + t, wa = self.translate(f, [int]) + fgraph = graphof(t, f) + result = wa.analyze_direct_call(fgraph) + assert not result + + def test_write_to_new_struct_3(self): + class A(object): + pass + prebuilt = A() + def f(x): + if x > 5: + a = A() + else: + a = A() + a.baz = x + return a + t, wa = self.translate(f, [int]) + fgraph = graphof(t, f) + result = wa.analyze_direct_call(fgraph) + assert not result + + def test_write_to_new_struct_4(self): + class A(object): + pass + prebuilt = A() + def f(x): + if x > 5: + a = A() + else: + a = prebuilt + a.baz = x + return a + t, wa = self.translate(f, [int]) + fgraph = graphof(t, f) + result = wa.analyze_direct_call(fgraph) + assert len(result) == 1 and 'baz' in list(result)[0][-1] + + def test_write_to_new_struct_5(self): + class A(object): + baz = 123 + def f(x): + if x: + a = A() + else: + a = A() + a.baz += 1 + t, wa = self.translate(f, [int]) + fgraph = graphof(t, f) + result = wa.analyze_direct_call(fgraph) + assert not result + def test_method(self): class A(object): def f(self): diff --git a/pypy/translator/backendopt/writeanalyze.py b/pypy/translator/backendopt/writeanalyze.py --- a/pypy/translator/backendopt/writeanalyze.py +++ b/pypy/translator/backendopt/writeanalyze.py @@ -1,3 +1,4 @@ +from pypy.objspace.flow.model import Variable from pypy.translator.backendopt import graphanalyze from pypy.rpython.ootypesystem import ootype @@ -27,13 +28,13 @@ return result is top_set def 
analyze_simple_operation(self, op, graphinfo): - if graphinfo and op.args[0] in graphinfo: - return empty_set if op.opname in ("setfield", "oosetfield"): - return frozenset([ - ("struct", op.args[0].concretetype, op.args[1].value)]) + if graphinfo is None or not graphinfo.is_fresh_malloc(op.args[0]): + return frozenset([ + ("struct", op.args[0].concretetype, op.args[1].value)]) elif op.opname == "setarrayitem": - return self._array_result(op.args[0].concretetype) + if graphinfo is None or not graphinfo.is_fresh_malloc(op.args[0]): + return self._array_result(op.args[0].concretetype) return empty_set def _array_result(self, TYPE): @@ -49,16 +50,43 @@ return graphanalyze.GraphAnalyzer.analyze_external_method(self, op, TYPE, meth) def compute_graph_info(self, graph): - newstructs = set() - for block in graph.iterblocks(): + return FreshMallocs(graph) + + +class FreshMallocs(object): + def __init__(self, graph): + self.nonfresh = set(graph.getargs()) + pendingblocks = list(graph.iterblocks()) + self.allvariables = set() + for block in pendingblocks: + self.allvariables.update(block.inputargs) + pendingblocks.reverse() + while pendingblocks: + block = pendingblocks.pop() for op in block.operations: + self.allvariables.add(op.result) if (op.opname == 'malloc' or op.opname == 'malloc_varsize' or op.opname == 'new'): - newstructs.add(op.result) + continue elif op.opname in ('cast_pointer', 'same_as'): - if op.args[0] in newstructs: - newstructs.add(op.result) - return newstructs + if self.is_fresh_malloc(op.args[0]): + continue + self.nonfresh.add(op.result) + for link in block.exits: + self.nonfresh.update(link.getextravars()) + self.allvariables.update(link.getextravars()) + prevlen = len(self.nonfresh) + for v1, v2 in zip(link.args, link.target.inputargs): + if not self.is_fresh_malloc(v1): + self.nonfresh.add(v2) + if len(self.nonfresh) > prevlen: + pendingblocks.append(link.target) + + def is_fresh_malloc(self, v): + if not isinstance(v, Variable): + return False + 
assert v in self.allvariables + return v not in self.nonfresh class ReadWriteAnalyzer(WriteAnalyzer): From commits-noreply at bitbucket.org Sat Mar 12 22:36:44 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 22:36:44 +0100 (CET) Subject: [pypy-svn] pypy default: Fix two more subclassings of analyze_simple_operation(). Message-ID: <20110312213644.B6617282BD7@codespeak.net> Author: Armin Rigo Branch: Changeset: r42540:f0ce44bf0e3e Date: 2011-03-12 15:54 -0500 http://bitbucket.org/pypy/pypy/changeset/f0ce44bf0e3e/ Log: Fix two more subclassings of analyze_simple_operation(). diff --git a/pypy/translator/stackless/transform.py b/pypy/translator/stackless/transform.py --- a/pypy/translator/stackless/transform.py +++ b/pypy/translator/stackless/transform.py @@ -247,7 +247,7 @@ graphanalyze.GraphAnalyzer.__init__(self, translator) self.stackless_gc = stackless_gc - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): if op.opname in ('yield_current_frame_to_caller', 'resume_point', 'resume_state_invoke', 'resume_state_create', 'stack_frames_depth', 'stack_switch', 'stack_unwind', 'stack_capture', diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -46,7 +46,7 @@ return True return graphanalyze.GraphAnalyzer.analyze_external_call(self, op, seen) - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): if op.opname in ('malloc', 'malloc_varsize'): flags = op.args[1].value return flags['flavor'] == 'gc' and not flags.get('nocollect', False) From commits-noreply at bitbucket.org Sat Mar 12 22:36:45 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 12 Mar 2011 22:36:45 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110312213645.23F54282BD8@codespeak.net> Author: Armin Rigo Branch: 
Changeset: r42541:c13993e6fb5b Date: 2011-03-12 22:36 +0100 http://bitbucket.org/pypy/pypy/changeset/c13993e6fb5b/ Log: merge heads From commits-noreply at bitbucket.org Sat Mar 12 22:45:59 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 22:45:59 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Note another optimization Message-ID: <20110312214559.0B63536C202@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3375:92bd5104cd4b Date: 2011-03-12 16:45 -0500 http://bitbucket.org/pypy/extradoc/changeset/92bd5104cd4b/ Log: Note another optimization diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -78,6 +78,9 @@ Should be just a matter of synthesizing reverse operations in rewrite.py +- Call to ll_find right after allocating a ``newstr(1)`` should just change + the call to ll_find_char and remove the allocation if it can. + PYTHON EXAMPLES --------------- From commits-noreply at bitbucket.org Sat Mar 12 23:06:45 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 23:06:45 +0100 (CET) Subject: [pypy-svn] pypy default: strgetitem always returns a value that is greater than or equal to 0, and less than 256 Message-ID: <20110312220645.666E836C201@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42542:723ee3eb68b8 Date: 2011-03-12 17:03 -0500 http://bitbucket.org/pypy/pypy/changeset/723ee3eb68b8/ Log: strgetitem always returns a value that is greater than or equal to 0, and less than 256 diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -194,7 +194,7 @@ print print loop.preamble.inputargs print '\n'.join([str(o) for o in loop.preamble.operations]) - print + print print loop.inputargs print '\n'.join([str(o) for o in loop.operations]) print @@ -833,7 +833,7 @@ i3 = getfield_gc(p2, 
descr=valuedescr) escape(i3) p3 = new_with_vtable(ConstClass(node_vtable)) - setfield_gc(p3, i1, descr=valuedescr) + setfield_gc(p3, i1, descr=valuedescr) jump(i1, p3) """ # We cannot track virtuals that survive for more than two iterations. @@ -893,7 +893,7 @@ escape(i3) p2sub = new_with_vtable(ConstClass(node_vtable2)) setfield_gc(p2sub, i1, descr=valuedescr) - setfield_gc(p2, p2sub, descr=nextdescr) + setfield_gc(p2, p2sub, descr=nextdescr) jump(i1, p2, p2sub) """ expected = """ @@ -1018,7 +1018,7 @@ """ preamble = """ [i, p0] - i0 = getfield_gc(p0, descr=valuedescr) + i0 = getfield_gc(p0, descr=valuedescr) i1 = int_add(i0, i) jump(i, i1) """ @@ -3464,7 +3464,7 @@ guard_true(i1) [] i2 = int_sub(i0, 10) i3 = int_lt(i2, -5) - guard_true(i3) [] + guard_true(i3) [] jump(i0) """ expected = """ @@ -3490,7 +3490,7 @@ i1 = int_lt(i0, 4) guard_true(i1) [] i1p = int_gt(i0, -4) - guard_true(i1p) [] + guard_true(i1p) [] i2 = int_sub(i0, 10) jump(i0) """ @@ -3773,7 +3773,7 @@ ops = """ [p4, p7, i30] p16 = getfield_gc(p4, descr=valuedescr) - p17 = getarrayitem_gc(p4, 1, descr=arraydescr) + p17 = getarrayitem_gc(p4, 1, descr=arraydescr) guard_value(p16, ConstPtr(myptr), descr=) [] i1 = getfield_raw(p7, descr=nextdescr) i2 = int_add(i1, i30) @@ -3940,7 +3940,7 @@ jump(p0) """ self.optimize_loop(ops, expected, expected) - + def test_addsub_ovf(self): ops = """ [i0] @@ -4060,7 +4060,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4099,7 +4099,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4117,7 +4117,7 @@ guard_false(i7) [] i8 = int_gt(i2c, -7) guard_true(i8) [] - i9 = int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ preamble = """ @@ -4129,12 +4129,12 @@ guard_true(i6) [] i8 = int_gt(i2c, -7) guard_true(i8) [] - i9 = int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ expected = """ 
[i0, i1, i2, i3] - jump(i0, i1, i2, i3) + jump(i0, i1, i2, i3) """ self.optimize_loop(ops, expected, preamble) @@ -4192,7 +4192,7 @@ def test_division_to_rshift(self): ops = """ [i1, i2] - it = int_gt(i1, 0) + it = int_gt(i1, 0) guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) @@ -4210,15 +4210,15 @@ """ expected = """ [i1, i2] - it = int_gt(i1, 0) - guard_true(it)[] + it = int_gt(i1, 0) + guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) i5 = int_rshift(i1, 1) i6 = int_floordiv(3, i2) i7 = int_floordiv(i1, 3) i8 = int_floordiv(4, i2) - i9 = int_rshift(i1, 2) + i9 = int_rshift(i1, 2) i10 = int_floordiv(i1, 0) i11 = int_rshift(i1, 0) i12 = int_floordiv(i2, 2) @@ -4259,7 +4259,7 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) @@ -4283,7 +4283,7 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) @@ -4295,7 +4295,7 @@ jump(i2, i3, i1b, i2b) """ self.optimize_loop(ops, expected) - + def test_subsub_ovf(self): ops = """ [i0] @@ -4479,7 +4479,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4507,9 +4507,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + jump(i0, i1, i1b, i2, i3) + """ + self.optimize_loop(ops, expected, preamble) def test_bound_rshift(self): ops = """ @@ -4544,7 +4544,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4572,9 +4572,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + 
jump(i0, i1, i1b, i2, i3) + """ + self.optimize_loop(ops, expected, preamble) def test_bound_dont_backpropagate_rshift(self): ops = """ @@ -4587,7 +4587,7 @@ """ self.optimize_loop(ops, ops, ops) - + def test_mul_ovf(self): ops = """ [i0, i1] @@ -4726,7 +4726,7 @@ def sort_key(self): return id(self) - + for n in ('inst_w_seq', 'inst_index', 'inst_w_list', 'inst_length', 'inst_start', 'inst_step'): self.namespace[n] = FakeDescr(n) @@ -4768,7 +4768,7 @@ i87 = int_add(i84, i86) i91 = int_add(i80, 1) setfield_gc(p75, i91, descr=inst_index) - + p110 = same_as(ConstPtr(myptr)) i112 = same_as(3) i114 = same_as(39) @@ -4788,13 +4788,13 @@ p1 = getfield_gc(p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ preamble = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ expected = """ @@ -4807,7 +4807,7 @@ ops = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) jump(p0) @@ -4831,7 +4831,7 @@ p2 = new_with_vtable(ConstClass(node_vtable)) setfield_gc(p2, i1, descr=nextdescr) """ - + # ---------- def optimize_strunicode_loop(self, ops, optops, preamble=None): if not preamble: @@ -5113,6 +5113,23 @@ """ self.optimize_strunicode_loop(ops, expected) + def test_strgetitem_small(self): + ops = """ + [p0, i0] + i1 = strgetitem(p0, i0) + i2 = int_lt(i1, 256) + guard_true(i2) [] + i3 = int_ge(i1, 0) + guard_true(i3) [] + jump(p0, i0) + """ + expected = """ + [p0, i0] + i1 = strgetitem(p0, i0) + jump(p0, i0) + """ + self.optimize_loop(ops, expected) + # ---------- def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): from pypy.jit.metainterp.optimizeopt import string @@ -5472,7 +5489,7 @@ # 
more generally, supporting non-constant but virtual cases is # not obvious, because of the exception UnicodeDecodeError that # can be raised by ll_str2unicode() - + diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -1,7 +1,7 @@ from pypy.jit.metainterp.optimizeopt.optimizer import Optimization, CONST_1, CONST_0 from pypy.jit.metainterp.optimizeutil import _findall from pypy.jit.metainterp.optimizeopt.intutils import IntBound, IntUnbounded, \ - IntLowerBound + IntLowerBound, IntUpperBound from pypy.jit.metainterp.history import Const, ConstInt from pypy.jit.metainterp.resoperation import rop, ResOperation @@ -15,7 +15,7 @@ def reconstruct_for_next_iteration(self, optimizer, valuemap): assert self.posponedop is None - return self + return self def propagate_forward(self, op): if op.is_ovf(): @@ -25,7 +25,7 @@ self.nextop = op op = self.posponedop self.posponedop = None - + opnum = op.getopnum() for value, func in optimize_ops: if opnum == value: @@ -34,7 +34,7 @@ else: assert not op.is_ovf() self.emit_operation(op) - + def propagate_bounds_backward(self, box): # FIXME: This takes care of the instruction where box is the reuslt @@ -161,7 +161,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_ADD, op.getarglist()[:], op.result) - + def optimize_INT_SUB_OVF(self, op): v1 = self.getvalue(op.getarg(0)) @@ -180,7 +180,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_SUB, op.getarglist()[:], op.result) - + def optimize_INT_MUL_OVF(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) @@ -198,7 +198,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to 
reuse self.pure(rop.INT_MUL, op.getarglist()[:], op.result) - + def optimize_INT_LT(self, op): v1 = self.getvalue(op.getarg(0)) @@ -269,6 +269,12 @@ v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) + def optimize_STRGETITEM(self, op): + self.emit_operation(op) + v1 = self.getvalue(op.result) + v1.intbound.make_ge(IntLowerBound(0)) + v1.intbound.make_lt(IntUpperBound(256)) + optimize_STRLEN = optimize_ARRAYLEN_GC optimize_UNICODELEN = optimize_ARRAYLEN_GC @@ -308,7 +314,7 @@ if r.box.same_constant(CONST_1): self.make_int_gt(op.getarg(0), op.getarg(1)) else: - self.make_int_le(op.getarg(0), op.getarg(1)) + self.make_int_le(op.getarg(0), op.getarg(1)) def propagate_bounds_INT_LE(self, op): r = self.getvalue(op.result) From commits-noreply at bitbucket.org Sat Mar 12 23:06:45 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 23:06:45 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. Message-ID: <20110312220645.ABBD9282BA1@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42543:cfea31534556 Date: 2011-03-12 17:06 -0500 http://bitbucket.org/pypy/pypy/changeset/cfea31534556/ Log: Merged upstream. From commits-noreply at bitbucket.org Sat Mar 12 23:14:58 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sat, 12 Mar 2011 23:14:58 +0100 (CET) Subject: [pypy-svn] pypy default: ll_find should be inlined. Message-ID: <20110312221458.573F436C204@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42544:81a38d2cd424 Date: 2011-03-12 17:14 -0500 http://bitbucket.org/pypy/pypy/changeset/81a38d2cd424/ Log: ll_find should be inlined. 
diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py --- a/pypy/rpython/lltypesystem/rstr.py +++ b/pypy/rpython/lltypesystem/rstr.py @@ -515,7 +515,6 @@ return count @classmethod - @purefunction def ll_find(cls, s1, s2, start, end): if start < 0: start = 0 @@ -529,11 +528,10 @@ return start elif m == 1: return cls.ll_find_char(s1, s2.chars[0], start, end) - + return cls.ll_search(s1, s2, start, end, FAST_FIND) @classmethod - @purefunction def ll_rfind(cls, s1, s2, start, end): if start < 0: start = 0 @@ -547,11 +545,10 @@ return end elif m == 1: return cls.ll_rfind_char(s1, s2.chars[0], start, end) - + return cls.ll_search(s1, s2, start, end, FAST_RFIND) @classmethod - @purefunction def ll_count(cls, s1, s2, start, end): if start < 0: start = 0 @@ -565,7 +562,7 @@ return end - start + 1 elif m == 1: return cls.ll_count_char(s1, s2.chars[0], start, end) - + res = cls.ll_search(s1, s2, start, end, FAST_COUNT) # For a few cases ll_search can return -1 to indicate an "impossible" # condition for a string match, count just returns 0 in these cases. From commits-noreply at bitbucket.org Sat Mar 12 23:58:34 2011 From: commits-noreply at bitbucket.org (etrepum) Date: Sat, 12 Mar 2011 23:58:34 +0100 (CET) Subject: [pypy-svn] pypy default: Add _scproxy module to fix Mac OS X urllib (issue669) Message-ID: <20110312225834.9860236C20C@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42545:20b985d28a76 Date: 2011-03-12 17:25 -0500 http://bitbucket.org/pypy/pypy/changeset/20b985d28a76/ Log: Add _scproxy module to fix Mac OS X urllib (issue669) diff --git a/lib_pypy/_scproxy.py b/lib_pypy/_scproxy.py new file mode 100644 --- /dev/null +++ b/lib_pypy/_scproxy.py @@ -0,0 +1,130 @@ +"""Helper methods for urllib to fetch the proxy configuration settings using +the SystemConfiguration framework. 
+ +""" +import sys +if sys.platform != 'darwin': + raise ImportError('Requires Mac OS X') + +from ctypes import c_int32, c_int64, c_void_p, c_char_p, c_int, cdll +from ctypes import pointer, create_string_buffer +from ctypes.util import find_library + +kCFNumberSInt32Type = 3 +kCFStringEncodingUTF8 = 134217984 + +def _CFSetup(): + sc = cdll.LoadLibrary(find_library("SystemConfiguration")) + cf = cdll.LoadLibrary(find_library("CoreFoundation")) + sctable = [ + ('SCDynamicStoreCopyProxies', [c_void_p], c_void_p), + ] + cftable = [ + ('CFArrayGetCount', [c_void_p], c_int64), + ('CFArrayGetValueAtIndex', [c_void_p, c_int64], c_void_p), + ('CFDictionaryGetValue', [c_void_p, c_void_p], c_void_p), + ('CFStringCreateWithCString', [c_void_p, c_char_p, c_int32], c_void_p), + ('CFStringGetLength', [c_void_p], c_int32), + ('CFStringGetCString', [c_void_p, c_char_p, c_int32, c_int32], c_int32), + ('CFNumberGetValue', [c_void_p, c_int, c_void_p], c_int32), + ('CFRelease', [c_void_p], None), + ] + scconst = [ + 'kSCPropNetProxiesExceptionsList', + 'kSCPropNetProxiesExcludeSimpleHostnames', + 'kSCPropNetProxiesHTTPEnable', + 'kSCPropNetProxiesHTTPProxy', + 'kSCPropNetProxiesHTTPPort', + 'kSCPropNetProxiesHTTPSEnable', + 'kSCPropNetProxiesHTTPSProxy', + 'kSCPropNetProxiesHTTPSPort', + 'kSCPropNetProxiesFTPEnable', + 'kSCPropNetProxiesFTPProxy', + 'kSCPropNetProxiesFTPPort', + 'kSCPropNetProxiesGopherEnable', + 'kSCPropNetProxiesGopherProxy', + 'kSCPropNetProxiesGopherPort', + ] + class CFProxy(object): + def __init__(self): + for mod, table in [(sc, sctable), (cf, cftable)]: + for fname, argtypes, restype in table: + func = getattr(mod, fname) + func.argtypes = argtypes + func.restype = restype + setattr(self, fname, func) + for k in scconst: + v = None + try: + v = c_void_p.in_dll(sc, k) + except ValueError: + v = None + setattr(self, k, v) + return CFProxy() +ffi = _CFSetup() + +def cfstring_to_pystring(value): + length = (ffi.CFStringGetLength(value) * 4) + 1 + buff = 
create_string_buffer(length) + ffi.CFStringGetCString(value, buff, length * 4, kCFStringEncodingUTF8) + return unicode(buff.value, 'utf8') + +def cfnum_to_int32(num): + result_ptr = pointer(c_int32(0)) + ffi.CFNumberGetValue(num, kCFNumberSInt32Type, result_ptr) + return result_ptr[0] + +def _get_proxy_settings(): + result = {'exclude_simple': False} + cfdct = ffi.SCDynamicStoreCopyProxies(None) + if not cfdct: + return result + try: + k = ffi.kSCPropNetProxiesExcludeSimpleHostnames + if k: + cfnum = ffi.CFDictionaryGetValue(cfdct, k) + if cfnum: + result['exclude_simple'] = bool(cfnum_to_int32(cfnum)) + k = ffi.kSCPropNetProxiesExceptionsList + if k: + cfarr = ffi.CFDictionaryGetValue(cfdct, k) + if cfarr: + lst = [] + for i in range(ffi.CFArrayGetCount(cfarr)): + cfstr = ffi.CFArrayGetValueAtIndex(cfarr, i) + if cfstr: + v = cfstring_to_pystring(cfstr) + else: + v = None + lst.append(v) + result['exceptions'] = lst + return result + finally: + ffi.CFRelease(cfdct) + +def _get_proxies(): + result = {} + cfdct = ffi.SCDynamicStoreCopyProxies(None) + if not cfdct: + return result + try: + for proto in 'HTTP', 'HTTPS', 'FTP', 'Gopher': + enabled_key = getattr(ffi, 'kSCPropNetProxies' + proto + 'Enable') + proxy_key = getattr(ffi, 'kSCPropNetProxies' + proto + 'Proxy') + port_key = getattr(ffi, 'kSCPropNetProxies' + proto + 'Port') + cfnum = ffi.CFDictionaryGetValue(cfdct, enabled_key) + if cfnum and cfnum_to_int32(cfnum): + cfhoststr = ffi.CFDictionaryGetValue(cfdct, proxy_key) + cfportnum = ffi.CFDictionaryGetValue(cfdct, port_key) + if cfhoststr: + host = cfstring_to_pystring(cfhoststr) + if host: + if cfportnum: + port = cfnum_to_int32(cfportnum) + v = u'http://%s:%d' % (host, port) + else: + v = u'http://%s' % (host,) + result[proto.lower()] = v + return result + finally: + ffi.CFRelease(cfdct) From commits-noreply at bitbucket.org Sun Mar 13 03:15:39 2011 From: commits-noreply at bitbucket.org (tav) Date: Sun, 13 Mar 2011 03:15:39 +0100 (CET) Subject: [pypy-svn] 
pypy default: Added support for PYPY_LOCALBASE in translator.platform. Message-ID: <20110313021539.6CB2536C201@codespeak.net> Author: tav Branch: Changeset: r42546:f93911d7479c Date: 2011-03-13 02:15 +0000 http://bitbucket.org/pypy/pypy/changeset/f93911d7479c/ Log: Added support for PYPY_LOCALBASE in translator.platform. diff --git a/pypy/translator/platform/posix.py b/pypy/translator/platform/posix.py --- a/pypy/translator/platform/posix.py +++ b/pypy/translator/platform/posix.py @@ -1,10 +1,9 @@ +"""Base support for POSIX-like platforms.""" -""" Base class for all posixish platforms -""" +import py, os +from pypy.tool import autopath from pypy.translator.platform import Platform, log, _run_subprocess -from pypy.tool import autopath -import py, os, sys class BasePosix(Platform): exe_ext = '' @@ -21,13 +20,13 @@ self.cc = cc def _libs(self, libraries): - return ['-l%s' % (lib,) for lib in libraries] + return ['-l%s' % lib for lib in libraries] def _libdirs(self, library_dirs): - return ['-L%s' % (ldir,) for ldir in library_dirs] + return ['-L%s' % ldir for ldir in library_dirs] def _includedirs(self, include_dirs): - return ['-I%s' % (idir,) for idir in include_dirs] + return ['-I%s' % idir for idir in include_dirs] def _linkfiles(self, link_files): return list(link_files) diff --git a/pypy/translator/platform/linux.py b/pypy/translator/platform/linux.py --- a/pypy/translator/platform/linux.py +++ b/pypy/translator/platform/linux.py @@ -1,6 +1,5 @@ +"""Support for Linux.""" -import py, os -from pypy.translator.platform import _run_subprocess from pypy.translator.platform.posix import BasePosix class BaseLinux(BasePosix): @@ -17,11 +16,11 @@ def _args_for_shared(self, args): return ['-shared'] + args - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): return self._pkg_config("libffi", "--cflags-only-I", ['/usr/include/libffi']) - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): return self._pkg_config("libffi", 
"--libs-only-L", ['/usr/lib/libffi']) @@ -29,6 +28,7 @@ class Linux(BaseLinux): shared_only = () # it seems that on 32-bit linux, compiling with -fPIC # gives assembler that asmgcc is not happy about. + def library_dirs_for_libffi_a(self): # places where we need to look for libffi.a return self.library_dirs_for_libffi() + ['/usr/lib'] diff --git a/pypy/translator/platform/windows.py b/pypy/translator/platform/windows.py --- a/pypy/translator/platform/windows.py +++ b/pypy/translator/platform/windows.py @@ -1,9 +1,11 @@ +"""Support for Windows.""" import py, os, sys, re -from pypy.translator.platform import CompilationError, ExecutionResult + +from pypy.tool import autopath +from pypy.translator.platform import CompilationError from pypy.translator.platform import log, _run_subprocess from pypy.translator.platform import Platform, posix -from pypy.tool import autopath def Windows(cc=None): if cc == 'mingw32': @@ -355,10 +357,10 @@ def _args_for_shared(self, args): return ['-shared'] + args - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): return [] - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): return [] def _handle_error(self, returncode, stdout, stderr, outname): diff --git a/pypy/translator/platform/maemo.py b/pypy/translator/platform/maemo.py --- a/pypy/translator/platform/maemo.py +++ b/pypy/translator/platform/maemo.py @@ -1,9 +1,11 @@ +"""Support for Maemo.""" + import py, os + +from pypy.tool.udir import udir +from pypy.translator.platform import ExecutionResult, log from pypy.translator.platform.linux import Linux -from pypy.translator.platform.posix import _run_subprocess, GnuMakefile -from pypy.translator.platform import ExecutionResult, log -from pypy.tool.udir import udir -from pypy.tool import autopath +from pypy.translator.platform.posix import GnuMakefile, _run_subprocess def check_scratchbox(): # in order to work, that file must exist and be executable by us @@ -74,11 +76,11 @@ env) return 
ExecutionResult(returncode, stdout, stderr) - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): # insanely obscure dir return ['/usr/include/arm-linux-gnueabi/'] - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): # on the other hand, library lands in usual place... return [] diff --git a/pypy/translator/platform/distutils_platform.py b/pypy/translator/platform/distutils_platform.py --- a/pypy/translator/platform/distutils_platform.py +++ b/pypy/translator/platform/distutils_platform.py @@ -1,6 +1,6 @@ +import py, os, sys from pypy.translator.platform import Platform, log, CompilationError -import py, sys, os from pypy.translator.tool import stdoutcapture def log_spawned_cmd(spawn): @@ -105,7 +105,7 @@ fdump.write(data) fdump.close() except (distutils.errors.CompileError, - distutils.errors.LinkError), e: + distutils.errors.LinkError): raise CompilationError('', data) except: print >>sys.stderr, data @@ -157,9 +157,9 @@ extra_preargs=self.link_extra, library_dirs=self.eci.library_dirs) - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): return ['/usr/include/libffi'] - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): return ['/usr/lib/libffi'] diff --git a/pypy/translator/platform/freebsd.py b/pypy/translator/platform/freebsd.py --- a/pypy/translator/platform/freebsd.py +++ b/pypy/translator/platform/freebsd.py @@ -1,5 +1,7 @@ +"""Support for FreeBSD.""" -import py, os +import os + from pypy.translator.platform import posix def get_env(key, default): @@ -10,7 +12,6 @@ def get_env_vector(key, default): string = get_env(key, default) - print key, string, default # XXX: handle quotes return string.split() @@ -42,10 +43,10 @@ res_lib_dirs.append(os.path.join(get_env("LOCALBASE", "/usr/local"), "lib")) return res_lib_dirs - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): return [os.path.join(get_env("LOCALBASE", "/usr/local"), "include")] - def 
library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): return [os.path.join(get_env("LOCALBASE", "/usr/local"), "lib")] class Freebsd_64(Freebsd): diff --git a/pypy/translator/platform/__init__.py b/pypy/translator/platform/__init__.py --- a/pypy/translator/platform/__init__.py +++ b/pypy/translator/platform/__init__.py @@ -1,17 +1,14 @@ +"""Platform-specific support for compiling/executing C sources.""" -""" Platform object that allows you to compile/execute C sources for given -platform. -""" +import py, os, sys -import sys, py, os +from pypy.tool.ansi_print import ansi_log +from pypy.tool.runsubprocess import run_subprocess as _run_subprocess +from pypy.tool.udir import udir -from pypy.tool.udir import udir -from pypy.tool.ansi_print import ansi_log log = py.log.Producer("platform") py.log.setconsumer("platform", ansi_log) -from pypy.tool.runsubprocess import run_subprocess as _run_subprocess - class CompilationError(Exception): def __init__(self, out, err): self.out = out.replace('\r\n', '\n') @@ -145,11 +142,17 @@ break return response_file + def preprocess_include_dirs(self, include_dirs): + if 'PYPY_LOCALBASE' in os.environ: + dirs = list(self._preprocess_include_dirs(include_dirs)) + return [os.environ['PYPY_LOCALBASE'] + '/include'] + dirs + return self._preprocess_include_dirs(include_dirs) + def _preprocess_include_dirs(self, include_dirs): return include_dirs def _compile_args_from_eci(self, eci, standalone): - include_dirs = self._preprocess_include_dirs(eci.include_dirs) + include_dirs = self.preprocess_include_dirs(eci.include_dirs) args = self._includedirs(include_dirs) if standalone: extra = self.standalone_only @@ -158,11 +161,17 @@ cflags = list(self.cflags) + list(extra) return (cflags + list(eci.compile_extra) + args) + def preprocess_library_dirs(self, library_dirs): + if 'PYPY_LOCALBASE' in os.environ: + dirs = list(self._preprocess_library_dirs(library_dirs)) + return [os.environ['PYPY_LOCALBASE'] + '/lib'] + dirs + return 
self._preprocess_library_dirs(library_dirs) + def _preprocess_library_dirs(self, library_dirs): return library_dirs def _link_args_from_eci(self, eci, standalone): - library_dirs = self._preprocess_library_dirs(eci.library_dirs) + library_dirs = self.preprocess_library_dirs(eci.library_dirs) library_dirs = self._libdirs(library_dirs) libraries = self._libs(eci.libraries) link_files = self._linkfiles(eci.link_files) @@ -198,9 +207,21 @@ # below are some detailed informations for platforms def include_dirs_for_libffi(self): + dirs = self._include_dirs_for_libffi() + if 'PYPY_LOCALBASE' in os.environ: + return [os.environ['PYPY_LOCALBASE'] + '/include'] + dirs + return dirs + + def library_dirs_for_libffi(self): + dirs = self._library_dirs_for_libffi() + if 'PYPY_LOCALBASE' in os.environ: + return [os.environ['PYPY_LOCALBASE'] + '/lib'] + dirs + return dirs + + def _include_dirs_for_libffi(self): raise NotImplementedError("Needs to be overwritten") - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): raise NotImplementedError("Needs to be overwritten") def check___thread(self): diff --git a/pypy/translator/platform/darwin.py b/pypy/translator/platform/darwin.py --- a/pypy/translator/platform/darwin.py +++ b/pypy/translator/platform/darwin.py @@ -1,5 +1,7 @@ +"""Support for OS X.""" -import py, os +import os + from pypy.translator.platform import posix class Darwin(posix.BasePosix): @@ -10,7 +12,7 @@ standalone_only = ('-mdynamic-no-pic',) shared_only = () - so_ext = 'so' + so_ext = 'dylib' # NOTE: GCC 4.2 will fail at runtime due to subtle issues, possibly # related to GC roots. Using LLVM-GCC or Clang will break the build. 
@@ -29,22 +31,10 @@ + ['-dynamiclib', '-undefined', 'dynamic_lookup'] + args) - def _preprocess_include_dirs(self, include_dirs): - res_incl_dirs = list(include_dirs) - res_incl_dirs.append('/usr/local/include') # Homebrew - res_incl_dirs.append('/opt/local/include') # MacPorts - return res_incl_dirs - - def _preprocess_library_dirs(self, library_dirs): - res_lib_dirs = list(library_dirs) - res_lib_dirs.append('/usr/local/lib') # Homebrew - res_lib_dirs.append('/opt/local/lib') # MacPorts - return res_lib_dirs - - def include_dirs_for_libffi(self): + def _include_dirs_for_libffi(self): return ['/usr/include/ffi'] - def library_dirs_for_libffi(self): + def _library_dirs_for_libffi(self): return ['/usr/lib'] def check___thread(self): From commits-noreply at bitbucket.org Sun Mar 13 11:10:46 2011 From: commits-noreply at bitbucket.org (tav) Date: Sun, 13 Mar 2011 11:10:46 +0100 (CET) Subject: [pypy-svn] pypy default: Fixed up ctypes doctests to reflect CArgObject's repr. Message-ID: <20110313101046.ACEF3282BAA@codespeak.net> Author: tav Branch: Changeset: r42547:0d801895ff56 Date: 2011-03-13 10:10 +0000 http://bitbucket.org/pypy/pypy/changeset/0d801895ff56/ Log: Fixed up ctypes doctests to reflect CArgObject's repr. 
diff --git a/lib-python/modified-2.7.0/ctypes/test/test_objects.py b/lib-python/modified-2.7.0/ctypes/test/test_objects.py --- a/lib-python/modified-2.7.0/ctypes/test/test_objects.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_objects.py @@ -22,7 +22,7 @@ >>> array[4] = 'foo bar' >>> array._objects -{'4': 'foo bar'} +{'4': } >>> array[4] 'foo bar' >>> @@ -47,9 +47,9 @@ >>> x.array[0] = 'spam spam spam' >>> x._objects -{'0:2': 'spam spam spam'} +{'0:2': } >>> x.array._b_base_._objects -{'0:2': 'spam spam spam'} +{'0:2': } >>> ''' From commits-noreply at bitbucket.org Sun Mar 13 11:50:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 11:50:01 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the malloc/free pair: if we hit "unknown hash function", then Message-ID: <20110313105001.916CC282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42548:ab70ac82034f Date: 2011-03-13 06:42 -0400 http://bitbucket.org/pypy/pypy/changeset/ab70ac82034f/ Log: Fix the malloc/free pair: if we hit "unknown hash function", then self.ctx is left in an uninitialized state, which will crash EVP_MD_CTX_cleanup(). diff --git a/pypy/module/_hashlib/interp_hashlib.py b/pypy/module/_hashlib/interp_hashlib.py --- a/pypy/module/_hashlib/interp_hashlib.py +++ b/pypy/module/_hashlib/interp_hashlib.py @@ -13,9 +13,10 @@ algorithms = ('md5', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512') class W_Hash(Wrappable): + ctx = lltype.nullptr(ropenssl.EVP_MD_CTX.TO) + def __init__(self, space, name): self.name = name - self.ctx = lltype.malloc(ropenssl.EVP_MD_CTX.TO, flavor='raw') # Allocate a lock for each HASH object. 
# An optimization would be to not release the GIL on small requests, @@ -26,12 +27,15 @@ if not digest: raise OperationError(space.w_ValueError, space.wrap("unknown hash function")) - ropenssl.EVP_DigestInit(self.ctx, digest) + ctx = lltype.malloc(ropenssl.EVP_MD_CTX.TO, flavor='raw') + ropenssl.EVP_DigestInit(ctx, digest) + self.ctx = ctx def __del__(self): # self.lock.free() - ropenssl.EVP_MD_CTX_cleanup(self.ctx) - lltype.free(self.ctx, flavor='raw') + if self.ctx: + ropenssl.EVP_MD_CTX_cleanup(self.ctx) + lltype.free(self.ctx, flavor='raw') def descr_repr(self, space): addrstring = self.getaddrstring(space) From commits-noreply at bitbucket.org Sun Mar 13 11:50:02 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 11:50:02 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110313105002.14BE4282BD8@codespeak.net> Author: Armin Rigo Branch: Changeset: r42549:88380409d98d Date: 2011-03-13 11:49 +0100 http://bitbucket.org/pypy/pypy/changeset/88380409d98d/ Log: merge heads From commits-noreply at bitbucket.org Sun Mar 13 11:50:02 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 11:50:02 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110313105002.7DE20282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42550:73de8074fc3d Date: 2011-03-13 11:49 +0100 http://bitbucket.org/pypy/pypy/changeset/73de8074fc3d/ Log: merge heads From commits-noreply at bitbucket.org Sun Mar 13 11:55:59 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 11:55:59 +0100 (CET) Subject: [pypy-svn] pypy default: Tentative fix for one issue reported by Greg Price: Message-ID: <20110313105559.1783F282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42551:72ce40f4803c Date: 2011-03-13 06:55 -0400 http://bitbucket.org/pypy/pypy/changeset/72ce40f4803c/ Log: Tentative fix for one issue reported by Greg Price: makes "list += object" work if object implements 
__radd__(). Thanks Philip Jenvey for the idea. diff --git a/pypy/objspace/std/listobject.py b/pypy/objspace/std/listobject.py --- a/pypy/objspace/std/listobject.py +++ b/pypy/objspace/std/listobject.py @@ -123,7 +123,12 @@ def inplace_add__List_ANY(space, w_list1, w_iterable2): - list_extend__List_ANY(space, w_list1, w_iterable2) + try: + list_extend__List_ANY(space, w_list1, w_iterable2) + except OperationError, e: + if e.match(space, space.w_TypeError): + raise FailedToImplement + raise return w_list1 def inplace_add__List_List(space, w_list1, w_list2): diff --git a/pypy/objspace/std/test/test_listobject.py b/pypy/objspace/std/test/test_listobject.py --- a/pypy/objspace/std/test/test_listobject.py +++ b/pypy/objspace/std/test/test_listobject.py @@ -548,6 +548,15 @@ assert l is l0 assert l == [1,2,3,4,5] + def test_iadd_subclass(self): + class Bar(object): + def __radd__(self, other): + return ('radd', self, other) + bar = Bar() + l1 = [1,2,3] + l1 += bar + assert l1 == ('radd', bar, [1,2,3]) + def test_imul(self): l = l0 = [4,3] l *= 2 From commits-noreply at bitbucket.org Sun Mar 13 12:03:13 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 13 Mar 2011 12:03:13 +0100 (CET) Subject: [pypy-svn] pypy jit-usable_retrace: propper overflow support (work in progress, curently broken) Message-ID: <20110313110313.9B53E282BD4@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42552:1f9c0df1bc07 Date: 2011-03-13 09:39 +0100 http://bitbucket.org/pypy/pypy/changeset/1f9c0df1bc07/ Log: propper overflow support (work in progress, curently broken) diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -493,9 +493,14 @@ op = self.optimizer.producer[box] ok = False - if op.is_always_pure() or op.is_ovf(): + if op.is_always_pure(): ok = True - # FIXME: Allow getitems if they are still in the heap cache 
+ elif op.is_ovf() and op in self.optimizer.overflow_guarded: + ok = True + elif op.has_no_side_effect(): + # FIXME: When are these safe to include? Allow getitems only + # if they are still in the heap cache? + ok = True elif op.getopnum() == rop.CALL: effectinfo = op.getdescr().get_extra_info() if effectinfo.extraeffect == EffectInfo.EF_LOOPINVARIANT: @@ -507,7 +512,11 @@ self.produce_box_in_short_preamble(arg) if self.short_operations is not None: self.short_operations.append(op) + guard = ResOperation(rop.GUARD_NO_OVERFLOW, [], None) + self.short_operations.append(guard) else: + import pdb; pdb.set_trace() + self.short_operations = None def create_short_preamble(self, preamble, loop): diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -9,13 +9,19 @@ """Keeps track of the bounds placed on integers by guards and remove redundant guards""" + def __init__(self): + self.overflow_guarded = {} + def setup(self): self.posponedop = None self.nextop = None - + self.optimizer.overflow_guarded = self.overflow_guarded + def reconstruct_for_next_iteration(self, optimizer, valuemap): assert self.posponedop is None - return self + new = OptIntBounds() + new.overflow_guarded = self.optimizer.overflow_guarded + return new def propagate_forward(self, op): if op.is_ovf(): @@ -159,6 +165,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_ADD, op.getarglist()[:], op.result) + self.optimizer.overflow_guarded[op] = True def optimize_INT_SUB_OVF(self, op): @@ -178,6 +185,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_SUB, op.getarglist()[:], op.result) + self.optimizer.overflow_guarded[op] = True def optimize_INT_MUL_OVF(self, op): v1 = 
self.getvalue(op.getarg(0)) @@ -196,6 +204,7 @@ if self.nextop.getopnum() == rop.GUARD_NO_OVERFLOW: # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_MUL, op.getarglist()[:], op.result) + self.optimizer.overflow_guarded[op] = True def optimize_INT_LT(self, op): diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -379,7 +379,7 @@ 'int_add': 1, 'int_sub': 1, 'int_gt': 1, 'jump': 1}) - def test_loop_invariant_mul_ovf(self): + def test_loop_invariant_mul_ovf1(self): myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x']) def f(x, y): res = 0 @@ -398,6 +398,26 @@ 'int_lshift': 1, 'jump': 1}) + def test_loop_invariant_mul_ovf2(self): + myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x']) + def f(x, y): + res = 0 + while y > 0: + myjitdriver.can_enter_jit(x=x, y=y, res=res) + myjitdriver.jit_merge_point(x=x, y=y, res=res) + b = y * 2 + try: + res += ovfcheck(x * x) + b + except OverflowError: + res += 1 + y -= 1 + return res + res = self.meta_interp(f, [sys.maxint, 7]) + assert res == f(sys.maxint, 7) + self.check_loop_count(1) + res = self.meta_interp(f, [6, 7]) + assert res == 308 + def test_loop_invariant_mul_bridge1(self): myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x']) def f(x, y): @@ -414,6 +434,43 @@ assert res == 3427 self.check_loop_count(3) + def test_loop_invariant_mul_bridge_ovf1(self): + myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x1', 'x2']) + def f(x1, x2, y): + res = 0 + while y > 0: + myjitdriver.can_enter_jit(x1=x1, x2=x2, y=y, res=res) + myjitdriver.jit_merge_point(x1=x1, x2=x2, y=y, res=res) + try: + res += ovfcheck(x1 * x1) + except OverflowError: + res += 1 + if y<32 and (y>>2)&1==0: + x1, x2 = x2, x1 + y -= 1 + return res + res = self.meta_interp(f, [6, sys.maxint, 48]) + assert res == f(6, sys.maxint, 48) + + def 
test_loop_invariant_mul_bridge_ovf2(self): + myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x1', 'x2', 'n']) + def f(x1, x2, n, y): + res = 0 + while y > 0: + myjitdriver.can_enter_jit(x1=x1, x2=x2, y=y, res=res, n=n) + myjitdriver.jit_merge_point(x1=x1, x2=x2, y=y, res=res, n=n) + try: + res += ovfcheck(x1 * x1) + except OverflowError: + res += 1 + #if y>2)&1==0: + y -= 1 + if (y>>2)&1==0: + x1, x2 = x2, x1 + return res + res = self.meta_interp(f, [6, sys.maxint, 32, 48]) + assert res == f(6, sys.maxint, 32, 48) + def test_loop_invariant_mul_bridge_maintaining1(self): myjitdriver = JitDriver(greens = [], reds = ['y', 'res', 'x']) def f(x, y): diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -13,8 +13,11 @@ This includes already executed operations and constants. """ - def reconstruct_for_next_iteration(self, optimizer, valuemap): - return self + def setup(self): + pass + + def reconstruct_for_next_iteration(self, optimizer, valuemap): + return OptRewrite() def propagate_forward(self, op): args = self.optimizer.make_args_key(op) From commits-noreply at bitbucket.org Sun Mar 13 12:03:14 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 13 Mar 2011 12:03:14 +0100 (CET) Subject: [pypy-svn] pypy default: simplifying debugging Message-ID: <20110313110314.28B53282BD4@codespeak.net> Author: Hakan Ardo Branch: Changeset: r42553:b4860497f310 Date: 2011-03-13 11:00 +0100 http://bitbucket.org/pypy/pypy/changeset/b4860497f310/ Log: simplifying debugging diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -40,7 +40,9 @@ cmdline += ['--jit', '%s=%s' % (key, value)] cmdline.append(str(self.filepath)) # + print cmdline, logfile env={'PYPYLOG': 
'jit-log-opt,jit-summary:' + str(logfile)} + #env={'PYPYLOG': ':' + str(logfile)} pipe = subprocess.Popen(cmdline, env=env, stdout=subprocess.PIPE, From commits-noreply at bitbucket.org Sun Mar 13 12:03:15 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 13 Mar 2011 12:03:15 +0100 (CET) Subject: [pypy-svn] pypy default: recreated the issue from test_pypy_c_new.test_f1 Message-ID: <20110313110315.E2CCF282BDD@codespeak.net> Author: Hakan Ardo Branch: Changeset: r42554:b7dfa411c360 Date: 2011-03-13 12:02 +0100 http://bitbucket.org/pypy/pypy/changeset/b7dfa411c360/ Log: recreated the issue from test_pypy_c_new.test_f1 diff --git a/pypy/jit/metainterp/test/test_loop.py b/pypy/jit/metainterp/test/test_loop.py --- a/pypy/jit/metainterp/test/test_loop.py +++ b/pypy/jit/metainterp/test/test_loop.py @@ -400,6 +400,54 @@ res = self.meta_interp(f, [25, th]) assert res == expected + def test_nested_loops_discovered_by_bridge_virtual(self): + # Same loop as above, but with virtuals + class A: + def __init__(self, val): + self.val = val + def add(self, val): + return A(self.val + val) + myjitdriver = JitDriver(greens = ['pos'], reds = ['i', 'j', 'n', 'x']) + bytecode = "IzJxji" + def f(nval, threshold): + myjitdriver.set_param('threshold', threshold) + i, j, x = A(0), A(0), A(0) + n = A(nval) + pos = 0 + op = '-' + while pos < len(bytecode): + myjitdriver.jit_merge_point(pos=pos, i=i, j=j, n=n, x=x) + op = bytecode[pos] + if op == 'z': + j = A(0) + elif op == 'i': + i = i.add(1) + pos = 0 + myjitdriver.can_enter_jit(pos=pos, i=i, j=j, n=n, x=x) + continue + elif op == 'j': + j = j.add(1) + pos = 2 + myjitdriver.can_enter_jit(pos=pos, i=i, j=j, n=n, x=x) + continue + elif op == 'I': + if not (i.val < n.val): + pos = 5 + elif op == 'J': + if not (j.val <= i.val): + pos = 4 + elif op == 'x': + x = x.add(i.val & j.val) + + pos += 1 + + return x.val + + for th in (5, 3, 1, 2, 4): # Start with the interesting case + expected = f(25, th) + res = self.meta_interp(f, 
[25, th]) + assert res == expected + def test_two_bridged_loops(self): myjitdriver = JitDriver(greens = ['pos'], reds = ['i', 'n', 's', 'x']) bytecode = "zI7izI8i" diff --git a/pypy/jit/metainterp/test/test_virtual.py b/pypy/jit/metainterp/test/test_virtual.py --- a/pypy/jit/metainterp/test/test_virtual.py +++ b/pypy/jit/metainterp/test/test_virtual.py @@ -740,6 +740,23 @@ return i.value + j.value assert self.meta_interp(f, []) == 20 + def test_virtual_skipped_by_bridge(self): + myjitdriver = JitDriver(greens = [], reds = ['n', 'm', 'i', 'x']) + def f(n, m): + x = self._new() + x.value = 0 + i = 0 + while i < n: + myjitdriver.can_enter_jit(n=n, m=m, i=i, x=x) + myjitdriver.jit_merge_point(n=n, m=m, i=i, x=x) + if i&m != m: + newx = self._new() + newx.value = x.value + i + x = newx + i = i + 1 + return x.value + res = self.meta_interp(f, [0x1F, 0x11]) + assert res == f(0x1F, 0x11) class VirtualMiscTests: From commits-noreply at bitbucket.org Sun Mar 13 12:03:16 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 13 Mar 2011 12:03:16 +0100 (CET) Subject: [pypy-svn] pypy default: hg merge Message-ID: <20110313110316.A8BC6282BDE@codespeak.net> Author: Hakan Ardo Branch: Changeset: r42555:6d62523eb3a5 Date: 2011-03-13 12:02 +0100 http://bitbucket.org/pypy/pypy/changeset/6d62523eb3a5/ Log: hg merge From commits-noreply at bitbucket.org Sun Mar 13 12:58:45 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 12:58:45 +0100 (CET) Subject: [pypy-svn] pypy default: Fix on 64-bits. Message-ID: <20110313115845.4CB0D36C201@codespeak.net> Author: Armin Rigo Branch: Changeset: r42556:ea3243c39b7b Date: 2011-03-13 07:58 -0400 http://bitbucket.org/pypy/pypy/changeset/ea3243c39b7b/ Log: Fix on 64-bits. 
diff --git a/pypy/objspace/fake/objspace.py b/pypy/objspace/fake/objspace.py --- a/pypy/objspace/fake/objspace.py +++ b/pypy/objspace/fake/objspace.py @@ -25,7 +25,7 @@ str_dummy = make_dummy('foo', 'bar') bool_dummy = make_dummy(True, False) unicode_dummy = make_dummy(u'abc', u'cde') -bigint_dummy = make_dummy(rbigint([0]), rbigint([1])) +bigint_dummy = make_dummy(rbigint.fromint(0), rbigint.fromint(1)) class FakeObjSpace(ObjSpace): w_None = W_Object() From commits-noreply at bitbucket.org Sun Mar 13 15:04:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 15:04:01 +0100 (CET) Subject: [pypy-svn] pypy default: Split the '__flags__' interp-level field of W_TypeObject Message-ID: <20110313140401.5DE14282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42557:bd3c564c333b Date: 2011-03-13 08:26 -0400 http://bitbucket.org/pypy/pypy/changeset/bd3c564c333b/ Log: Split the '__flags__' interp-level field of W_TypeObject into individual boolean fields, which looks better anyway in (R)Python code. This allows a fix: _ABSTRACT is not an immutable flag, whereas the others are. 
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -32,6 +32,7 @@ from pypy.rlib.objectmodel import specialize from pypy.module.__builtin__.abstractinst import abstract_issubclass_w from pypy.module.__builtin__.interp_classobj import W_ClassObject +from pypy.rlib import jit WARN_ABOUT_MISSING_SLOT_FUNCTIONS = False @@ -267,6 +268,7 @@ PyMember_SetOne(space, w_self, self.member, w_value) class W_PyCTypeObject(W_TypeObject): + @jit.dont_look_inside def __init__(self, space, pto): bases_w = space.fixedview(from_ref(space, pto.c_tp_bases)) dict_w = {} @@ -285,7 +287,7 @@ W_TypeObject.__init__(self, space, extension_name, bases_w or [space.w_object], dict_w) - self.__flags__ = _CPYTYPE + self.flag_cpytype = True @bootstrap_function def init_typeobject(space): diff --git a/pypy/objspace/std/typeobject.py b/pypy/objspace/std/typeobject.py --- a/pypy/objspace/std/typeobject.py +++ b/pypy/objspace/std/typeobject.py @@ -10,13 +10,9 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib.objectmodel import current_object_addr_as_int, compute_hash from pypy.rlib.jit import hint, purefunction_promote, we_are_jitted -from pypy.rlib.jit import purefunction +from pypy.rlib.jit import purefunction, dont_look_inside from pypy.rlib.rarithmetic import intmask, r_uint -from copy_reg import _HEAPTYPE -_CPYTYPE = 1 # used for non-heap types defined in C -_ABSTRACT = 1 << 20 - # from compiler/misc.py MANGLE_LEN = 256 # magic constant from compile.c @@ -80,7 +76,9 @@ # other changes to the type (e.g. 
the name) leave it unchanged _version_tag = None - _immutable_fields_ = ["__flags__", + _immutable_fields_ = ["flag_heaptype", + "flag_cpytype", + # flag_abstract is not immutable 'needsdel', 'weakrefable', 'hasdict', @@ -98,6 +96,7 @@ # of the __new__ is an instance of the type w_bltin_new = None + @dont_look_inside def __init__(w_self, space, name, bases_w, dict_w, overridetypedef=None): w_self.space = space @@ -110,7 +109,9 @@ w_self.weakrefable = False w_self.w_doc = space.w_None w_self.weak_subclasses = [] - w_self.__flags__ = 0 # or _HEAPTYPE or _CPYTYPE + w_self.flag_heaptype = False + w_self.flag_cpytype = False + w_self.flag_abstract = False w_self.instancetypedef = overridetypedef if overridetypedef is not None: @@ -368,19 +369,16 @@ raise UnwrapError(w_self) def is_heaptype(w_self): - return w_self.__flags__ & _HEAPTYPE + return w_self.flag_heaptype def is_cpytype(w_self): - return w_self.__flags__ & _CPYTYPE + return w_self.flag_cpytype def is_abstract(w_self): - return w_self.__flags__ & _ABSTRACT + return w_self.flag_abstract def set_abstract(w_self, abstract): - if abstract: - w_self.__flags__ |= _ABSTRACT - else: - w_self.__flags__ &= ~_ABSTRACT + w_self.flag_abstract = bool(abstract) def issubtype(w_self, w_type): w_self = hint(w_self, promote=True) @@ -633,11 +631,12 @@ w_self.bases_w = [w_self.space.w_object] w_bestbase = check_and_find_best_base(w_self.space, w_self.bases_w) w_self.instancetypedef = w_bestbase.instancetypedef - w_self.__flags__ = _HEAPTYPE + w_self.flag_heaptype = True for w_base in w_self.bases_w: if not isinstance(w_base, W_TypeObject): continue - w_self.__flags__ |= w_base.__flags__ + w_self.flag_cpytype |= w_base.flag_cpytype + w_self.flag_abstract |= w_base.flag_abstract hasoldstylebase = copy_flags_from_bases(w_self, w_bestbase) create_all_slots(w_self, hasoldstylebase) diff --git a/pypy/module/_stackless/interp_coroutine.py b/pypy/module/_stackless/interp_coroutine.py --- a/pypy/module/_stackless/interp_coroutine.py +++ 
b/pypy/module/_stackless/interp_coroutine.py @@ -320,21 +320,22 @@ return space.newtuple(items) def makeStaticMethod(module, classname, funcname): + "NOT_RPYTHON" space = module.space w_klass = space.getattr(space.wrap(module), space.wrap(classname)) # HACK HACK HACK # make the typeobject mutable for a while - from pypy.objspace.std.typeobject import _HEAPTYPE, W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject assert isinstance(w_klass, W_TypeObject) - old_flags = w_klass.__flags__ - w_klass.__flags__ |= _HEAPTYPE + old_flag = w_klass.flag_heaptype + w_klass.flag_heaptype = True space.appexec([w_klass, space.wrap(funcname)], """ (klass, funcname): func = getattr(klass, funcname) setattr(klass, funcname, staticmethod(func.im_func)) """) - w_klass.__flags__ = old_flags + w_klass.flag_heaptype = old_flag def post_install(module): makeStaticMethod(module, 'coroutine', 'getcurrent') diff --git a/pypy/module/_stackless/interp_greenlet.py b/pypy/module/_stackless/interp_greenlet.py --- a/pypy/module/_stackless/interp_greenlet.py +++ b/pypy/module/_stackless/interp_greenlet.py @@ -199,6 +199,7 @@ return space.getattr(w_module, space.wrap(name)) def post_install(module): + "NOT_RPYTHON" makeStaticMethod(module, 'greenlet', 'getcurrent') space = module.space state = AppGreenlet._get_state(space) @@ -206,10 +207,10 @@ w_greenlet = get(space, 'greenlet') # HACK HACK HACK # make the typeobject mutable for a while - from pypy.objspace.std.typeobject import _HEAPTYPE, W_TypeObject + from pypy.objspace.std.typeobject import W_TypeObject assert isinstance(w_greenlet, W_TypeObject) - old_flags = w_greenlet.__flags__ - w_greenlet.__flags__ |= _HEAPTYPE + old_flag = w_greenlet.flag_heaptype + w_greenlet.flag_heaptype = True space.appexec([w_greenlet, state.w_GreenletExit, state.w_GreenletError], """ @@ -217,7 +218,7 @@ greenlet.GreenletExit = exit greenlet.error = error """) - w_greenlet.__flags__ = old_flags + w_greenlet.flag_heaptype = old_flag AppGreenlet.typedef 
= TypeDef("greenlet", __new__ = interp2app(AppGreenlet.descr_method__new__.im_func), diff --git a/pypy/objspace/std/typetype.py b/pypy/objspace/std/typetype.py --- a/pypy/objspace/std/typetype.py +++ b/pypy/objspace/std/typetype.py @@ -190,8 +190,16 @@ return space.get(w_result, space.w_None, w_type) def descr__flags(space, w_type): + from copy_reg import _HEAPTYPE + _CPYTYPE = 1 # used for non-heap types defined in C + _ABSTRACT = 1 << 20 + # w_type = _check(space, w_type) - return space.wrap(w_type.__flags__) + flags = 0 + if w_type.flag_heaptype: flags |= _HEAPTYPE + if w_type.flag_cpytype: flags |= _CPYTYPE + if w_type.flag_abstract: flags |= _ABSTRACT + return space.wrap(flags) def descr_get__module(space, w_type): w_type = _check(space, w_type) From commits-noreply at bitbucket.org Sun Mar 13 15:04:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 15:04:01 +0100 (CET) Subject: [pypy-svn] pypy default: Fix import. Message-ID: <20110313140401.E1C0D282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42558:55dee680172c Date: 2011-03-13 13:30 +0100 http://bitbucket.org/pypy/pypy/changeset/55dee680172c/ Log: Fix import. 
diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -4,7 +4,7 @@ from pypy.rpython.lltypesystem import rffi, lltype from pypy.rpython.annlowlevel import llhelper from pypy.interpreter.baseobjspace import DescrMismatch -from pypy.objspace.std.typeobject import W_TypeObject, _CPYTYPE +from pypy.objspace.std.typeobject import W_TypeObject from pypy.interpreter.typedef import GetSetProperty from pypy.module.cpyext.api import ( cpython_api, cpython_struct, bootstrap_function, Py_ssize_t, From commits-noreply at bitbucket.org Sun Mar 13 17:21:09 2011 From: commits-noreply at bitbucket.org (mitsuhiko) Date: Sun, 13 Mar 2011 17:21:09 +0100 (CET) Subject: [pypy-svn] jitviewer default: Automatically guess PYTHONPATH for pypy module Message-ID: <20110313162109.08D9F282BD4@codespeak.net> Author: Armin Ronacher Branch: Changeset: r104:d4858c532819 Date: 2011-03-13 12:20 -0400 http://bitbucket.org/pypy/jitviewer/changeset/d4858c532819/ Log: Automatically guess PYTHONPATH for pypy module diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -16,6 +16,17 @@ except ImportError: sys.path.insert(0, os.path.abspath(os.path.join(__file__, '..', '..'))) +try: + import pypy +except ImportError: + import __pypy__ + sys.path.append(os.path.join(__pypy__.__file__, '..', '..', '..')) + try: + import pypy + except ImportError: + raise ImportError('Could not import pypy module, make sure to ' + 'add the pypy module to PYTHONPATH') + import cgi import flask import inspect From commits-noreply at bitbucket.org Sun Mar 13 17:21:09 2011 From: commits-noreply at bitbucket.org (mitsuhiko) Date: Sun, 13 Mar 2011 17:21:09 +0100 (CET) Subject: [pypy-svn] jitviewer default: Merged upstream. 
Message-ID: <20110313162109.6389C282BD8@codespeak.net> Author: Armin Ronacher Branch: Changeset: r105:259f6dd02670 Date: 2011-03-13 12:20 -0400 http://bitbucket.org/pypy/jitviewer/changeset/259f6dd02670/ Log: Merged upstream. diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -1,11 +1,14 @@ #!/usr/bin/env pypy-c """ A web-based browser of your log files. Run by -jitviewer.py [port] + jitviewer.py [port] and point your browser to http://localhost:5000 +Demo logfile available in this directory as 'log'. -Demo logfile available in this directory as 'log'. +To produce the logfile for your program, run: + + PYPYLOG=jit-log-opt,jit-backend-counts:mylogfile.log pypy-c myapp.py """ import sys From commits-noreply at bitbucket.org Sun Mar 13 18:15:44 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 18:15:44 +0100 (CET) Subject: [pypy-svn] pypy default: A failing test. Message-ID: <20110313171544.9BCF1282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42559:36a4bd1865c6 Date: 2011-03-13 11:00 -0400 http://bitbucket.org/pypy/pypy/changeset/36a4bd1865c6/ Log: A failing test. 
diff --git a/pypy/rpython/test/test_rpbc.py b/pypy/rpython/test/test_rpbc.py --- a/pypy/rpython/test/test_rpbc.py +++ b/pypy/rpython/test/test_rpbc.py @@ -617,6 +617,32 @@ assert self.read_attr(res, "z") == -7645 assert self.read_attr(res, "extra") == 42 + def test_call_classes_with_noarg_init(self): + class A: + foo = 21 + class B(A): + foo = 22 + class C(A): + def __init__(self): + self.foo = 42 + class D(A): + def __init__(self): + self.foo = 43 + def f(i): + if i == 1: + cls = B + elif i == 2: + cls = D + else: + cls = C + return cls().foo + res = self.interpret(f, [0]) + assert res == 42 + res = self.interpret(f, [1]) + assert res == 22 + res = self.interpret(f, [2]) + assert res == 43 + def test_conv_from_None(self): class A(object): pass def none(): From commits-noreply at bitbucket.org Sun Mar 13 18:15:46 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 13 Mar 2011 18:15:46 +0100 (CET) Subject: [pypy-svn] pypy default: Bah, it's too messy to implement. So let's just say it's not RPython Message-ID: <20110313171546.0D0A3282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42560:2fb044e92956 Date: 2011-03-13 13:15 -0400 http://bitbucket.org/pypy/pypy/changeset/2fb044e92956/ Log: Bah, it's too messy to implement. So let's just say it's not RPython to have an __init__ in the subclasses but not in the parent class in that case. diff --git a/pypy/annotation/description.py b/pypy/annotation/description.py --- a/pypy/annotation/description.py +++ b/pypy/annotation/description.py @@ -665,6 +665,21 @@ else: # call to multiple classes: specialization not supported classdefs = [desc.getuniqueclassdef() for desc in descs] + # If some of the classes have an __init__ and others not, then + # we complain, even though in theory it could work if all the + # __init__s take no argument. But it's messy to implement, so + # let's just say it is not RPython and you have to add an empty + # __init__ to your base class. 
+ has_init = False + for desc in descs: + s_init = desc.s_read_attribute('__init__') + has_init |= isinstance(s_init, SomePBC) + basedesc = ClassDesc.getcommonbase(descs) + s_init = basedesc.s_read_attribute('__init__') + parent_has_init = isinstance(s_init, SomePBC) + if has_init and not parent_has_init: + raise Exception("some subclasses among %r declare __init__()," + " but not the common parent class" % (descs,)) # make a PBC of MethodDescs, one for the __init__ of each class initdescs = [] for desc, classdef in zip(descs, classdefs): @@ -687,6 +702,23 @@ args, s_None) consider_call_site = staticmethod(consider_call_site) + def getallbases(self): + desc = self + while desc is not None: + yield desc + desc = desc.basedesc + + def getcommonbase(descs): + commondesc = descs[0] + for desc in descs[1:]: + allbases = set(commondesc.getallbases()) + while desc not in allbases: + assert desc is not None, "no common base for %r" % (descs,) + desc = desc.basedesc + commondesc = desc + return commondesc + getcommonbase = staticmethod(getcommonbase) + def rowkey(self): return self diff --git a/pypy/annotation/test/test_description.py b/pypy/annotation/test/test_description.py new file mode 100644 --- /dev/null +++ b/pypy/annotation/test/test_description.py @@ -0,0 +1,22 @@ +from pypy.annotation.description import ClassDesc + +class FakeBookkeeper: + def __init__(self): + self.seen = {} + def getdesc(self, cls): + if cls not in self.seen: + self.seen[cls] = ClassDesc(self, cls) + return self.seen[cls] + +def test_getcommonbase(): + class Base(object): pass + class A(Base): pass + class B(A): pass + class C(B): pass + class D(A): pass + bk = FakeBookkeeper() + dA = bk.getdesc(A) + dB = bk.getdesc(B) + dC = bk.getdesc(C) + dD = bk.getdesc(D) + assert ClassDesc.getcommonbase([dC, dD]) is dA diff --git a/pypy/annotation/test/test_annrpython.py b/pypy/annotation/test/test_annrpython.py --- a/pypy/annotation/test/test_annrpython.py +++ b/pypy/annotation/test/test_annrpython.py @@ 
-1060,9 +1060,11 @@ assert s.const == True def test_alloc_like(self): - class C1(object): + class Base(object): pass - class C2(object): + class C1(Base): + pass + class C2(Base): pass def inst(cls): @@ -3438,6 +3440,28 @@ a = self.RPythonAnnotator() a.build_types(f, [int]) + def test_call_classes_with_noarg_init(self): + class A: + foo = 21 + class B(A): + foo = 22 + class C(A): + def __init__(self): + self.foo = 42 + class D(A): + def __init__(self): + self.foo = 43 + def f(i): + if i == 1: + cls = B + elif i == 2: + cls = D + else: + cls = C + return cls().foo + a = self.RPythonAnnotator() + raises(Exception, a.build_types, f, [int]) + def g(n): return [0,1,2,n] diff --git a/pypy/rpython/test/test_rpbc.py b/pypy/rpython/test/test_rpbc.py --- a/pypy/rpython/test/test_rpbc.py +++ b/pypy/rpython/test/test_rpbc.py @@ -617,32 +617,6 @@ assert self.read_attr(res, "z") == -7645 assert self.read_attr(res, "extra") == 42 - def test_call_classes_with_noarg_init(self): - class A: - foo = 21 - class B(A): - foo = 22 - class C(A): - def __init__(self): - self.foo = 42 - class D(A): - def __init__(self): - self.foo = 43 - def f(i): - if i == 1: - cls = B - elif i == 2: - cls = D - else: - cls = C - return cls().foo - res = self.interpret(f, [0]) - assert res == 42 - res = self.interpret(f, [1]) - assert res == 22 - res = self.interpret(f, [2]) - assert res == 43 - def test_conv_from_None(self): class A(object): pass def none(): From commits-noreply at bitbucket.org Sun Mar 13 19:23:06 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 19:23:06 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: (alex, fijal): Remove some OPTIMIZE_ISMPLE. Message-ID: <20110313182306.1C2A1282BD4@codespeak.net> Author: Alex Gaynor Branch: enable-opts Changeset: r42562:bf1112c77983 Date: 2011-03-13 14:22 -0400 http://bitbucket.org/pypy/pypy/changeset/bf1112c77983/ Log: (alex, fijal): Remove some OPTIMIZE_ISMPLE. 
diff --git a/pypy/jit/tl/tla/test_tla.py b/pypy/jit/tl/tla/test_tla.py --- a/pypy/jit/tl/tla/test_tla.py +++ b/pypy/jit/tl/tla/test_tla.py @@ -61,7 +61,7 @@ tla.RETURN # stack depth == 2 here, error! ] py.test.raises(AssertionError, "interp(code, tla.W_IntObject(234))") - + def test_add(): code = [ tla.CONST_INT, 20, @@ -90,7 +90,7 @@ ] res = interp(code, tla.W_IntObject(0)) assert res.intvalue == 123 - + res = interp(code, tla.W_IntObject(1)) assert res.intvalue == 234 @@ -138,7 +138,7 @@ tla.CONST_INT, 3, tla.MUL, tla.RETURN - ] + ] res = interp(code, tla.W_StringObject('foo ')) assert res.strvalue == 'foo foo foo ' @@ -153,10 +153,9 @@ assert isinstance(res, tla.W_FloatObject) assert res.floatval == 2.5 -# ____________________________________________________________ +# ____________________________________________________________ from pypy.jit.metainterp.test.test_basic import LLJitMixin -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_NO_UNROLL class TestLLtype(LLJitMixin): def test_loop(self): @@ -177,6 +176,5 @@ w_result = interp(code, tla.W_IntObject(intvalue)) assert isinstance(w_result, tla.W_IntObject) return w_result.intvalue - res = self.meta_interp(interp_w, [42], listops=True, - optimizer=OPTIMIZER_NO_UNROLL) + res = self.meta_interp(interp_w, [42], listops=True) assert res == 0 diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -11,7 +11,7 @@ from pypy.rlib import rgc from pypy.rpython.lltypesystem import lltype, llmemory, rffi from pypy.rpython.lltypesystem.lloperation import llop -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, dont_look_inside +from pypy.rlib.jit import JitDriver, dont_look_inside from pypy.rlib.jit import purefunction, unroll_safe from pypy.jit.backend.x86.runner import CPU386 from pypy.jit.backend.llsupport.gc import GcRefList, GcRootMap_asmgcc @@ -88,7 +88,7 @@ 
ann.build_types(f, [s_list_of_strings], main_entry_point=True) t.buildrtyper().specialize() if kwds['jit']: - apply_jit(t, optimizer=OPTIMIZER_SIMPLE) + apply_jit(t, enable_opts='') cbuilder = genc.CStandaloneBuilder(t, f, t.config) cbuilder.generate_source() cbuilder.compile() @@ -158,7 +158,7 @@ x.foo = 5 return weakref.ref(x) def main_allfuncs(name, n, x): - num = name_to_func[name] + num = name_to_func[name] n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s = funcs[num][0](n, x) while n > 0: myjitdriver.can_enter_jit(num=num, n=n, x=x, x0=x0, x1=x1, @@ -426,7 +426,7 @@ def define_compile_framework_external_exception_handling(cls): def before(n, x): x = X(0) - return n, x, None, None, None, None, None, None, None, None, None, None + return n, x, None, None, None, None, None, None, None, None, None, None @dont_look_inside def g(x): @@ -458,7 +458,7 @@ def test_compile_framework_external_exception_handling(self): self.run('compile_framework_external_exception_handling') - + def define_compile_framework_bug1(self): @purefunction def nonmoving(): diff --git a/pypy/jit/backend/x86/test/test_ztranslation.py b/pypy/jit/backend/x86/test/test_ztranslation.py --- a/pypy/jit/backend/x86/test/test_ztranslation.py +++ b/pypy/jit/backend/x86/test/test_ztranslation.py @@ -1,6 +1,6 @@ import py, os, sys from pypy.tool.udir import udir -from pypy.rlib.jit import JitDriver, OPTIMIZER_FULL, unroll_parameters +from pypy.rlib.jit import JitDriver, unroll_parameters from pypy.rlib.jit import PARAMETERS, dont_look_inside from pypy.rlib.jit import hint from pypy.jit.metainterp.jitprof import Profiler @@ -99,10 +99,10 @@ class Thing(object): def __init__(self, val): self.val = val - + class Frame(object): _virtualizable2_ = ['thing'] - + driver = JitDriver(greens = ['codeno'], reds = ['i', 'frame'], virtualizables = ['frame'], get_printable_location = lambda codeno: str(codeno)) diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py --- a/pypy/jit/metainterp/warmspot.py 
+++ b/pypy/jit/metainterp/warmspot.py @@ -30,7 +30,8 @@ # ____________________________________________________________ # Bootstrapping -def apply_jit(translator, backend_name="auto", inline=False, **kwds): +def apply_jit(translator, backend_name="auto", inline=False, + enable_opts=ALL_OPTS_NAMES, **kwds): if 'CPUClass' not in kwds: from pypy.jit.backend.detect_cpu import getcpuclass kwds['CPUClass'] = getcpuclass(backend_name) @@ -45,6 +46,7 @@ **kwds) for jd in warmrunnerdesc.jitdrivers_sd: jd.warmstate.set_param_inlining(inline) + jd.warmstate.set_param_enable_opts(enable_opts) warmrunnerdesc.finish() translator.warmrunnerdesc = warmrunnerdesc # for later debugging @@ -275,7 +277,7 @@ stats = history.NoStats() else: stats = history.Stats() - self.stats = stats + self.stats = stats if translate_support_code: self.annhelper = MixLevelHelperAnnotator(self.translator.rtyper) annhelper = self.annhelper @@ -329,7 +331,7 @@ return 'DoneWithThisFrameVoid()' class DoneWithThisFrameInt(JitException): - def __init__(self, result): + def __init__(self, result): assert lltype.typeOf(result) is lltype.Signed self.result = result def __str__(self): @@ -779,14 +781,14 @@ if self.metainterp_sd.profiler.initialized: self.metainterp_sd.profiler.finish() self.metainterp_sd.cpu.finish_once() - + if self.cpu.translate_support_code: call_final_function(self.translator, finish, annhelper = self.annhelper) def rewrite_set_param(self): from pypy.rpython.lltypesystem.rstr import STR - + closures = {} graphs = self.translator.graphs _, PTR_SET_PARAM_FUNCTYPE = self.cpu.ts.get_FuncType([lltype.Signed], diff --git a/pypy/jit/backend/x86/test/test_basic.py b/pypy/jit/backend/x86/test/test_basic.py --- a/pypy/jit/backend/x86/test/test_basic.py +++ b/pypy/jit/backend/x86/test/test_basic.py @@ -3,7 +3,7 @@ from pypy.jit.metainterp.warmspot import ll_meta_interp from pypy.jit.metainterp.test import test_basic from pypy.jit.codewriter.policy import StopAtXPolicy -from pypy.rlib.jit import 
JitDriver, OPTIMIZER_SIMPLE +from pypy.rlib.jit import JitDriver class Jit386Mixin(test_basic.LLJitMixin): type_system = 'lltype' @@ -29,7 +29,7 @@ n -= x.arg x.arg = 6 # prevents 'x.arg' from being annotated as constant return n - res = self.meta_interp(f, [31], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [31], enable_opts='') assert res == -4 def test_r_dict(self): From commits-noreply at bitbucket.org Sun Mar 13 19:30:20 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 19:30:20 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (alex, mitsuhiko, fijal): Move timer function to rlib and use assembler. Message-ID: <20110313183020.657C7282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42563:7cd4d3e4d745 Date: 2011-03-13 14:30 -0400 http://bitbucket.org/pypy/pypy/changeset/7cd4d3e4d745/ Log: (alex, mitsuhiko, fijal): Move timer function to rlib and use assembler. diff --git a/pypy/module/_lsprof/interp_lsprof.py b/pypy/module/_lsprof/interp_lsprof.py --- a/pypy/module/_lsprof/interp_lsprof.py +++ b/pypy/module/_lsprof/interp_lsprof.py @@ -8,21 +8,11 @@ interp_attrproperty) from pypy.rlib import jit from pypy.rpython.lltypesystem import rffi -from pypy.tool.autopath import pypydir import time, sys # timer -eci = rffi.ExternalCompilationInfo( - include_dirs = [str(py.path.local(pypydir).join('translator', 'c'))], - includes=["src/timer.h"], - separate_module_sources = [' '], - ) -read_timestamp_double = rffi.llexternal( - 'pypy_read_timestamp_double', [], rffi.DOUBLE, - compilation_info=eci, _nowrapper=True) - class W_StatsEntry(Wrappable): def __init__(self, space, frame, callcount, reccallcount, tt, it, w_sublist): diff --git a/pypy/translator/c/src/timer.h b/pypy/translator/c/src/timer.h --- a/pypy/translator/c/src/timer.h +++ b/pypy/translator/c/src/timer.h @@ -10,37 +10,26 @@ #ifndef PYPY_NOT_MAIN_FILE /* implementations */ -# ifdef _WIN32 - double pypy_read_timestamp_double(void) { - static 
double pypy_timer_scale = 0.0; - long long timestamp; - long long scale; - QueryPerformanceCounter((LARGE_INTEGER*)&(timestamp)); - if (pypy_timer_scale == 0.0) { - QueryPerformanceFrequency((LARGE_INTEGER*)&(scale)); - pypy_timer_scale = 1.0 / (double)scale; - } - return ((double)timestamp) * pypy_timer_scale; - } +#ifdef _WIN32 +long long pypy_read_timestamp(void) { + long long timestamp; + long long scale; + QueryPerformanceCounter((LARGE_INTEGER*)&(timestamp)); + return timestamp; +} -# else -# include -# include +#else - double pypy_read_timestamp_double(void) - { -# ifdef CLOCK_THREAD_CPUTIME_ID - struct timespec tspec; - clock_gettime(CLOCK_THREAD_CPUTIME_ID, &tspec); - return ((double)tspec.tv_sec) + ((double)tspec.tv_nsec) / 1e9; -# else - /* argh, we don't seem to have clock_gettime(). Bad OS. */ - struct timeval tv; - gettimeofday(&tv, NULL); - return ((double)tv.tv_sec) + ((double)tv.tv_usec) / 1e6; -# endif - } +#include "inttypes.h" -# endif +long long pypy_read_timestamp(void) { + uint32_t low, high; + __asm__ __volatile__ ( + "rdtsc" : "=a" (low), "=d" (high) + ); + return ((long long)high << 32) + low; +} + #endif #endif +#endif diff --git a/pypy/rlib/rtimer.py b/pypy/rlib/rtimer.py new file mode 100644 --- /dev/null +++ b/pypy/rlib/rtimer.py @@ -0,0 +1,21 @@ +import time + +import py + +from pypy.rlib.rarithmetic import r_longlong +from pypy.rpython.lltypesystem import rffi +from pypy.tool.autopath import pypydir + + +eci = rffi.ExternalCompilationInfo( + include_dirs = [str(py.path.local(pypydir).join('translator', 'c'))], + includes=["src/timer.h"], + separate_module_sources = [' '], +) +c_read_timestamp = rffi.llexternal( + 'pypy_read_timestamp', [], rffi.LONGLONG, + compilation_info=eci, _nowrapper=True +) + +def read_timestamp(): + return c_read_timestamp() \ No newline at end of file diff --git a/pypy/module/_lsprof/test/test_cprofile.py b/pypy/module/_lsprof/test/test_cprofile.py --- a/pypy/module/_lsprof/test/test_cprofile.py +++ 
b/pypy/module/_lsprof/test/test_cprofile.py @@ -1,16 +1,6 @@ import py from pypy.conftest import gettestobjspace, option -def test_timer(): - from pypy.module._lsprof.interp_lsprof import read_timestamp_double - import time - t1 = read_timestamp_double() - start = time.time() - while time.time() - start < 1.0: - pass # busy wait - t2 = read_timestamp_double() - assert 0.9 < t2 - t1 < 1.9 - class AppTestCProfile(object): keywords = {} diff --git a/pypy/rlib/test/test_rtimer.py b/pypy/rlib/test/test_rtimer.py new file mode 100644 --- /dev/null +++ b/pypy/rlib/test/test_rtimer.py @@ -0,0 +1,14 @@ +import time + +from pypy.rlib.rtimer import read_timestamp + + +def test_timer(): + t1 = read_timestamp() + start = time.time() + while time.time() - start < 1.0: + # busy wait + pass + t2 = read_timestamp() + # We're counting ticks, verify they look correct + assert t2 - t1 > 1000 From commits-noreply at bitbucket.org Sun Mar 13 19:37:09 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 19:37:09 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (fijal, alex): Write extregistry for read_timestamp. Message-ID: <20110313183709.E6900282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42564:01c036925078 Date: 2011-03-13 14:36 -0400 http://bitbucket.org/pypy/pypy/changeset/01c036925078/ Log: (fijal, alex): Write extregistry for read_timestamp. 
diff --git a/pypy/rlib/rtimer.py b/pypy/rlib/rtimer.py --- a/pypy/rlib/rtimer.py +++ b/pypy/rlib/rtimer.py @@ -3,6 +3,7 @@ import py from pypy.rlib.rarithmetic import r_longlong +from pypy.rpython.extregistry import ExtRegistryEntry from pypy.rpython.lltypesystem import rffi from pypy.tool.autopath import pypydir @@ -18,4 +19,16 @@ ) def read_timestamp(): - return c_read_timestamp() \ No newline at end of file + return c_read_timestamp() + + +class ReadTimestampEntry(ExtRegistryEntry): + _about_ = read_timestamp + + def compute_annotation(self): + from pypy.annotation.model import SomeInteger + return SomeInteger(knowntype=r_longlong) + + def specialize_call(self, hop): + hop.exception_cannot_occur() + return hop.genop("ll_read_timestamp") \ No newline at end of file diff --git a/pypy/rlib/test/test_rtimer.py b/pypy/rlib/test/test_rtimer.py --- a/pypy/rlib/test/test_rtimer.py +++ b/pypy/rlib/test/test_rtimer.py @@ -1,14 +1,23 @@ import time from pypy.rlib.rtimer import read_timestamp +from pypy.rpython.test.test_llinterp import interpret -def test_timer(): +def timer(): t1 = read_timestamp() start = time.time() - while time.time() - start < 1.0: + while time.time() - start < 0.1: # busy wait pass t2 = read_timestamp() + return t2 - t1 + +def test_timer(): + diff = timer() # We're counting ticks, verify they look correct - assert t2 - t1 > 1000 + assert diff > 1000 + +def test_annotation(): + diff = interpret(timer, []) + assert diff > 1000 \ No newline at end of file From commits-noreply at bitbucket.org Sun Mar 13 19:42:29 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sun, 13 Mar 2011 19:42:29 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Fix annotation hlstr Message-ID: <20110313184229.15CA8282B90@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42565:1271894919f3 Date: 2011-03-12 22:40 -0500 http://bitbucket.org/pypy/pypy/changeset/1271894919f3/ Log: Fix annotation hlstr diff --git a/pypy/rpython/annlowlevel.py 
b/pypy/rpython/annlowlevel.py --- a/pypy/rpython/annlowlevel.py +++ b/pypy/rpython/annlowlevel.py @@ -412,9 +412,9 @@ def compute_result_annotation(self, s_ll_str): if strtype is str: - return annmodel.SomeString() + return annmodel.SomeString(can_be_None=True) else: - return annmodel.SomeUnicodeString() + return annmodel.SomeUnicodeString(can_be_None=True) def specialize_call(self, hop): hop.exception_cannot_occur() From commits-noreply at bitbucket.org Sun Mar 13 19:42:31 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sun, 13 Mar 2011 19:42:31 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Progress on fixing tests Message-ID: <20110313184231.8D3C8282BD9@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42566:5f532ba0133f Date: 2011-03-12 22:40 -0500 http://bitbucket.org/pypy/pypy/changeset/5f532ba0133f/ Log: Progress on fixing tests diff --git a/pypy/jit/metainterp/test/test_warmspot.py b/pypy/jit/metainterp/test/test_warmspot.py --- a/pypy/jit/metainterp/test/test_warmspot.py +++ b/pypy/jit/metainterp/test/test_warmspot.py @@ -7,6 +7,7 @@ from pypy.jit.metainterp.history import BoxInt from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_NAMES class Exit(Exception): @@ -81,7 +82,9 @@ for loc in get_stats().locations: assert loc == 'GREEN IS 123.' 
- def test_set_param_optimizer(self): + def test_set_param_enable_opts(self): + from pypy.rpython.annlowlevel import llstr, hlstr + myjitdriver = JitDriver(greens = [], reds = ['n']) class A(object): def m(self, n): @@ -93,45 +96,20 @@ myjitdriver.jit_merge_point(n=n) n = A().m(n) return n - def f(n, optimizer): - myjitdriver.set_param('optimizer', optimizer) + def f(n, enable_opts): + myjitdriver.set_param('enable_opts', hlstr(enable_opts)) return g(n) # check that the set_param will override the default - res = self.meta_interp(f, [10, OPTIMIZER_SIMPLE], - optimizer=OPTIMIZER_FULL) + res = self.meta_interp(f, [10, llstr('')]) assert res == 0 self.check_loops(new_with_vtable=1) - res = self.meta_interp(f, [10, OPTIMIZER_FULL], - optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [10, llstr(ALL_OPTS_NAMES)], + enable_opts='') assert res == 0 self.check_loops(new_with_vtable=0) - def test_optimizer_default_choice(self): - myjitdriver = JitDriver(greens = [], reds = ['n']) - def f(n): - while n > 0: - myjitdriver.can_enter_jit(n=n) - myjitdriver.jit_merge_point(n=n) - n -= 1 - return n - - from pypy.rpython.test.test_llinterp import gengraph - t, rtyper, graph = gengraph(f, [int], type_system=self.type_system, - **{'translation.gc': 'boehm'}) - - from pypy.jit.metainterp.warmspot import WarmRunnerDesc - - warmrunnerdescr = WarmRunnerDesc(t, CPUClass=self.CPUClass, - optimizer=None) # pick default - - from pypy.jit.metainterp import optimize - - state = warmrunnerdescr.jitdrivers_sd[0].warmstate - assert state.optimize_loop is optimize.optimize_loop - assert state.optimize_bridge is optimize.optimize_bridge - def test_unwanted_loops(self): mydriver = JitDriver(reds = ['n', 'total', 'm'], greens = []) diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py --- a/pypy/rlib/jit.py +++ b/pypy/rlib/jit.py @@ -538,18 +538,24 @@ def compute_result_annotation(self, s_name, s_value): from pypy.annotation import model as annmodel assert s_name.is_constant() - if 
annmodel.SomeInteger().contains(s_value): - pass + if s_name.const == 'enable_opts': + assert annmodel.SomeString(can_be_None=True).contains(s_value) else: - assert annmodel.SomeString().contains(s_value) + assert annmodel.SomeInteger().contains(s_value) return annmodel.s_None def specialize_call(self, hop): from pypy.rpython.lltypesystem import lltype + from pypy.rpython.lltypesystem.rstr import string_repr + hop.exception_cannot_occur() driver = self.instance.im_self name = hop.args_s[0].const - v_value = hop.inputarg(hop.args_r[1], arg=1) + if name == 'enable_opts': + repr = string_repr + else: + repr = lltype.Signed + v_value = hop.inputarg(repr, arg=1) vlist = [hop.inputconst(lltype.Void, "set_param"), hop.inputconst(lltype.Void, driver), hop.inputconst(lltype.Void, name), diff --git a/pypy/jit/metainterp/warmstate.py b/pypy/jit/metainterp/warmstate.py --- a/pypy/jit/metainterp/warmstate.py +++ b/pypy/jit/metainterp/warmstate.py @@ -224,9 +224,11 @@ self.inlining = value def set_param_enable_opts(self, value): - from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT + from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT, ALL_OPTS_NAMES d = {} + if value is None: + value = ALL_OPTS_NAMES for name in value.split(":"): if name: if name not in ALL_OPTS_DICT: diff --git a/pypy/jit/metainterp/test/test_ztranslation.py b/pypy/jit/metainterp/test/test_ztranslation.py --- a/pypy/jit/metainterp/test/test_ztranslation.py +++ b/pypy/jit/metainterp/test/test_ztranslation.py @@ -89,7 +89,6 @@ res = rpython_ll_meta_interp(main, [40, 5], CPUClass=self.CPUClass, type_system=self.type_system, - optimizer=OPTIMIZER_FULL, ProfilerClass=Profiler) assert res == main(40, 5) From commits-noreply at bitbucket.org Sun Mar 13 19:42:32 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sun, 13 Mar 2011 19:42:32 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: small fixes Message-ID: <20110313184232.28C05282BD9@codespeak.net> Author: Maciej Fijalkowski Branch: 
jit-lsprofile Changeset: r42567:c602437bcdc8 Date: 2011-03-13 14:41 -0400 http://bitbucket.org/pypy/pypy/changeset/c602437bcdc8/ Log: small fixes diff --git a/pypy/rlib/rtimer.py b/pypy/rlib/rtimer.py --- a/pypy/rlib/rtimer.py +++ b/pypy/rlib/rtimer.py @@ -25,10 +25,10 @@ class ReadTimestampEntry(ExtRegistryEntry): _about_ = read_timestamp - def compute_annotation(self): + def compute_result_annotation(self): from pypy.annotation.model import SomeInteger return SomeInteger(knowntype=r_longlong) def specialize_call(self, hop): hop.exception_cannot_occur() - return hop.genop("ll_read_timestamp") \ No newline at end of file + return hop.genop("ll_read_timestamp", [], resulttype=rffi.LONGLONG) From commits-noreply at bitbucket.org Sun Mar 13 19:42:32 2011 From: commits-noreply at bitbucket.org (fijal) Date: Sun, 13 Mar 2011 19:42:32 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: merge Message-ID: <20110313184232.617C8282BDA@codespeak.net> Author: Maciej Fijalkowski Branch: enable-opts Changeset: r42568:4000746423b4 Date: 2011-03-13 14:42 -0400 http://bitbucket.org/pypy/pypy/changeset/4000746423b4/ Log: merge From commits-noreply at bitbucket.org Sun Mar 13 19:47:38 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 19:47:38 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: implement llop for readtimestamp Message-ID: <20110313184738.4D9EE282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42569:b47ad4fbaaa2 Date: 2011-03-13 14:47 -0400 http://bitbucket.org/pypy/pypy/changeset/b47ad4fbaaa2/ Log: implement llop for readtimestamp diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -32,7 +32,7 @@ assert isinstance(canraise, tuple) assert not canraise or not canfold - + # The operation manipulates PyObjects self.pyobj = pyobj @@ -440,6 +440,7 @@ 
'get_write_barrier_failing_case': LLOp(sideeffects=False), 'get_write_barrier_from_array_failing_case': LLOp(sideeffects=False), 'gc_get_type_info_group': LLOp(sideeffects=False), + 'll_read_timestamp': LLOp(sideeffects=False, canrun=True), # __________ GC operations __________ @@ -482,12 +483,12 @@ 'gc_typeids_z' : LLOp(), # ------- JIT & GC interaction, only for some GCs ---------- - + 'gc_adr_of_nursery_free' : LLOp(), # ^^^ returns an address of nursery free pointer, for later modifications 'gc_adr_of_nursery_top' : LLOp(), # ^^^ returns an address of pointer, since it can change at runtime - + # experimental operations in support of thread cloning, only # implemented by the Mark&Sweep GC 'gc_x_swap_pool': LLOp(canraise=(MemoryError,), canunwindgc=True), diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ b/pypy/rpython/lltypesystem/opimpl.py @@ -372,7 +372,7 @@ return ord(b) def op_cast_int_to_unichar(b): - assert type(b) is int + assert type(b) is int return unichr(b) def op_cast_int_to_uint(b): @@ -570,6 +570,10 @@ def op_shrink_array(array, smallersize): return False +def op_ll_read_timestamp(): + from pypy.rlib.rtimer import read_timestamp + return read_timestamp() + # ____________________________________________________________ def get_op_impl(opname): From commits-noreply at bitbucket.org Sun Mar 13 19:54:08 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 19:54:08 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (alex, fijal): read_timestamp compiles Message-ID: <20110313185408.D9EB636C202@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42570:8581da0dcc0a Date: 2011-03-13 14:53 -0400 http://bitbucket.org/pypy/pypy/changeset/8581da0dcc0a/ Log: (alex, fijal): read_timestamp compiles diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- 
a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -440,7 +440,7 @@ 'get_write_barrier_failing_case': LLOp(sideeffects=False), 'get_write_barrier_from_array_failing_case': LLOp(sideeffects=False), 'gc_get_type_info_group': LLOp(sideeffects=False), - 'll_read_timestamp': LLOp(sideeffects=False, canrun=True), + 'll_read_timestamp': LLOp(canrun=True), # __________ GC operations __________ diff --git a/pypy/translator/c/src/timer.h b/pypy/translator/c/src/timer.h --- a/pypy/translator/c/src/timer.h +++ b/pypy/translator/c/src/timer.h @@ -4,12 +4,11 @@ /* XXX Some overlap with the stuff in debug_print */ -/* prototypes */ -double pypy_read_timestamp_double(void); - #ifndef PYPY_NOT_MAIN_FILE /* implementations */ +#define OP_LL_READ_TIMESTAMP(v) v = pypy_read_timestamp(); + #ifdef _WIN32 long long pypy_read_timestamp(void) { long long timestamp; diff --git a/pypy/translator/c/src/g_include.h b/pypy/translator/c/src/g_include.h --- a/pypy/translator/c/src/g_include.h +++ b/pypy/translator/c/src/g_include.h @@ -10,6 +10,7 @@ # include "traceback.h" # include "marshal.h" # include "eval.h" +# include "timer.h" #else # include # include diff --git a/pypy/rlib/test/test_rtimer.py b/pypy/rlib/test/test_rtimer.py --- a/pypy/rlib/test/test_rtimer.py +++ b/pypy/rlib/test/test_rtimer.py @@ -2,7 +2,7 @@ from pypy.rlib.rtimer import read_timestamp from pypy.rpython.test.test_llinterp import interpret - +from pypy.translator.c.test.test_genc import compile def timer(): t1 = read_timestamp() @@ -20,4 +20,9 @@ def test_annotation(): diff = interpret(timer, []) + assert diff > 1000 + +def test_compile_c(): + function = compile(timer, []) + diff = function() assert diff > 1000 \ No newline at end of file From commits-noreply at bitbucket.org Sun Mar 13 19:58:32 2011 From: commits-noreply at bitbucket.org (tav) Date: Sun, 13 Mar 2011 19:58:32 +0100 (CET) Subject: [pypy-svn] pypy default: Removed a ctypes xfail and hard-coded test of OS X 
macholib. Message-ID: <20110313185832.B4389282B90@codespeak.net> Author: tav Branch: Changeset: r42571:8eeb89162a3b Date: 2011-03-13 18:07 +0000 http://bitbucket.org/pypy/pypy/changeset/8eeb89162a3b/ Log: Removed a ctypes xfail and hard-coded test of OS X macholib. diff --git a/lib-python/modified-2.7.0/ctypes/test/test_objects.py b/lib-python/modified-2.7.0/ctypes/test/test_objects.py --- a/lib-python/modified-2.7.0/ctypes/test/test_objects.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_objects.py @@ -22,7 +22,7 @@ >>> array[4] = 'foo bar' >>> array._objects -{'4': } +{'4': } >>> array[4] 'foo bar' >>> @@ -47,9 +47,9 @@ >>> x.array[0] = 'spam spam spam' >>> x._objects -{'0:2': } +{'0:2': } >>> x.array._b_base_._objects -{'0:2': } +{'0:2': } >>> ''' diff --git a/lib-python/modified-2.7.0/ctypes/test/test_macholib.py b/lib-python/modified-2.7.0/ctypes/test/test_macholib.py --- a/lib-python/modified-2.7.0/ctypes/test/test_macholib.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_macholib.py @@ -52,7 +52,6 @@ '/usr/lib/libSystem.B.dylib') result = find_lib('z') - self.assertTrue(result.startswith('/usr/lib/libz.1')) self.assertTrue(result.endswith('.dylib')) self.assertEqual(find_lib('IOKit'), diff --git a/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py b/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py --- a/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py @@ -49,7 +49,6 @@ func.restype = c_long func.argtypes = None - @xfail def test_paramflags(self): # function returns c_void_p result, # and has a required parameter named 'input' From commits-noreply at bitbucket.org Sun Mar 13 19:58:33 2011 From: commits-noreply at bitbucket.org (tav) Date: Sun, 13 Mar 2011 19:58:33 +0100 (CET) Subject: [pypy-svn] pypy default: Added kwargs/extended paramflags support and tests to ctypes.CFUNCTYPE. 
Message-ID: <20110313185833.9B44E282B90@codespeak.net> Author: tav Branch: Changeset: r42572:b16c3795183e Date: 2011-03-13 18:58 +0000 http://bitbucket.org/pypy/pypy/changeset/b16c3795183e/ Log: Added kwargs/extended paramflags support and tests to ctypes.CFUNCTYPE. diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -1,17 +1,28 @@ - -from _ctypes.basics import _CData, _CDataMeta, cdata_from_address -from _ctypes.primitive import SimpleType -from _ctypes.basics import ArgumentError, keepalive_key -from _ctypes.builtin import set_errno, set_last_error import _rawffi import sys import traceback +from _ctypes.basics import ArgumentError, keepalive_key +from _ctypes.basics import _CData, _CDataMeta, cdata_from_address +from _ctypes.builtin import set_errno, set_last_error +from _ctypes.primitive import SimpleType + # XXX this file needs huge refactoring I fear PARAMFLAG_FIN = 0x1 PARAMFLAG_FOUT = 0x2 PARAMFLAG_FLCID = 0x4 +PARAMFLAG_COMBINED = PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID + +VALID_PARAMFLAGS = ( + 0, + PARAMFLAG_FIN, + PARAMFLAG_FIN | PARAMFLAG_FOUT, + PARAMFLAG_FIN | PARAMFLAG_FLCID + ) + +WIN64 = sys.platform == 'win32' and sys.maxint == 2**63 - 1 + def get_com_error(errcode, riid, pIunk): "Win32 specific: build a COM Error exception" # XXX need C support code @@ -54,10 +65,11 @@ def _getargtypes(self): return self._argtypes_ + def _setargtypes(self, argtypes): self._ptr = None if argtypes is None: - self._argtypes_ = None + self._argtypes_ = () else: for i, argtype in enumerate(argtypes): if not hasattr(argtype, 'from_param'): @@ -65,35 +77,86 @@ "item %d in _argtypes_ has no from_param method" % ( i + 1,)) self._argtypes_ = argtypes + argtypes = property(_getargtypes, _setargtypes) + def _getparamflags(self): + return self._paramflags + + def _setparamflags(self, paramflags): + if paramflags is None or not self._argtypes_: + self._paramflags = None + 
return + if not isinstance(paramflags, tuple): + raise TypeError("paramflags must be a tuple or None") + if len(paramflags) != len(self._argtypes_): + raise ValueError("paramflags must have the same length as argtypes") + for idx, paramflag in enumerate(paramflags): + paramlen = len(paramflag) + name = default = None + if paramlen == 1: + flag = paramflag[0] + elif paramlen == 2: + flag, name = paramflag + elif paramlen == 3: + flag, name, default = paramflag + else: + raise TypeError( + "paramflags must be a sequence of (int [,string [,value]]) " + "tuples" + ) + if not isinstance(flag, int): + raise TypeError( + "paramflags must be a sequence of (int [,string [,value]]) " + "tuples" + ) + _flag = flag & PARAMFLAG_COMBINED + if _flag == PARAMFLAG_FOUT: + typ = self._argtypes_[idx] + if getattr(typ, '_ffiargshape', None) not in ('P', 'z', 'Z'): + raise TypeError( + "'out' parameter %d must be a pointer type, not %s" + % (idx+1, type(typ).__name__) + ) + elif _flag not in VALID_PARAMFLAGS: + raise TypeError("paramflag value %d not supported" % flag) + self._paramflags = paramflags + + paramflags = property(_getparamflags, _setparamflags) + def _getrestype(self): return self._restype_ + def _setrestype(self, restype): self._ptr = None if restype is int: from ctypes import c_int restype = c_int - if not isinstance(restype, _CDataMeta) and not restype is None and \ - not callable(restype): - raise TypeError("Expected ctypes type, got %s" % (restype,)) + if not (isinstance(restype, _CDataMeta) or restype is None or + callable(restype)): + raise TypeError("restype must be a type, a callable, or None") self._restype_ = restype + def _delrestype(self): self._ptr = None del self._restype_ + restype = property(_getrestype, _setrestype, _delrestype) def _geterrcheck(self): return getattr(self, '_errcheck_', None) + def _seterrcheck(self, errcheck): if not callable(errcheck): raise TypeError("The errcheck attribute must be callable") self._errcheck_ = errcheck + def 
_delerrcheck(self): try: del self._errcheck_ except AttributeError: pass + errcheck = property(_geterrcheck, _seterrcheck, _delerrcheck) def _ffishapes(self, args, restype): @@ -116,18 +179,18 @@ self._buffer = _rawffi.Array('P')(1) return - args = list(args) - argument = args.pop(0) + argsl = list(args) + argument = argsl.pop(0) # Direct construction from raw address - if isinstance(argument, (int, long)) and not args: + if isinstance(argument, (int, long)) and not argsl: ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) self._ptr = _rawffi.FuncPtr(argument, ffiargs, ffires, self._flags_) self._buffer = self._ptr.byptr() return - # A callback into python - if callable(argument) and not args: + # A callback into Python + if callable(argument) and not argsl: self.callable = argument ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) if self._restype_ is None: @@ -141,39 +204,32 @@ # Function exported from a shared library if isinstance(argument, tuple) and len(argument) == 2: import ctypes - name, dll = argument - # XXX Implement support for foreign function ordinal - if not isinstance(name, basestring): - raise NotImplementedError( - "Support for foreign functions exported by ordinal " - "hasn't been implemented yet." 
- ) - self.name = name + self.name, dll = argument if isinstance(dll, str): self.dll = ctypes.CDLL(dll) else: self.dll = dll - if args: - self._paramflags = args.pop(0) - if args: + if argsl: + self.paramflags = argsl.pop(0) + if argsl: raise TypeError("Unknown constructor %s" % (args,)) - # we need to check dll anyway + # We need to check dll anyway ptr = self._getfuncptr([], ctypes.c_int) self._buffer = ptr.byptr() return # A COM function call, by index if (sys.platform == 'win32' and isinstance(argument, (int, long)) - and args): + and argsl): ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) self._com_index = argument + 0x1000 - self.name = args.pop(0) - if args: - self._paramflags = args.pop(0) - if args: - raise TypeError("Unknown constructor %s" % (args,)) - # XXX Implement support for the optional ``iid`` pointer to the - # interface identifier used in extended error reporting. + self.name = argsl.pop(0) + if argsl: + self.paramflags = argsl.pop(0) + if argsl: + self._com_iid = argsl.pop(0) + if argsl: + raise TypeError("Unknown constructor %s" % (args,)) return raise TypeError("Unknown constructor %s" % (args,)) @@ -187,29 +243,30 @@ return f def __call__(self, *args, **kwargs): + argtypes = self._argtypes_ if self.callable is not None: - if len(args) == len(self._argtypes_): + if len(args) == len(argtypes): pass elif self._flags_ & _rawffi.FUNCFLAG_CDECL: - if len(args) < len(self._argtypes_): - plural = len(self._argtypes_) > 1 and "s" or "" + if len(args) < len(argtypes): + plural = len(argtypes) > 1 and "s" or "" raise TypeError( "This function takes at least %d argument%s (%s given)" - % (len(self._argtypes_), plural, len(args))) + % (len(argtypes), plural, len(args))) else: # For cdecl functions, we allow more actual arguments # than the length of the argtypes tuple. 
args = args[:len(self._argtypes_)] else: - plural = len(self._argtypes_) > 1 and "s" or "" + plural = len(argtypes) > 1 and "s" or "" raise TypeError( "This function takes %d argument%s (%s given)" - % (len(self._argtypes_), plural, len(args))) + % (len(argtypes), plural, len(args))) # check that arguments are convertible ## XXX Not as long as ctypes.cast is a callback function with ## py_object arguments... - ## self._convert_args(self._argtypes_, args) + ## self._convert_args(argtypes, args, {}) try: res = self.callable(*args) @@ -221,22 +278,26 @@ if self._restype_ is not None: return res return - argtypes = self._argtypes_ + + if argtypes is None: + argtypes = [] if self._com_index: from ctypes import cast, c_void_p, POINTER + if not args: + raise ValueError( + "native COM method call without 'this' parameter" + ) thisarg = cast(args[0], POINTER(POINTER(c_void_p))).contents argtypes = [c_void_p] + list(argtypes) args = list(args) args[0] = args[0].value else: thisarg = None - - if argtypes is None: - argtypes = [] - args, output_values = self._convert_args(argtypes, args, kwargs) + + args, outargs = self._convert_args(argtypes, args, kwargs) argtypes = [type(arg) for arg in args] - + restype = self._restype_ funcptr = self._getfuncptr(argtypes, restype, thisarg) if self._flags_ & _rawffi.FUNCFLAG_USE_ERRNO: @@ -251,7 +312,27 @@ set_errno(_rawffi.get_errno()) if self._flags_ & _rawffi.FUNCFLAG_USE_LASTERROR: set_last_error(_rawffi.get_last_error()) - result = self._build_result(restype, resbuffer, argtypes, args) + + result = None + if self._com_index: + if resbuffer[0] & 0x80000000: + raise get_com_error(resbuffer[0], + self._com_iid, args[0]) + else: + result = int(resbuffer[0]) + elif restype is not None: + checker = getattr(self.restype, '_check_retval_', None) + if checker: + val = restype(resbuffer[0]) + # the original ctypes seems to make the distinction between + # classes defining a new type, and their subclasses + if '_type_' in restype.__dict__: + val 
= val.value + result = checker(val) + elif not isinstance(restype, _CDataMeta): + result = restype(resbuffer[0]) + else: + result = restype._CData_retval(resbuffer) # The 'errcheck' protocol if self._errcheck_: @@ -264,13 +345,13 @@ if v is not args: result = v - if output_values: - if len(output_values) == 1: - return output_values[0] - return tuple(output_values) + if not outargs: + return result - return result + if len(outargs) == 1: + return outargs[0] + return tuple(outargs) def _getfuncptr(self, argtypes, restype, thisarg=None): if self._ptr is not None and argtypes is self._argtypes_: @@ -293,14 +374,17 @@ raise ValueError("COM method call without VTable") ptr = thisarg[self._com_index - 0x1000] return _rawffi.FuncPtr(ptr, argshapes, resshape, self._flags_) - + cdll = self.dll._handle try: return cdll.ptr(self.name, argshapes, resshape, self._flags_) except AttributeError: if self._flags_ & _rawffi.FUNCFLAG_CDECL: raise - + # Win64 has no stdcall calling conv, so it should also not have the + # name mangling of it. 
+ if WIN64: + raise # For stdcall, try mangled names: # funcname -> _funcname@ # where n is 0, 4, 8, 12, ..., 128 @@ -320,7 +404,6 @@ arg = argtype.from_param(arg) if hasattr(arg, '_as_parameter_'): arg = arg._as_parameter_ - if isinstance(arg, _CData): # The usual case when argtype is defined cobj = arg @@ -334,134 +417,88 @@ cobj = c_int(arg) else: raise TypeError("Don't know how to handle %s" % (arg,)) - return cobj - def _convert_args(self, argtypes, args, kwargs): - wrapped_args = [] - output_values = [] - consumed = 0 + def _convert_args(self, argtypes, args, kwargs, marker=object()): + callargs = [] + outargs = [] + total = len(args) + paramflags = self._paramflags - # XXX Implement support for kwargs/name + if self._com_index: + inargs_idx = 1 + else: + inargs_idx = 0 + + if not paramflags and total < len(argtypes): + raise TypeError("not enough arguments") + for i, argtype in enumerate(argtypes): - defaultvalue = None - if self._paramflags is not None: - paramflag = self._paramflags[i] + flag = 0 + name = None + defval = marker + if paramflags: + paramflag = paramflags[i] paramlen = len(paramflag) name = None if paramlen == 1: - idlflag = paramflag[0] + flag = paramflag[0] elif paramlen == 2: - idlflag, name = paramflag + flag, name = paramflag elif paramlen == 3: - idlflag, name, defaultvalue = paramflag - idlflag &= (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID) - - if idlflag in (0, PARAMFLAG_FIN): - pass - elif idlflag == PARAMFLAG_FOUT: - import ctypes - val = argtype._type_() - output_values.append(val) - wrapped_args.append(ctypes.byref(val)) - continue - elif idlflag == PARAMFLAG_FIN | PARAMFLAG_FLCID: - # Always taken from defaultvalue if given, - # else the integer 0. 
- val = defaultvalue - if val is None: + flag, name, defval = paramflag + flag = flag & PARAMFLAG_COMBINED + if flag == PARAMFLAG_FIN | PARAMFLAG_FLCID: + val = defval + if val is marker: val = 0 wrapped = self._conv_param(argtype, val) - wrapped_args.append(wrapped) - continue + callargs.append(wrapped) + elif flag in (0, PARAMFLAG_FIN): + if inargs_idx < total: + val = args[inargs_idx] + inargs_idx += 1 + elif kwargs and name in kwargs: + val = kwargs[name] + inargs_idx += 1 + elif defval is not marker: + val = defval + elif name: + raise TypeError("required argument '%s' missing" % name) + else: + raise TypeError("not enough arguments") + wrapped = self._conv_param(argtype, val) + callargs.append(wrapped) + elif flag == PARAMFLAG_FOUT: + if defval is not marker: + outargs.append(defval) + wrapped = self._conv_param(argtype, defval) + else: + import ctypes + val = argtype._type_() + outargs.append(val) + wrapped = ctypes.byref(val) + callargs.append(wrapped) else: - raise NotImplementedError( - "paramflags = %s" % (self._paramflags[i],)) + raise ValueError("paramflag %d not yet implemented" % flag) + else: + try: + wrapped = self._conv_param(argtype, args[i]) + except (UnicodeError, TypeError, ValueError), e: + raise ArgumentError(str(e)) + callargs.append(wrapped) + inargs_idx += 1 - if consumed < len(args): - arg = args[consumed] - elif defaultvalue is not None: - arg = defaultvalue - else: - raise TypeError("Not enough arguments") - - try: - wrapped = self._conv_param(argtype, arg) - except (UnicodeError, TypeError, ValueError), e: - raise ArgumentError(str(e)) - wrapped_args.append(wrapped) - consumed += 1 - - if len(wrapped_args) < len(args): - extra = args[len(wrapped_args):] - argtypes = list(argtypes) + if len(callargs) < total: + extra = args[len(callargs):] for i, arg in enumerate(extra): try: wrapped = self._conv_param(None, arg) except (UnicodeError, TypeError, ValueError), e: raise ArgumentError(str(e)) - wrapped_args.append(wrapped) - return 
wrapped_args, output_values + callargs.append(wrapped) - def _build_result(self, restype, resbuffer, argtypes, argsandobjs): - """Build the function result: - If there is no OUT parameter, return the actual function result - If there is one OUT parameter, return it - If there are many OUT parameters, return a tuple""" - - retval = None - - if self._com_index: - if resbuffer[0] & 0x80000000: - raise get_com_error(resbuffer[0], - self._com_iid, argsandobjs[0]) - else: - retval = int(resbuffer[0]) - elif restype is not None: - checker = getattr(self.restype, '_check_retval_', None) - if checker: - val = restype(resbuffer[0]) - # the original ctypes seems to make the distinction between - # classes defining a new type, and their subclasses - if '_type_' in restype.__dict__: - val = val.value - retval = checker(val) - elif not isinstance(restype, _CDataMeta): - retval = restype(resbuffer[0]) - else: - retval = restype._CData_retval(resbuffer) - - results = [] - if self._paramflags: - for argtype, obj, paramflag in zip(argtypes[1:], argsandobjs[1:], - self._paramflags): - if len(paramflag) == 2: - idlflag, name = paramflag - elif len(paramflag) == 3: - idlflag, name, defaultvalue = paramflag - else: - idlflag = 0 - idlflag &= (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID) - - if idlflag in (0, PARAMFLAG_FIN): - pass - elif idlflag == PARAMFLAG_FOUT: - val = obj.__ctypes_from_outparam__() - results.append(val) - elif idlflag == PARAMFLAG_FIN | PARAMFLAG_FLCID: - pass - else: - raise NotImplementedError( - "paramflags = %s" % (paramflag,)) - - if results: - if len(results) == 1: - return results[0] - else: - return tuple(results) - - # No output parameter, return the actual function result. 
- return retval + return callargs, outargs def __nonzero__(self): return bool(self._buffer[0]) diff --git a/pypy/module/test_lib_pypy/ctypes_tests/test_prototypes.py b/pypy/module/test_lib_pypy/ctypes_tests/test_prototypes.py --- a/pypy/module/test_lib_pypy/ctypes_tests/test_prototypes.py +++ b/pypy/module/test_lib_pypy/ctypes_tests/test_prototypes.py @@ -27,6 +27,55 @@ _ctypes_test = str(conftest.sofile) mod.testdll = CDLL(_ctypes_test) +class TestFuncPrototypes(BaseCTypesTestChecker): + + def test_restype_setattr(self): + func = testdll._testfunc_p_p + raises(TypeError, setattr, func, 'restype', 20) + + def test_argtypes_setattr(self): + func = testdll._testfunc_p_p + raises(TypeError, setattr, func, 'argtypes', 20) + raises(TypeError, setattr, func, 'argtypes', [20]) + + func = CFUNCTYPE(c_long, c_void_p, c_long)(lambda: None) + assert func.argtypes == (c_void_p, c_long) + + def test_paramflags_setattr(self): + func = CFUNCTYPE(c_long, c_void_p, c_long)(lambda: None) + raises(TypeError, setattr, func, 'paramflags', 'spam') + raises(ValueError, setattr, func, 'paramflags', (1, 2, 3, 4)) + raises(TypeError, setattr, func, 'paramflags', ((1,), ('a',))) + func.paramflags = (1,), (1|4,) + + def test_kwargs(self): + proto = CFUNCTYPE(c_char_p, c_char_p, c_int) + paramflags = (1, 'text', "tavino"), (1, 'letter', ord('v')) + func = proto(('my_strchr', testdll), paramflags) + assert func.argtypes == (c_char_p, c_int) + assert func.restype == c_char_p + + result = func("abcd", ord('b')) + assert result == "bcd" + + result = func() + assert result == "vino" + + result = func("grapevine") + assert result == "vine" + + result = func(text="grapevine") + assert result == "vine" + + result = func(letter=ord('i')) + assert result == "ino" + + result = func(letter=ord('p'), text="impossible") + assert result == "possible" + + result = func(text="impossible", letter=ord('p')) + assert result == "possible" + # Return machine address `a` as a (possibly long) non-negative integer. 
# Starting with Python 2.5, id(anything) is always non-negative, and # the ctypes addressof() inherits that via PyLong_FromVoidPtr(). From commits-noreply at bitbucket.org Sun Mar 13 20:30:15 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 20:30:15 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (alex, fijal): make jit aware of read_timestamp Message-ID: <20110313193015.B8BA7282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42573:8f146053b096 Date: 2011-03-13 15:30 -0400 http://bitbucket.org/pypy/pypy/changeset/8f146053b096/ Log: (alex, fijal): make jit aware of read_timestamp diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -237,7 +237,7 @@ NODE.become(lltype.GcStruct('NODE', ('value', lltype.Signed), ('next', lltype.Ptr(NODE)))) return NODE - + class OOJitMixin(JitMixin): type_system = 'ootype' #CPUClass = runner.OOtypeCPU @@ -271,7 +271,7 @@ return NODE -class BasicTests: +class BasicTests: def test_basic(self): def f(x, y): @@ -291,7 +291,7 @@ def test_uint_floordiv(self): from pypy.rlib.rarithmetic import r_uint - + def f(a, b): a = r_uint(a) b = r_uint(b) @@ -506,7 +506,7 @@ res = self.meta_interp(f, [6, 15], no_stats=True) finally: history.TreeLoop.__init__ = old_init - + assert res == f(6, 15) gc.collect() @@ -1094,7 +1094,7 @@ def test_bridge_from_interpreter_4(self): jitdriver = JitDriver(reds = ['n', 'k'], greens = []) - + def f(n, k): while n > 0: jitdriver.can_enter_jit(n=n, k=k) @@ -1107,7 +1107,7 @@ from pypy.rpython.test.test_llinterp import get_interpreter, clear_tcache from pypy.jit.metainterp.warmspot import WarmRunnerDesc - + interp, graph = get_interpreter(f, [0, 0], backendopt=False, inline_threshold=0, type_system=self.type_system) clear_tcache() @@ -1504,7 +1504,7 @@ return x res = self.meta_interp(f, [299], listops=True) assert res == f(299) - 
self.check_loops(guard_class=0, guard_value=2) + self.check_loops(guard_class=0, guard_value=2) self.check_loops(guard_class=0, guard_value=5, everywhere=True) def test_merge_guardnonnull_guardclass(self): @@ -1798,9 +1798,9 @@ def test_raw_malloc_and_access(self): from pypy.rpython.lltypesystem import rffi - + TP = rffi.CArray(lltype.Signed) - + def f(n): a = lltype.malloc(TP, n, flavor='raw') a[0] = n @@ -1813,9 +1813,9 @@ def test_raw_malloc_and_access_float(self): from pypy.rpython.lltypesystem import rffi - + TP = rffi.CArray(lltype.Float) - + def f(n, f): a = lltype.malloc(TP, n, flavor='raw') a[0] = f @@ -2118,7 +2118,7 @@ def test_dont_trace_every_iteration(self): myjitdriver = JitDriver(greens = [], reds = ['a', 'b', 'i', 'sa']) - + def main(a, b): i = sa = 0 #while i < 200: @@ -2214,7 +2214,7 @@ return n res = self.meta_interp(f, [sys.maxint>>10]) assert res == 11 - self.check_tree_loop_count(2) + self.check_tree_loop_count(2) def test_wrap_around_sub(self): myjitdriver = JitDriver(greens = [], reds = ['x', 'n']) @@ -2230,7 +2230,15 @@ return n res = self.meta_interp(f, [10-sys.maxint]) assert res == 12 - self.check_tree_loop_count(2) + self.check_tree_loop_count(2) + + def test_read_timestamp(self): + from pypy.rlib.test.test_rtimer import timer + def f(): + diff = timer() + return diff > 1000 + res = self.interp_operations(f, []) + assert res @@ -2308,7 +2316,7 @@ policy=StopAtXPolicy(getcls), optimizer=OPTIMIZER_SIMPLE) assert not res - + res = self.meta_interp(f, [0, 100], policy=StopAtXPolicy(getcls), optimizer=OPTIMIZER_SIMPLE) @@ -2328,7 +2336,7 @@ def test_oops_on_nongc(self): from pypy.rpython.lltypesystem import lltype - + TP = lltype.Struct('x') def f(i1, i2): p1 = prebuilt[i1] diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -23,7 +23,7 @@ # methods implemented by each concrete class # 
------------------------------------------ - + def getopnum(self): raise NotImplementedError @@ -234,7 +234,7 @@ def getarg(self, i): raise IndexError - + def setarg(self, i, box): raise IndexError @@ -258,7 +258,7 @@ return self._arg0 else: raise IndexError - + def setarg(self, i, box): if i == 0: self._arg0 = box @@ -288,7 +288,7 @@ return self._arg1 else: raise IndexError - + def setarg(self, i, box): if i == 0: self._arg0 = box @@ -326,7 +326,7 @@ return self._arg2 else: raise IndexError - + def setarg(self, i, box): if i == 0: self._arg0 = box @@ -352,7 +352,7 @@ def getarg(self, i): return self._args[i] - + def setarg(self, i, box): self._args[i] = box @@ -460,6 +460,7 @@ '_MALLOC_LAST', 'FORCE_TOKEN/0', 'VIRTUAL_REF/2', # removed before it's passed to the backend + 'READ_TIMESTAMP/0', '_NOSIDEEFFECT_LAST', # ----- end of no_side_effect operations ----- 'SETARRAYITEM_GC/3d', @@ -468,7 +469,7 @@ 'SETFIELD_RAW/2d', 'STRSETITEM/3', 'UNICODESETITEM/3', - #'RUNTIMENEW/1', # ootype operation + #'RUNTIMENEW/1', # ootype operation 'COND_CALL_GC_WB/2d', # [objptr, newvalue] (for the write barrier) 'DEBUG_MERGE_POINT/2', # debugging only 'JIT_DEBUG/*', # debugging only @@ -555,7 +556,7 @@ 2: BinaryOp, 3: TernaryOp } - + is_guard = name.startswith('GUARD') if is_guard: assert withdescr diff --git a/pypy/jit/metainterp/executor.py b/pypy/jit/metainterp/executor.py --- a/pypy/jit/metainterp/executor.py +++ b/pypy/jit/metainterp/executor.py @@ -6,6 +6,7 @@ from pypy.rpython.ootypesystem import ootype from pypy.rpython.lltypesystem.lloperation import llop from pypy.rlib.rarithmetic import ovfcheck, r_uint, intmask +from pypy.rlib.rtimer import c_read_timestamp from pypy.rlib.unroll import unrolling_iterable from pypy.jit.metainterp.history import BoxInt, BoxPtr, BoxFloat, check_descr from pypy.jit.metainterp.history import INT, REF, FLOAT, VOID, AbstractDescr @@ -227,6 +228,9 @@ length = lengthbox.getint() rstr.copy_unicode_contents(src, dst, srcstart, dststart, length) 
+def do_read_timestamp(cpu, _): + return BoxFloat(c_read_timestamp()) + # ____________________________________________________________ ##def do_force_token(cpu): diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -834,7 +834,7 @@ jcposition, redboxes): resumedescr = compile.ResumeAtPositionDescr() self.capture_resumedata(resumedescr, orgpc) - + any_operation = len(self.metainterp.history.operations) > 0 jitdriver_sd = self.metainterp.staticdata.jitdrivers_sd[jdindex] self.verify_green_args(jitdriver_sd, greenboxes) @@ -852,7 +852,7 @@ "found a loop_header for a JitDriver that does not match " "the following jit_merge_point's") self.metainterp.seen_loop_header_for_jdindex = -1 - + # if not self.metainterp.in_recursion: assert jitdriver_sd is self.metainterp.jitdriver_sd @@ -1023,6 +1023,10 @@ metainterp.history.record(rop.VIRTUAL_REF_FINISH, [vrefbox, lastbox], None) + @arguments() + def opimpl_ll_read_timestamp(self): + return self.metainterp.execute_and_record(rop.READ_TIMESTAMP, None) + # ------------------------------ def setup_call(self, argboxes): @@ -1719,7 +1723,7 @@ dont_change_position = True else: dont_change_position = False - try: + try: self.prepare_resume_from_failure(key.guard_opnum, dont_change_position) if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(ABORT_BRIDGE) @@ -1924,7 +1928,7 @@ self.history.inputargs = original_inputargs self.history.operations = self.history.operations[:start] - + self.history.record(rop.JUMP, bridge_arg_boxes[num_green_args:], None) try: target_loop_token = compile.compile_new_bridge(self, diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py --- a/pypy/jit/metainterp/blackhole.py +++ b/pypy/jit/metainterp/blackhole.py @@ -1,4 +1,5 @@ from pypy.rlib.unroll import unrolling_iterable +from pypy.rlib.rtimer import c_read_timestamp from pypy.rlib.rarithmetic 
import intmask, LONG_BIT, r_uint, ovfcheck from pypy.rlib.objectmodel import we_are_translated from pypy.rlib.debug import debug_start, debug_stop @@ -1205,6 +1206,10 @@ def bhimpl_unicodesetitem(cpu, unicode, index, newchr): cpu.bh_unicodesetitem(unicode, index, newchr) + @arguments(returns="f") + def bhimpl_ll_read_timestamp(): + return c_read_timestamp() + # ---------- # helpers to resume running in blackhole mode when a guard failed @@ -1416,7 +1421,7 @@ current_exc = blackholeinterp._prepare_resume_from_failure( resumedescr.guard_opnum, dont_change_position) - + try: _run_forever(blackholeinterp, current_exc) finally: From commits-noreply at bitbucket.org Sun Mar 13 21:05:04 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 21:05:04 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (alex, fijal): Encoding read_timestamp in x86. Message-ID: <20110313200504.41451282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42574:9f792994603c Date: 2011-03-13 16:04 -0400 http://bitbucket.org/pypy/pypy/changeset/9f792994603c/ Log: (alex, fijal): Encoding read_timestamp in x86. diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -400,12 +400,12 @@ # Method names take the form of -# +# # _ # # For example, the method name for "mov reg, immed" is MOV_ri. Operand order # is Intel-style, with the destination first. 
-# +# # The operand type codes are: # r - register # b - ebp/rbp offset @@ -540,6 +540,9 @@ # x87 instructions FSTP_b = insn('\xDD', orbyte(3<<3), stack_bp(1)) + # ------------------------------ Random mess ----------------------- + RDTSC = insn('\x0F\x31') + # ------------------------------ SSE2 ------------------------------ # Conversion diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -301,7 +301,7 @@ if log: self._register_counter() operations = self._inject_debugging_code(looptoken, operations) - + regalloc = RegAlloc(self, self.cpu.translate_support_code) arglocs = regalloc.prepare_loop(inputargs, operations, looptoken) looptoken._x86_arglocs = arglocs @@ -310,7 +310,7 @@ stackadjustpos = self._assemble_bootstrap_code(inputargs, arglocs) self.looppos = self.mc.get_relative_pos() looptoken._x86_frame_depth = -1 # temporarily - looptoken._x86_param_depth = -1 # temporarily + looptoken._x86_param_depth = -1 # temporarily frame_depth, param_depth = self._assemble(regalloc, operations) looptoken._x86_frame_depth = frame_depth looptoken._x86_param_depth = param_depth @@ -509,7 +509,7 @@ def _assemble(self, regalloc, operations): self._regalloc = regalloc - regalloc.walk_operations(operations) + regalloc.walk_operations(operations) if we_are_translated() or self.cpu.dont_keepalive_stuff: self._regalloc = None # else keep it around for debugging frame_depth = regalloc.fm.frame_depth @@ -957,7 +957,7 @@ dst_locs.append(unused_gpr.pop()) else: pass_on_stack.append(loc) - + # Emit instructions to pass the stack arguments # XXX: Would be nice to let remap_frame_layout take care of this, but # we'd need to create something like StackLoc, but relative to esp, @@ -1363,6 +1363,15 @@ else: assert 0, itemsize + def genop_read_timestamp(self, op, arglocs, resloc): + # XXX cheat + addr1 = self.fail_boxes_int.get_addr_for_num(0) + addr2 = 
self.fail_boxes_int.get_addr_for_num(1) + self.mc.RDTSC() + self.mc.MOV(heap(addr1), eax) + self.mc.MOV(heap(addr2), edx) + self.mc.MOVSD(resloc, heap(addr1)) + def genop_guard_guard_true(self, ign_1, guard_op, guard_token, locs, ign_2): loc = locs[0] self.mc.TEST(loc, loc) @@ -1796,7 +1805,7 @@ tmp = ecx else: tmp = eax - + self._emit_call(x, arglocs, 3, tmp=tmp) if IS_X86_32 and isinstance(resloc, StackLoc) and resloc.width == 8: @@ -2040,7 +2049,7 @@ # on 64-bits, 'tid' is a value that fits in 31 bits self.mc.MOV_mi((eax.value, 0), tid) self.mc.MOV(heap(nursery_free_adr), edx) - + genop_discard_list = [Assembler386.not_implemented_op_discard] * rop._LAST genop_list = [Assembler386.not_implemented_op] * rop._LAST genop_llong_list = {} @@ -2051,7 +2060,7 @@ opname = name[len('genop_discard_'):] num = getattr(rop, opname.upper()) genop_discard_list[num] = value - elif name.startswith('genop_guard_') and name != 'genop_guard_exception': + elif name.startswith('genop_guard_') and name != 'genop_guard_exception': opname = name[len('genop_guard_'):] num = getattr(rop, opname.upper()) genop_guard_list[num] = value diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -2233,10 +2233,19 @@ self.check_tree_loop_count(2) def test_read_timestamp(self): - from pypy.rlib.test.test_rtimer import timer + import time + from pypy.rlib.rtimer import read_timestamp + def busy_loop(): + start = time.time() + while time.time() - start < 0.1: + # busy wait + pass + def f(): - diff = timer() - return diff > 1000 + t1 = read_timestamp() + busy_loop() + t2 = read_timestamp() + return t2 - t1 > 1000 res = self.interp_operations(f, []) assert res diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -25,6 +25,7 @@ from pypy.rlib.objectmodel import 
ComputedIntSymbolic, we_are_translated from pypy.rlib.rarithmetic import ovfcheck from pypy.rlib.rarithmetic import r_longlong, r_ulonglong, r_uint +from pypy.rlib.rtimer import c_read_timestamp import py from pypy.tool.ansi_print import ansi_log @@ -506,7 +507,7 @@ ', '.join(map(str, args)),)) self.fail_args = args return op.fail_index - + else: assert 0, "unknown final operation %d" % (op.opnum,) @@ -856,6 +857,9 @@ opaque_frame = _to_opaque(self) return llmemory.cast_ptr_to_adr(opaque_frame) + def op_read_timestamp(self, descr): + return c_read_timestamp() + def op_call_may_force(self, calldescr, func, *args): assert not self._forced self._may_force = self.opindex @@ -937,7 +941,7 @@ class OOFrame(Frame): OPHANDLERS = [None] * (rop._LAST+1) - + def op_new_with_vtable(self, descr, vtable): assert descr is None typedescr = get_class_size(self.memocast, vtable) @@ -958,7 +962,7 @@ return res op_getfield_gc_pure = op_getfield_gc - + def op_setfield_gc(self, fielddescr, obj, newvalue): TYPE = fielddescr.TYPE fieldname = fielddescr.fieldname diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -138,7 +138,7 @@ xmm_reg_mgr_cls = X86_64_XMMRegisterManager else: raise AssertionError("Word size should be 4 or 8") - + self.rm = gpr_reg_mgr_cls(longevity, frame_manager = self.fm, assembler = self.assembler) @@ -341,7 +341,7 @@ self.assembler.regalloc_perform_guard(guard_op, faillocs, arglocs, result_loc, current_depths) - self.possibly_free_vars(guard_op.getfailargs()) + self.possibly_free_vars(guard_op.getfailargs()) def PerformDiscard(self, op, arglocs): if not we_are_translated(): @@ -417,7 +417,7 @@ assert isinstance(arg, Box) if arg not in last_used: last_used[arg] = i - + longevity = {} for arg in produced: if arg in last_used: @@ -807,7 +807,7 @@ self._call(op, [imm(size), vable] + [self.loc(op.getarg(i)) for i in range(op.numargs())], 
guard_not_forced_op=guard_op) - + def consider_cond_call_gc_wb(self, op): assert op.result is None args = op.getarglist() @@ -1156,6 +1156,16 @@ else: raise AssertionError("bad unicode item size") + def consider_read_timestamp(self, op): + tmpbox_high = TempBox() + tmpbox_low = TempBox() + self.rm.force_allocate_reg(tmpbox_high, selected_reg=eax) + self.rm.force_allocate_reg(tmpbox_low, selected_reg=edx) + result_loc = self.xrm.force_allocate_reg(op.result) + self.Perform(op, [], result_loc) + self.rm.possibly_free_var(tmpbox_high) + self.rm.possibly_free_var(tmpbox_low) + def consider_jump(self, op): assembler = self.assembler assert self.jump_target_descr is None @@ -1172,13 +1182,13 @@ xmmtmploc = self.xrm.force_allocate_reg(box1, selected_reg=xmmtmp) # Part about non-floats # XXX we don't need a copy, we only just the original list - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type != FLOAT] assert tmploc not in nonfloatlocs dst_locations = [loc for loc in nonfloatlocs if loc is not None] remap_frame_layout(assembler, src_locations, dst_locations, tmploc) # Part about floats - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type == FLOAT] dst_locations = [loc for loc in floatlocs if loc is not None] remap_frame_layout(assembler, src_locations, dst_locations, xmmtmp) From commits-noreply at bitbucket.org Sun Mar 13 21:24:27 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 13 Mar 2011 21:24:27 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (fijal, alex): make _lsprof use the new timer. 
Message-ID: <20110313202427.00C70282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42575:3bb134ca6f6d Date: 2011-03-13 16:24 -0400 http://bitbucket.org/pypy/pypy/changeset/3bb134ca6f6d/ Log: (fijal, alex): make _lsprof use the new timer. diff --git a/pypy/module/_lsprof/interp_lsprof.py b/pypy/module/_lsprof/interp_lsprof.py --- a/pypy/module/_lsprof/interp_lsprof.py +++ b/pypy/module/_lsprof/interp_lsprof.py @@ -7,6 +7,7 @@ from pypy.interpreter.typedef import (TypeDef, GetSetProperty, interp_attrproperty) from pypy.rlib import jit +from pypy.rlib.rtimer import read_timestamp from pypy.rpython.lltypesystem import rffi import time, sys @@ -229,7 +230,7 @@ e.write_unraisable(space, "timer function ", self.w_callable) return 0.0 - return read_timestamp_double() + return float(read_timestamp()) def enable(self, space, w_subcalls=NoneNotWrapped, w_builtins=NoneNotWrapped): diff --git a/pypy/module/_lsprof/test/test_cprofile.py b/pypy/module/_lsprof/test/test_cprofile.py --- a/pypy/module/_lsprof/test/test_cprofile.py +++ b/pypy/module/_lsprof/test/test_cprofile.py @@ -91,6 +91,19 @@ assert spam2bar.inlinetime == 1.0 assert spam2bar.totaltime == 1.0 + def test_direct_read_timestamp(self): + import _lsprof + + def f(): + pass + + profiler = _lsprof.Profiler() + profiler.enable() + f() + profiler.disable() + stats = profiler.getstats() + xxx + def test_cprofile(self): import sys, os # XXX this is evil trickery to walk around the fact that we don't From commits-noreply at bitbucket.org Sun Mar 13 23:41:28 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sun, 13 Mar 2011 23:41:28 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: RWeakValueDictionary only accept string keys. 
Message-ID: <20110313224128.247EC282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42576:a6ada4570450 Date: 2011-03-12 00:18 +0100 http://bitbucket.org/pypy/pypy/changeset/a6ada4570450/ Log: RWeakValueDictionary only accept string keys. Try to have it accept other types From commits-noreply at bitbucket.org Sun Mar 13 23:41:34 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sun, 13 Mar 2011 23:41:34 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: Add "keyclass" to RWeakValueDictionary constructor Message-ID: <20110313224134.1FB21282BDA@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42577:37634fa88b3c Date: 2011-03-12 00:23 +0100 http://bitbucket.org/pypy/pypy/changeset/37634fa88b3c/ Log: Add "keyclass" to RWeakValueDictionary constructor diff --git a/pypy/rlib/rweakref.py b/pypy/rlib/rweakref.py --- a/pypy/rlib/rweakref.py +++ b/pypy/rlib/rweakref.py @@ -12,14 +12,17 @@ Only supports string keys. 
""" - def __init__(self, valueclass): + def __init__(self, keyclass, valueclass): self._dict = weakref.WeakValueDictionary() + self._keyclass = keyclass self._valueclass = valueclass def get(self, key): + assert isinstance(key, self._keyclass) return self._dict.get(key, None) def set(self, key, value): + assert isinstance(key, self._keyclass) if value is None: self._dict.pop(key, None) else: @@ -94,7 +97,7 @@ class Entry(extregistry.ExtRegistryEntry): _about_ = RWeakValueDictionary - def compute_result_annotation(self, s_valueclass): + def compute_result_annotation(self, s_keyclass, s_valueclass): return SomeWeakValueDict(_getclassdef(s_valueclass)) def specialize_call(self, hop): diff --git a/pypy/rlib/test/test_rweakvaldict.py b/pypy/rlib/test/test_rweakvaldict.py --- a/pypy/rlib/test/test_rweakvaldict.py +++ b/pypy/rlib/test/test_rweakvaldict.py @@ -23,7 +23,7 @@ assert d.get("hello") is None return x1, x3 # x2 dies def f(): - d = RWeakValueDictionary(X) + d = RWeakValueDictionary(str, X) x1, x3 = g(d) rgc.collect(); rgc.collect() assert d.get("abc") is x1 @@ -70,7 +70,7 @@ interpret(make_test(loop=12), []) def test_rpython_prebuilt(): - d = RWeakValueDictionary(X) + d = RWeakValueDictionary(str, X) living = [X() for i in range(8)] for i in range(8): d.set(str(i), living[i]) @@ -89,13 +89,13 @@ interpret(f, []) def test_rpython_merge_RWeakValueDictionary(): - empty = RWeakValueDictionary(X) + empty = RWeakValueDictionary(str, X) def f(n): x = X() if n: d = empty else: - d = RWeakValueDictionary(X) + d = RWeakValueDictionary(str, X) d.set("a", x) return d.get("a") is x assert f(0) @@ -107,7 +107,7 @@ def test_rpython_merge_RWeakValueDictionary2(): class A(object): def __init__(self): - self.d = RWeakValueDictionary(A) + self.d = RWeakValueDictionary(str, A) def f(self, key): a = A() self.d.set(key, a) @@ -126,8 +126,8 @@ def g(x): if x: - d = RWeakValueDictionary(X) + d = RWeakValueDictionary(str, X) else: - d = RWeakValueDictionary(Y) + d = 
RWeakValueDictionary(str, Y) d.set("x", X()) py.test.raises(Exception, interpret, g, [1]) From commits-noreply at bitbucket.org Sun Mar 13 23:41:35 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sun, 13 Mar 2011 23:41:35 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: RWeakValueDictionary now accept different kinds of keys. Message-ID: <20110313224135.4C818282BDA@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42578:76f8aa04732d Date: 2011-03-13 22:18 +0100 http://bitbucket.org/pypy/pypy/changeset/76f8aa04732d/ Log: RWeakValueDictionary now accept different kinds of keys. Only tested with str and int. diff --git a/pypy/rlib/rweakref.py b/pypy/rlib/rweakref.py --- a/pypy/rlib/rweakref.py +++ b/pypy/rlib/rweakref.py @@ -8,9 +8,7 @@ class RWeakValueDictionary(object): - """A limited dictionary containing weak values. - Only supports string keys. - """ + """A dictionary containing weak values.""" def __init__(self, keyclass, valueclass): self._dict = weakref.WeakValueDictionary() @@ -70,18 +68,19 @@ class SomeWeakValueDict(annmodel.SomeObject): knowntype = RWeakValueDictionary - def __init__(self, valueclassdef): + def __init__(self, s_key, valueclassdef): + self.s_key = s_key self.valueclassdef = valueclassdef def rtyper_makerepr(self, rtyper): from pypy.rlib import _rweakvaldict - return _rweakvaldict.WeakValueDictRepr(rtyper) + return _rweakvaldict.WeakValueDictRepr(rtyper, + rtyper.makerepr(self.s_key)) def rtyper_makekey_ex(self, rtyper): return self.__class__, def method_get(self, s_key): - assert annmodel.SomeString(can_be_None=True).contains(s_key) return annmodel.SomeInstance(self.valueclassdef, can_be_None=True) def method_set(self, s_key, s_value): @@ -91,18 +90,23 @@ class __extend__(pairtype(SomeWeakValueDict, SomeWeakValueDict)): def union((s_wvd1, s_wvd2)): if s_wvd1.valueclassdef is not s_wvd2.valueclassdef: - return SomeObject() # not the same class! complain... 
- return SomeWeakValueDict(s_wvd1.valueclassdef) + return annmodel.SomeObject() # not the same class! complain... + s_key = annmodel.unionof(s_wvd1.s_key, s_wvd2.s_key) + return SomeWeakValueDict(s_key, s_wvd1.valueclassdef) class Entry(extregistry.ExtRegistryEntry): _about_ = RWeakValueDictionary def compute_result_annotation(self, s_keyclass, s_valueclass): - return SomeWeakValueDict(_getclassdef(s_valueclass)) + assert s_keyclass.is_constant() + s_key = self.bookkeeper.immutablevalue(s_keyclass.const()) + return SomeWeakValueDict( + s_key, + _getclassdef(s_valueclass)) def specialize_call(self, hop): from pypy.rlib import _rweakvaldict - return _rweakvaldict.specialize_make_weakdict(hop) + return _rweakvaldict.specialize_make_weakdict(hop, hop.r_result.traits) class Entry(extregistry.ExtRegistryEntry): _type_ = RWeakValueDictionary @@ -110,7 +114,9 @@ def compute_annotation(self): bk = self.bookkeeper x = self.instance - return SomeWeakValueDict(bk.getuniqueclassdef(x._valueclass)) + return SomeWeakValueDict( + bk.immutablevalue(x._keyclass()), + bk.getuniqueclassdef(x._valueclass)) def _getclassdef(s_instance): assert isinstance(s_instance, annmodel.SomePBC) diff --git a/pypy/rlib/_rweakvaldict.py b/pypy/rlib/_rweakvaldict.py --- a/pypy/rlib/_rweakvaldict.py +++ b/pypy/rlib/_rweakvaldict.py @@ -3,15 +3,18 @@ from pypy.rpython.lltypesystem.llmemory import weakref_create, weakref_deref from pypy.rpython.lltypesystem.lloperation import llop from pypy.rpython.rclass import getinstancerepr +from pypy.rpython.rint import signed_repr from pypy.rpython.rmodel import Repr from pypy.rlib.rweakref import RWeakValueDictionary from pypy.rlib import jit class WeakValueDictRepr(Repr): - def __init__(self, rtyper): + def __init__(self, rtyper, r_key): self.rtyper = rtyper - self.lowleveltype = lltype.Ptr(WEAKDICT) + self.r_key = r_key + self.traits = make_WEAKDICT(r_key) + self.lowleveltype = lltype.Ptr(self.traits.WEAKDICT) self.dict_cache = {} def convert_const(self, 
weakdict): @@ -23,150 +26,163 @@ return self.dict_cache[key] except KeyError: self.setup() - l_dict = ll_new_weakdict() + l_dict = self.traits.ll_new_weakdict() self.dict_cache[key] = l_dict bk = self.rtyper.annotator.bookkeeper classdef = bk.getuniqueclassdef(weakdict._valueclass) - r_key = rstr.string_repr r_value = getinstancerepr(self.rtyper, classdef) for dictkey, dictvalue in weakdict._dict.items(): - llkey = r_key.convert_const(dictkey) + llkey = self.r_key.convert_const(dictkey) llvalue = r_value.convert_const(dictvalue) if llvalue: llvalue = lltype.cast_pointer(rclass.OBJECTPTR, llvalue) - ll_set_nonnull(l_dict, llkey, llvalue) + self.traits.ll_set_nonnull(l_dict, llkey, llvalue) return l_dict def rtype_method_get(self, hop): - v_d, v_key = hop.inputargs(self, rstr.string_repr) + + v_d, v_key = hop.inputargs(self, self.r_key) hop.exception_cannot_occur() - v_result = hop.gendirectcall(ll_get, v_d, v_key) + v_result = hop.gendirectcall(self.traits.ll_get, v_d, v_key) v_result = hop.genop("cast_pointer", [v_result], resulttype=hop.r_result.lowleveltype) return v_result def rtype_method_set(self, hop): r_object = getinstancerepr(self.rtyper, None) - v_d, v_key, v_value = hop.inputargs(self, rstr.string_repr, - r_object) + v_d, v_key, v_value = hop.inputargs(self, self.r_key, r_object) hop.exception_cannot_occur() if hop.args_s[2].is_constant() and hop.args_s[2].const is None: - hop.gendirectcall(ll_set_null, v_d, v_key) + hop.gendirectcall(self.traits.ll_set_null, v_d, v_key) else: - hop.gendirectcall(ll_set, v_d, v_key, v_value) + hop.gendirectcall(self.traits.ll_set, v_d, v_key, v_value) -def specialize_make_weakdict(hop): +def specialize_make_weakdict(hop, traits): hop.exception_cannot_occur() - v_d = hop.gendirectcall(ll_new_weakdict) + v_d = hop.gendirectcall(traits.ll_new_weakdict) return v_d # ____________________________________________________________ +def make_WEAKDICT(r_key): + KEY = r_key.lowleveltype + ll_keyhash = r_key.get_ll_hash_function() + 
if isinstance(KEY, lltype.Ptr): + zero_key = r_key.convert_const(None) + else: + zero_key = r_key.convert_const(0) -WEAKDICTENTRY = lltype.Struct("weakdictentry", - ("key", lltype.Ptr(rstr.STR)), - ("value", llmemory.WeakRefPtr)) + WEAKDICTENTRY = lltype.Struct("weakdictentry", + ("key", KEY), + ("value", llmemory.WeakRefPtr)) -def ll_valid(entries, i): - value = entries[i].value - return bool(value) and bool(weakref_deref(rclass.OBJECTPTR, value)) + def ll_valid(entries, i): + value = entries[i].value + return bool(value) and bool(weakref_deref(rclass.OBJECTPTR, value)) -def ll_everused(entries, i): - return bool(entries[i].value) + def ll_everused(entries, i): + return bool(entries[i].value) -def ll_hash(entries, i): - return str_fasthashfn(entries[i].key) -str_fasthashfn = rstr.string_repr.get_ll_fasthash_function() + def ll_hash(entries, i): + return fasthashfn(entries[i].key) + fasthashfn = r_key.get_ll_fasthash_function() -entrymeths = { - 'allocate': lltype.typeMethod(rdict._ll_malloc_entries), - 'delete': rdict._ll_free_entries, - 'valid': ll_valid, - 'everused': ll_everused, - 'hash': ll_hash, - } -WEAKDICTENTRYARRAY = lltype.GcArray(WEAKDICTENTRY, - adtmeths=entrymeths, - hints={'weakarray': 'value'}) -# NB. the 'hints' is not used so far ^^^ + entrymeths = { + 'allocate': lltype.typeMethod(rdict._ll_malloc_entries), + 'delete': rdict._ll_free_entries, + 'valid': ll_valid, + 'everused': ll_everused, + 'hash': ll_hash, + } + WEAKDICTENTRYARRAY = lltype.GcArray(WEAKDICTENTRY, + adtmeths=entrymeths, + hints={'weakarray': 'value'}) + # NB. 
the 'hints' is not used so far ^^^ -ll_strhash = rstr.LLHelpers.ll_strhash + class Traits: + @staticmethod + @jit.dont_look_inside + def ll_new_weakdict(): + d = lltype.malloc(Traits.WEAKDICT) + d.entries = Traits.WEAKDICT.entries.TO.allocate(rdict.DICT_INITSIZE) + d.num_items = 0 + d.num_pristine_entries = rdict.DICT_INITSIZE + return d - at jit.dont_look_inside -def ll_new_weakdict(): - d = lltype.malloc(WEAKDICT) - d.entries = WEAKDICT.entries.TO.allocate(rdict.DICT_INITSIZE) - d.num_items = 0 - d.num_pristine_entries = rdict.DICT_INITSIZE - return d + @staticmethod + @jit.dont_look_inside + def ll_get(d, llkey): + hash = ll_keyhash(llkey) + i = rdict.ll_dict_lookup(d, llkey, hash) + #llop.debug_print(lltype.Void, i, 'get') + valueref = d.entries[i].value + if valueref: + return weakref_deref(rclass.OBJECTPTR, valueref) + else: + return lltype.nullptr(rclass.OBJECTPTR.TO) - at jit.dont_look_inside -def ll_get(d, llkey): - hash = ll_strhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) - #llop.debug_print(lltype.Void, i, 'get') - valueref = d.entries[i].value - if valueref: - return weakref_deref(rclass.OBJECTPTR, valueref) - else: - return lltype.nullptr(rclass.OBJECTPTR.TO) + @staticmethod + @jit.dont_look_inside + def ll_set(d, llkey, llvalue): + if llvalue: + Traits.ll_set_nonnull(d, llkey, llvalue) + else: + Traits.ll_set_null(d, llkey) - at jit.dont_look_inside -def ll_set(d, llkey, llvalue): - if llvalue: - ll_set_nonnull(d, llkey, llvalue) - else: - ll_set_null(d, llkey) + @staticmethod + @jit.dont_look_inside + def ll_set_nonnull(d, llkey, llvalue): + hash = ll_keyhash(llkey) + valueref = weakref_create(llvalue) # GC effects here, before the rest + i = rdict.ll_dict_lookup(d, llkey, hash) + everused = d.entries.everused(i) + d.entries[i].key = llkey + d.entries[i].value = valueref + #llop.debug_print(lltype.Void, i, 'stored') + if not everused: + d.num_pristine_entries -= 1 + if d.num_pristine_entries * 3 <= len(d.entries): + 
#llop.debug_print(lltype.Void, 'RESIZE') + Traits.ll_weakdict_resize(d) - at jit.dont_look_inside -def ll_set_nonnull(d, llkey, llvalue): - hash = ll_strhash(llkey) - valueref = weakref_create(llvalue) # GC effects here, before the rest - i = rdict.ll_dict_lookup(d, llkey, hash) - everused = d.entries.everused(i) - d.entries[i].key = llkey - d.entries[i].value = valueref - #llop.debug_print(lltype.Void, i, 'stored') - if not everused: - d.num_pristine_entries -= 1 - if d.num_pristine_entries * 3 <= len(d.entries): - #llop.debug_print(lltype.Void, 'RESIZE') - ll_weakdict_resize(d) + @staticmethod + @jit.dont_look_inside + def ll_set_null(d, llkey): + hash = ll_keyhash(llkey) + i = rdict.ll_dict_lookup(d, llkey, hash) + if d.entries.everused(i): + # If the entry was ever used, clean up its key and value. + # We don't store a NULL value, but a dead weakref, because + # the entry must still be marked as everused(). + d.entries[i].value = llmemory.dead_wref + d.entries[i].key = zero_key + #llop.debug_print(lltype.Void, i, 'zero') - at jit.dont_look_inside -def ll_set_null(d, llkey): - hash = ll_strhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) - if d.entries.everused(i): - # If the entry was ever used, clean up its key and value. - # We don't store a NULL value, but a dead weakref, because - # the entry must still be marked as everused(). 
- d.entries[i].value = llmemory.dead_wref - d.entries[i].key = lltype.nullptr(rstr.STR) - #llop.debug_print(lltype.Void, i, 'zero') + @staticmethod + def ll_weakdict_resize(d): + # first set num_items to its correct, up-to-date value + entries = d.entries + num_items = 0 + for i in range(len(entries)): + if entries.valid(i): + num_items += 1 + d.num_items = num_items + rdict.ll_dict_resize(d) -def ll_weakdict_resize(d): - # first set num_items to its correct, up-to-date value - entries = d.entries - num_items = 0 - for i in range(len(entries)): - if entries.valid(i): - num_items += 1 - d.num_items = num_items - rdict.ll_dict_resize(d) + ll_keyeq = lltype.staticAdtMethod(r_key.get_ll_eq_function()) -str_keyeq = lltype.staticAdtMethod(rstr.string_repr.get_ll_eq_function()) + dictmeths = { + 'll_get': ll_get, + 'll_set': ll_set, + 'keyeq': ll_keyeq, + 'paranoia': False, + } -dictmeths = { - 'll_get': ll_get, - 'll_set': ll_set, - 'keyeq': str_keyeq, - 'paranoia': False, - } + WEAKDICT = lltype.GcStruct("weakvaldict", + ("num_items", lltype.Signed), + ("num_pristine_entries", lltype.Signed), + ("entries", lltype.Ptr(WEAKDICTENTRYARRAY)), + adtmeths=dictmeths) -WEAKDICT = lltype.GcStruct("weakvaldict", - ("num_items", lltype.Signed), - ("num_pristine_entries", lltype.Signed), - ("entries", lltype.Ptr(WEAKDICTENTRYARRAY)), - adtmeths=dictmeths) + return Traits diff --git a/pypy/rlib/test/test_rweakvaldict.py b/pypy/rlib/test/test_rweakvaldict.py --- a/pypy/rlib/test/test_rweakvaldict.py +++ b/pypy/rlib/test/test_rweakvaldict.py @@ -10,65 +10,78 @@ pass -def make_test(loop=100): +def make_test(loop=100, keyclass=str): + if keyclass is str: + make_key = str + keys = ["abc", "def", "ghi", "hello"] + elif keyclass is int: + make_key = int + keys = [123, 456, 789, 1234] + def g(d): - assert d.get("hello") is None + assert d.get(keys[3]) is None x1 = X(); x2 = X(); x3 = X() - d.set("abc", x1) - d.set("def", x2) - d.set("ghi", x3) - assert d.get("abc") is x1 - assert 
d.get("def") is x2 - assert d.get("ghi") is x3 - assert d.get("hello") is None + d.set(keys[0], x1) + d.set(keys[1], x2) + d.set(keys[2], x3) + assert d.get(keys[0]) is x1 + assert d.get(keys[1]) is x2 + assert d.get(keys[2]) is x3 + assert d.get(keys[3]) is None return x1, x3 # x2 dies def f(): - d = RWeakValueDictionary(str, X) + d = RWeakValueDictionary(keyclass, X) x1, x3 = g(d) rgc.collect(); rgc.collect() - assert d.get("abc") is x1 - assert d.get("def") is None - assert d.get("ghi") is x3 - assert d.get("hello") is None - d.set("abc", None) - assert d.get("abc") is None - assert d.get("def") is None - assert d.get("ghi") is x3 - assert d.get("hello") is None + assert d.get(keys[0]) is x1 + assert d.get(keys[1]) is None + assert d.get(keys[2]) is x3 + assert d.get(keys[3]) is None + d.set(keys[0], None) + assert d.get(keys[0]) is None + assert d.get(keys[1]) is None + assert d.get(keys[2]) is x3 + assert d.get(keys[3]) is None # resizing should also work for i in range(loop): - d.set(str(i), x1) + d.set(make_key(i), x1) for i in range(loop): - assert d.get(str(i)) is x1 - assert d.get("abc") is None - assert d.get("def") is None - assert d.get("ghi") is x3 - assert d.get("hello") is None + assert d.get(make_key(i)) is x1 + assert d.get(keys[0]) is None + assert d.get(keys[1]) is None + assert d.get(keys[2]) is x3 + assert d.get(keys[3]) is None # a subclass y = Y() - d.set("hello", y) - assert d.get("hello") is y + d.set(keys[3], y) + assert d.get(keys[3]) is y # storing a lot of Nones for i in range(loop, loop*2-5): - d.set('%dfoobar' % i, x1) + d.set(make_key(1000 + i), x1) for i in range(loop): - d.set(str(i), None) + d.set(make_key(i), None) for i in range(loop): - assert d.get(str(i)) is None - assert d.get("abc") is None - assert d.get("def") is None - assert d.get("ghi") is x3 - assert d.get("hello") is y + assert d.get(make_key(i)) is None + assert d.get(keys[0]) is None + assert d.get(keys[1]) is None + assert d.get(keys[2]) is x3 + assert 
d.get(keys[3]) is y for i in range(loop, loop*2-5): - assert d.get('%dfoobar' % i) is x1 + assert d.get(make_key(1000 + i)) is x1 return f def test_RWeakValueDictionary(): make_test()() +def test_RWeakValueDictionary_int(): + make_test(keyclass=int)() + def test_rpython_RWeakValueDictionary(): interpret(make_test(loop=12), []) +def test_rpython_RWeakValueDictionary_int(): + interpret(make_test(loop=12, keyclass=int), []) + def test_rpython_prebuilt(): d = RWeakValueDictionary(str, X) living = [X() for i in range(8)] From commits-noreply at bitbucket.org Sun Mar 13 23:41:36 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sun, 13 Mar 2011 23:41:36 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: Simplify code a bit: Message-ID: <20110313224136.75FA1282BDC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42579:9a46f08db058 Date: 2011-03-13 23:10 +0100 http://bitbucket.org/pypy/pypy/changeset/9a46f08db058/ Log: Simplify code a bit: remove the "traits" class and put all methods in the Repr itself. 
diff --git a/pypy/rlib/rweakref.py b/pypy/rlib/rweakref.py --- a/pypy/rlib/rweakref.py +++ b/pypy/rlib/rweakref.py @@ -106,7 +106,7 @@ def specialize_call(self, hop): from pypy.rlib import _rweakvaldict - return _rweakvaldict.specialize_make_weakdict(hop, hop.r_result.traits) + return _rweakvaldict.specialize_make_weakdict(hop) class Entry(extregistry.ExtRegistryEntry): _type_ = RWeakValueDictionary diff --git a/pypy/rlib/_rweakvaldict.py b/pypy/rlib/_rweakvaldict.py --- a/pypy/rlib/_rweakvaldict.py +++ b/pypy/rlib/_rweakvaldict.py @@ -13,8 +13,51 @@ def __init__(self, rtyper, r_key): self.rtyper = rtyper self.r_key = r_key - self.traits = make_WEAKDICT(r_key) - self.lowleveltype = lltype.Ptr(self.traits.WEAKDICT) + + fasthashfn = r_key.get_ll_fasthash_function() + self.ll_keyhash = r_key.get_ll_hash_function() + ll_keyeq = lltype.staticAdtMethod(r_key.get_ll_eq_function()) + + def ll_valid(entries, i): + value = entries[i].value + return bool(value) and bool(weakref_deref(rclass.OBJECTPTR, value)) + + def ll_everused(entries, i): + return bool(entries[i].value) + + def ll_hash(entries, i): + return fasthashfn(entries[i].key) + + entrymeths = { + 'allocate': lltype.typeMethod(rdict._ll_malloc_entries), + 'delete': rdict._ll_free_entries, + 'valid': ll_valid, + 'everused': ll_everused, + 'hash': ll_hash, + } + WEAKDICTENTRY = lltype.Struct("weakdictentry", + ("key", r_key.lowleveltype), + ("value", llmemory.WeakRefPtr)) + WEAKDICTENTRYARRAY = lltype.GcArray(WEAKDICTENTRY, + adtmeths=entrymeths, + hints={'weakarray': 'value'}) + # NB. 
the 'hints' is not used so far ^^^ + + dictmeths = { + 'll_get': self.ll_get, + 'll_set': self.ll_set, + 'keyeq': ll_keyeq, + 'paranoia': False, + } + + self.WEAKDICT = lltype.GcStruct( + "weakvaldict", + ("num_items", lltype.Signed), + ("num_pristine_entries", lltype.Signed), + ("entries", lltype.Ptr(WEAKDICTENTRYARRAY)), + adtmeths=dictmeths) + + self.lowleveltype = lltype.Ptr(self.WEAKDICT) self.dict_cache = {} def convert_const(self, weakdict): @@ -26,7 +69,7 @@ return self.dict_cache[key] except KeyError: self.setup() - l_dict = self.traits.ll_new_weakdict() + l_dict = self.ll_new_weakdict() self.dict_cache[key] = l_dict bk = self.rtyper.annotator.bookkeeper classdef = bk.getuniqueclassdef(weakdict._valueclass) @@ -36,14 +79,13 @@ llvalue = r_value.convert_const(dictvalue) if llvalue: llvalue = lltype.cast_pointer(rclass.OBJECTPTR, llvalue) - self.traits.ll_set_nonnull(l_dict, llkey, llvalue) + self.ll_set_nonnull(l_dict, llkey, llvalue) return l_dict def rtype_method_get(self, hop): - v_d, v_key = hop.inputargs(self, self.r_key) hop.exception_cannot_occur() - v_result = hop.gendirectcall(self.traits.ll_get, v_d, v_key) + v_result = hop.gendirectcall(self.ll_get, v_d, v_key) v_result = hop.genop("cast_pointer", [v_result], resulttype=hop.r_result.lowleveltype) return v_result @@ -53,136 +95,81 @@ v_d, v_key, v_value = hop.inputargs(self, self.r_key, r_object) hop.exception_cannot_occur() if hop.args_s[2].is_constant() and hop.args_s[2].const is None: - hop.gendirectcall(self.traits.ll_set_null, v_d, v_key) + hop.gendirectcall(self.ll_set_null, v_d, v_key) else: - hop.gendirectcall(self.traits.ll_set, v_d, v_key, v_value) + hop.gendirectcall(self.ll_set, v_d, v_key, v_value) -def specialize_make_weakdict(hop, traits): + # ____________________________________________________________ + + @jit.dont_look_inside + def ll_new_weakdict(self): + d = lltype.malloc(self.WEAKDICT) + d.entries = self.WEAKDICT.entries.TO.allocate(rdict.DICT_INITSIZE) + d.num_items = 0 + 
d.num_pristine_entries = rdict.DICT_INITSIZE + return d + + @jit.dont_look_inside + def ll_get(self, d, llkey): + hash = self.ll_keyhash(llkey) + i = rdict.ll_dict_lookup(d, llkey, hash) + #llop.debug_print(lltype.Void, i, 'get') + valueref = d.entries[i].value + if valueref: + return weakref_deref(rclass.OBJECTPTR, valueref) + else: + return lltype.nullptr(rclass.OBJECTPTR.TO) + + @jit.dont_look_inside + def ll_set(self, d, llkey, llvalue): + if llvalue: + self.ll_set_nonnull(d, llkey, llvalue) + else: + self.ll_set_null(d, llkey) + + @jit.dont_look_inside + def ll_set_nonnull(self, d, llkey, llvalue): + hash = self.ll_keyhash(llkey) + valueref = weakref_create(llvalue) # GC effects here, before the rest + i = rdict.ll_dict_lookup(d, llkey, hash) + everused = d.entries.everused(i) + d.entries[i].key = llkey + d.entries[i].value = valueref + #llop.debug_print(lltype.Void, i, 'stored') + if not everused: + d.num_pristine_entries -= 1 + if d.num_pristine_entries * 3 <= len(d.entries): + #llop.debug_print(lltype.Void, 'RESIZE') + self.ll_weakdict_resize(d) + + @jit.dont_look_inside + def ll_set_null(self, d, llkey): + hash = self.ll_keyhash(llkey) + i = rdict.ll_dict_lookup(d, llkey, hash) + if d.entries.everused(i): + # If the entry was ever used, clean up its key and value. + # We don't store a NULL value, but a dead weakref, because + # the entry must still be marked as everused(). 
+ d.entries[i].value = llmemory.dead_wref + if isinstance(self.r_key.lowleveltype, lltype.Ptr): + d.entries[i].key = self.r_key.convert_const(None) + else: + d.entries[i].key = self.r_key.convert_const(0) + #llop.debug_print(lltype.Void, i, 'zero') + + def ll_weakdict_resize(self, d): + # first set num_items to its correct, up-to-date value + entries = d.entries + num_items = 0 + for i in range(len(entries)): + if entries.valid(i): + num_items += 1 + d.num_items = num_items + rdict.ll_dict_resize(d) + +def specialize_make_weakdict(hop): hop.exception_cannot_occur() - v_d = hop.gendirectcall(traits.ll_new_weakdict) + v_d = hop.gendirectcall(hop.r_result.ll_new_weakdict) return v_d -# ____________________________________________________________ - -def make_WEAKDICT(r_key): - KEY = r_key.lowleveltype - ll_keyhash = r_key.get_ll_hash_function() - if isinstance(KEY, lltype.Ptr): - zero_key = r_key.convert_const(None) - else: - zero_key = r_key.convert_const(0) - - WEAKDICTENTRY = lltype.Struct("weakdictentry", - ("key", KEY), - ("value", llmemory.WeakRefPtr)) - - def ll_valid(entries, i): - value = entries[i].value - return bool(value) and bool(weakref_deref(rclass.OBJECTPTR, value)) - - def ll_everused(entries, i): - return bool(entries[i].value) - - def ll_hash(entries, i): - return fasthashfn(entries[i].key) - fasthashfn = r_key.get_ll_fasthash_function() - - entrymeths = { - 'allocate': lltype.typeMethod(rdict._ll_malloc_entries), - 'delete': rdict._ll_free_entries, - 'valid': ll_valid, - 'everused': ll_everused, - 'hash': ll_hash, - } - WEAKDICTENTRYARRAY = lltype.GcArray(WEAKDICTENTRY, - adtmeths=entrymeths, - hints={'weakarray': 'value'}) - # NB. 
the 'hints' is not used so far ^^^ - - class Traits: - @staticmethod - @jit.dont_look_inside - def ll_new_weakdict(): - d = lltype.malloc(Traits.WEAKDICT) - d.entries = Traits.WEAKDICT.entries.TO.allocate(rdict.DICT_INITSIZE) - d.num_items = 0 - d.num_pristine_entries = rdict.DICT_INITSIZE - return d - - @staticmethod - @jit.dont_look_inside - def ll_get(d, llkey): - hash = ll_keyhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) - #llop.debug_print(lltype.Void, i, 'get') - valueref = d.entries[i].value - if valueref: - return weakref_deref(rclass.OBJECTPTR, valueref) - else: - return lltype.nullptr(rclass.OBJECTPTR.TO) - - @staticmethod - @jit.dont_look_inside - def ll_set(d, llkey, llvalue): - if llvalue: - Traits.ll_set_nonnull(d, llkey, llvalue) - else: - Traits.ll_set_null(d, llkey) - - @staticmethod - @jit.dont_look_inside - def ll_set_nonnull(d, llkey, llvalue): - hash = ll_keyhash(llkey) - valueref = weakref_create(llvalue) # GC effects here, before the rest - i = rdict.ll_dict_lookup(d, llkey, hash) - everused = d.entries.everused(i) - d.entries[i].key = llkey - d.entries[i].value = valueref - #llop.debug_print(lltype.Void, i, 'stored') - if not everused: - d.num_pristine_entries -= 1 - if d.num_pristine_entries * 3 <= len(d.entries): - #llop.debug_print(lltype.Void, 'RESIZE') - Traits.ll_weakdict_resize(d) - - @staticmethod - @jit.dont_look_inside - def ll_set_null(d, llkey): - hash = ll_keyhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) - if d.entries.everused(i): - # If the entry was ever used, clean up its key and value. - # We don't store a NULL value, but a dead weakref, because - # the entry must still be marked as everused(). 
- d.entries[i].value = llmemory.dead_wref - d.entries[i].key = zero_key - #llop.debug_print(lltype.Void, i, 'zero') - - @staticmethod - def ll_weakdict_resize(d): - # first set num_items to its correct, up-to-date value - entries = d.entries - num_items = 0 - for i in range(len(entries)): - if entries.valid(i): - num_items += 1 - d.num_items = num_items - rdict.ll_dict_resize(d) - - ll_keyeq = lltype.staticAdtMethod(r_key.get_ll_eq_function()) - - dictmeths = { - 'll_get': ll_get, - 'll_set': ll_set, - 'keyeq': ll_keyeq, - 'paranoia': False, - } - - WEAKDICT = lltype.GcStruct("weakvaldict", - ("num_items", lltype.Signed), - ("num_pristine_entries", lltype.Signed), - ("entries", lltype.Ptr(WEAKDICTENTRYARRAY)), - adtmeths=dictmeths) - - return Traits From commits-noreply at bitbucket.org Sun Mar 13 23:41:38 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sun, 13 Mar 2011 23:41:38 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: Use RWeakValueDictionary for _rawffi callbacks Message-ID: <20110313224138.2B46B282BD4@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42580:eedcddbe0991 Date: 2011-03-13 23:37 +0100 http://bitbucket.org/pypy/pypy/changeset/eedcddbe0991/ Log: Use RWeakValueDictionary for _rawffi callbacks diff --git a/pypy/module/_rawffi/callback.py b/pypy/module/_rawffi/callback.py --- a/pypy/module/_rawffi/callback.py +++ b/pypy/module/_rawffi/callback.py @@ -8,6 +8,7 @@ wrap_value, unwrap_value, unwrap_truncate_int, unpack_argshapes from pypy.rlib.clibffi import USERDATA_P, CallbackFuncPtr, FUNCFLAG_CDECL from pypy.rlib.clibffi import ffi_type_void +from pypy.rlib import rweakref from pypy.module._rawffi.tracker import tracker from pypy.interpreter.error import OperationError from pypy.interpreter import gateway @@ -23,7 +24,7 @@ def callback(ll_args, ll_res, ll_userdata): userdata = rffi.cast(USERDATA_P, ll_userdata) - callback_ptr = global_counter.CallbackPtr_by_number[userdata.addarg] + callback_ptr = 
global_counter.get(userdata.addarg) w_callable = callback_ptr.w_callable argtypes = callback_ptr.argtypes space = callback_ptr.space @@ -50,18 +51,8 @@ for i in range(resshape.size): ll_res[i] = '\x00' -# XXX some weird hackery to be able to recover W_CallbackPtr object -# out of number -class GlobalCounter: - def __init__(self): - self.CallbackPtr_id = 0 - self.CallbackPtr_by_number = {} +class W_CallbackPtr(W_DataInstance): -global_counter = GlobalCounter() - -class W_CallbackPtr(W_DataInstance): - global_counter = global_counter - def __init__(self, space, w_callable, w_args, w_result, flags=FUNCFLAG_CDECL): self.space = space @@ -74,13 +65,9 @@ else: self.result = None ffiresult = ffi_type_void - # necessary to keep stuff alive - number = global_counter.CallbackPtr_id - global_counter.CallbackPtr_id += 1 - global_counter.CallbackPtr_by_number[number] = self - self.number = number + self.number = global_counter.add(self) self.ll_callback = CallbackFuncPtr(ffiargs, ffiresult, - callback, number, flags) + callback, self.number, flags) self.ll_buffer = rffi.cast(rffi.VOIDP, self.ll_callback.ll_closure) if tracker.DO_TRACING: addr = rffi.cast(lltype.Signed, self.ll_callback.ll_closure) @@ -90,7 +77,27 @@ if tracker.DO_TRACING: addr = rffi.cast(lltype.Signed, self.ll_callback.ll_closure) tracker.trace_free(addr) - del self.global_counter.CallbackPtr_by_number[self.number] + global_counter.remove(self.number) + +# A global storage to be able to recover W_CallbackPtr object out of number +class GlobalCounter: + def __init__(self): + self.callback_id = 0 + self.callbacks = rweakref.RWeakValueDictionary(int, W_CallbackPtr) + + def add(self, w_callback): + self.callback_id += 1 + id = self.callback_id + self.callbacks.set(id, w_callback) + return id + + def remove(self, id): + self.callbacks.set(id, None) + + def get(self, id): + return self.callbacks.get(id) + +global_counter = GlobalCounter() @unwrap_spec(flags=int) def descr_new_callbackptr(space, w_type, w_callable, 
w_args, w_result, diff --git a/pypy/module/_rawffi/test/test__rawffi.py b/pypy/module/_rawffi/test/test__rawffi.py --- a/pypy/module/_rawffi/test/test__rawffi.py +++ b/pypy/module/_rawffi/test/test__rawffi.py @@ -214,10 +214,6 @@ cls.w_sizes_and_alignments = space.wrap(dict( [(k, (v.c_size, v.c_alignment)) for k,v in TYPEMAP.iteritems()])) - def teardown_method(self, func): - from pypy.module._rawffi.callback import global_counter - global_counter.CallbackPtr_by_number.clear() - def test_libload(self): import _rawffi _rawffi.CDLL(self.libc_name) From commits-noreply at bitbucket.org Mon Mar 14 00:27:05 2011 From: commits-noreply at bitbucket.org (mitsuhiko) Date: Mon, 14 Mar 2011 00:27:05 +0100 (CET) Subject: [pypy-svn] pypy default: (fijal, mitsuhiko, alex): improved cache estimation for gc nursery on OS X Message-ID: <20110313232705.96E0A282B90@codespeak.net> Author: Armin Ronacher Branch: Changeset: r42581:c56da18c083e Date: 2011-03-13 19:26 -0400 http://bitbucket.org/pypy/pypy/changeset/c56da18c083e/ Log: (fijal, mitsuhiko, alex): improved cache estimation for gc nursery on OS X diff --git a/pypy/tool/jitlogparser/parser.py b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ b/pypy/tool/jitlogparser/parser.py @@ -108,6 +108,7 @@ self.operations = operations self.storage = storage self.code = storage.disassemble_code(self.filename, self.startlineno) + assert self.code is not None, 'could not find source file' def repr(self): if self.filename is None: diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -190,44 +190,55 @@ rffi.VOIDP, rffi.SIZE_T], rffi.INT, sandboxsafe=True) + +def get_darwin_cache_size(cache_key): + cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') + try: + len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') + try: + size = rffi.sizeof(rffi.LONGLONG) + cache_p[0] = rffi.cast(rffi.LONGLONG, 0) + len_p[0] = 
rffi.cast(rffi.SIZE_T, size) + # XXX a hack for llhelper not being robust-enough + result = sysctlbyname(cache_key, + rffi.cast(rffi.VOIDP, cache_p), + len_p, + lltype.nullptr(rffi.VOIDP.TO), + rffi.cast(rffi.SIZE_T, 0)) + if (rffi.cast(lltype.Signed, result) == 0 and + rffi.cast(lltype.Signed, len_p[0]) == size): + cache = rffi.cast(lltype.Signed, cache_p[0]) + if rffi.cast(rffi.LONGLONG, cache) != cache_p[0]: + cache = 0 # overflow! + return cache + finally: + lltype.free(len_p, flavor='raw') + finally: + lltype.free(cache_p, flavor='raw') + + def get_L2cache_darwin(): """Try to estimate the best nursery size at run-time, depending on the machine we are running on. """ debug_start("gc-hardware") - L2cache = 0 - l2cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') - try: - len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') - try: - size = rffi.sizeof(rffi.LONGLONG) - l2cache_p[0] = rffi.cast(rffi.LONGLONG, 0) - len_p[0] = rffi.cast(rffi.SIZE_T, size) - # XXX a hack for llhelper not being robust-enough - result = sysctlbyname("hw.l2cachesize", - rffi.cast(rffi.VOIDP, l2cache_p), - len_p, - lltype.nullptr(rffi.VOIDP.TO), - rffi.cast(rffi.SIZE_T, 0)) - if (rffi.cast(lltype.Signed, result) == 0 and - rffi.cast(lltype.Signed, len_p[0]) == size): - L2cache = rffi.cast(lltype.Signed, l2cache_p[0]) - if rffi.cast(rffi.LONGLONG, L2cache) != l2cache_p[0]: - L2cache = 0 # overflow! 
- finally: - lltype.free(len_p, flavor='raw') - finally: - lltype.free(l2cache_p, flavor='raw') + L2cache = get_darwin_cache_size("hw.l2cachesize") + L3cache = get_darwin_cache_size("hw.l3cachesize") debug_print("L2cache =", L2cache) + debug_print("L3cache =", L3cache) debug_stop("gc-hardware") - if L2cache > 0: - return L2cache + + mangled = L2cache + L3cache + + if mangled > 0: + return mangled else: # Print a top-level warning even in non-debug builds llop.debug_print(lltype.Void, "Warning: cannot find your CPU L2 cache size with sysctl()") return -1 + # -------------------- get_L2cache = globals().get('get_L2cache_' + sys.platform, From commits-noreply at bitbucket.org Mon Mar 14 00:40:30 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 00:40:30 +0100 (CET) Subject: [pypy-svn] pypy default: Improve arbitrary limit of cache size on OS X Message-ID: <20110313234030.D870D282B90@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42582:2f5ddc612b7d Date: 2011-03-13 18:57 -0400 http://bitbucket.org/pypy/pypy/changeset/2f5ddc612b7d/ Log: Improve arbitrary limit of cache size on OS X diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py --- a/pypy/rpython/memory/gc/minimark.py +++ b/pypy/rpython/memory/gc/minimark.py @@ -115,7 +115,6 @@ ('forw', llmemory.Address)) FORWARDSTUBPTR = lltype.Ptr(FORWARDSTUB) - # ____________________________________________________________ class MiniMarkGC(MovingGCBase): diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -233,13 +233,17 @@ get_L2cache = globals().get('get_L2cache_' + sys.platform, lambda: -1) # implement me for other platforms +NURSERY_SIZE_UNKNOWN_CACHE = 1024*1024*1024 +# arbitrary 1M. 
better than default of 131k for most cases +# in case it didn't work + def best_nursery_size_for_L2cache(L2cache): # Heuristically, the best nursery size to choose is about half # of the L2 cache. if L2cache > 0: return L2cache // 2 else: - return -1 + return NURSERY_SIZE_UNKNOWN_CACHE def estimate_best_nursery_size(): """Try to estimate the best nursery size at run-time, depending From commits-noreply at bitbucket.org Mon Mar 14 00:40:31 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 00:40:31 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110313234031.905AC282BD4@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42583:1e04185aa0e7 Date: 2011-03-13 19:40 -0400 http://bitbucket.org/pypy/pypy/changeset/1e04185aa0e7/ Log: merge diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -190,44 +190,55 @@ rffi.VOIDP, rffi.SIZE_T], rffi.INT, sandboxsafe=True) + +def get_darwin_cache_size(cache_key): + cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') + try: + len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') + try: + size = rffi.sizeof(rffi.LONGLONG) + cache_p[0] = rffi.cast(rffi.LONGLONG, 0) + len_p[0] = rffi.cast(rffi.SIZE_T, size) + # XXX a hack for llhelper not being robust-enough + result = sysctlbyname(cache_key, + rffi.cast(rffi.VOIDP, cache_p), + len_p, + lltype.nullptr(rffi.VOIDP.TO), + rffi.cast(rffi.SIZE_T, 0)) + if (rffi.cast(lltype.Signed, result) == 0 and + rffi.cast(lltype.Signed, len_p[0]) == size): + cache = rffi.cast(lltype.Signed, cache_p[0]) + if rffi.cast(rffi.LONGLONG, cache) != cache_p[0]: + cache = 0 # overflow! + return cache + finally: + lltype.free(len_p, flavor='raw') + finally: + lltype.free(cache_p, flavor='raw') + + def get_L2cache_darwin(): """Try to estimate the best nursery size at run-time, depending on the machine we are running on. 
""" debug_start("gc-hardware") - L2cache = 0 - l2cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') - try: - len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') - try: - size = rffi.sizeof(rffi.LONGLONG) - l2cache_p[0] = rffi.cast(rffi.LONGLONG, 0) - len_p[0] = rffi.cast(rffi.SIZE_T, size) - # XXX a hack for llhelper not being robust-enough - result = sysctlbyname("hw.l2cachesize", - rffi.cast(rffi.VOIDP, l2cache_p), - len_p, - lltype.nullptr(rffi.VOIDP.TO), - rffi.cast(rffi.SIZE_T, 0)) - if (rffi.cast(lltype.Signed, result) == 0 and - rffi.cast(lltype.Signed, len_p[0]) == size): - L2cache = rffi.cast(lltype.Signed, l2cache_p[0]) - if rffi.cast(rffi.LONGLONG, L2cache) != l2cache_p[0]: - L2cache = 0 # overflow! - finally: - lltype.free(len_p, flavor='raw') - finally: - lltype.free(l2cache_p, flavor='raw') + L2cache = get_darwin_cache_size("hw.l2cachesize") + L3cache = get_darwin_cache_size("hw.l3cachesize") debug_print("L2cache =", L2cache) + debug_print("L3cache =", L3cache) debug_stop("gc-hardware") - if L2cache > 0: - return L2cache + + mangled = L2cache + L3cache + + if mangled > 0: + return mangled else: # Print a top-level warning even in non-debug builds llop.debug_print(lltype.Void, "Warning: cannot find your CPU L2 cache size with sysctl()") return -1 + # -------------------- get_L2cache = globals().get('get_L2cache_' + sys.platform, From commits-noreply at bitbucket.org Mon Mar 14 01:58:48 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 01:58:48 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Merged upstream. Message-ID: <20110314005848.2BDBF282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42584:c81c48acb556 Date: 2011-03-13 19:07 -0400 http://bitbucket.org/pypy/pypy/changeset/c81c48acb556/ Log: Merged upstream. 
From commits-noreply at bitbucket.org Mon Mar 14 01:58:49 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 01:58:49 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (fijal, alex): fix tests and translation (sort of) Message-ID: <20110314005849.32760282B90@codespeak.net> Author: Alex Gaynor Branch: jit-lsprofile Changeset: r42585:0263e80bf9e8 Date: 2011-03-13 20:40 -0400 http://bitbucket.org/pypy/pypy/changeset/0263e80bf9e8/ Log: (fijal, alex): fix tests and translation (sort of) diff --git a/pypy/translator/c/src/timer.h b/pypy/translator/c/src/timer.h --- a/pypy/translator/c/src/timer.h +++ b/pypy/translator/c/src/timer.h @@ -3,12 +3,11 @@ /* XXX Some overlap with the stuff in debug_print */ +#define OP_LL_READ_TIMESTAMP(v) v = pypy_read_timestamp(); #ifndef PYPY_NOT_MAIN_FILE /* implementations */ -#define OP_LL_READ_TIMESTAMP(v) v = pypy_read_timestamp(); - #ifdef _WIN32 long long pypy_read_timestamp(void) { long long timestamp; diff --git a/pypy/module/_lsprof/test/test_cprofile.py b/pypy/module/_lsprof/test/test_cprofile.py --- a/pypy/module/_lsprof/test/test_cprofile.py +++ b/pypy/module/_lsprof/test/test_cprofile.py @@ -102,7 +102,7 @@ f() profiler.disable() stats = profiler.getstats() - xxx + assert stats def test_cprofile(self): import sys, os From commits-noreply at bitbucket.org Mon Mar 14 02:06:40 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 02:06:40 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, fijal) split unicode_join and str_join into non-looping and looping Message-ID: <20110314010640.5DD9A282B90@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42586:5534161458d3 Date: 2011-03-13 21:06 -0400 http://bitbucket.org/pypy/pypy/changeset/5534161458d3/ Log: (alex, fijal) split unicode_join and str_join into non-looping and looping part diff --git a/pypy/objspace/std/unicodeobject.py b/pypy/objspace/std/unicodeobject.py --- a/pypy/objspace/std/unicodeobject.py +++ 
b/pypy/objspace/std/unicodeobject.py @@ -193,6 +193,9 @@ if space.is_w(space.type(w_s), space.w_unicode): return w_s + return _unicode_join_many_items(space, w_self, list_w, size) + +def _unicode_join_many_items(space, w_self, list_w, size): self = w_self._value sb = UnicodeBuilder() for i in range(size): diff --git a/pypy/objspace/std/stringobject.py b/pypy/objspace/std/stringobject.py --- a/pypy/objspace/std/stringobject.py +++ b/pypy/objspace/std/stringobject.py @@ -338,6 +338,9 @@ space.is_w(space.type(w_s), space.w_unicode)): return w_s + return _str_join_many_items(space, w_self, list_w, size) + +def _str_join_many_items(space, w_self, list_w, size): self = w_self._value reslen = len(self) * (size - 1) for i in range(size): From commits-noreply at bitbucket.org Mon Mar 14 02:41:56 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 02:41:56 +0100 (CET) Subject: [pypy-svn] pypy default: Fix jitviwer for regex. Message-ID: <20110314014156.A00E4282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42587:bbc7904297a5 Date: 2011-03-13 21:41 -0400 http://bitbucket.org/pypy/pypy/changeset/bbc7904297a5/ Log: Fix jitviwer for regex. 
diff --git a/pypy/tool/jitlogparser/parser.py b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ b/pypy/tool/jitlogparser/parser.py @@ -4,7 +4,7 @@ class Op(object): bridge = None - + def __init__(self, name, args, res, descr): self.name = name self.args = args @@ -56,7 +56,7 @@ def parse_from_input(cls, input): return cls(input, None, {}, 'lltype', None, nonstrict=True).parse() - + def parse_args(self, opname, argspec): if not argspec.strip(): return [], None @@ -92,7 +92,7 @@ bytecode_name = None is_bytecode = True inline_level = None - + def __init__(self, operations, storage): if operations[0].name == 'debug_merge_point': self.inline_level = int(operations[0].args[1]) @@ -108,7 +108,7 @@ self.operations = operations self.storage = storage self.code = storage.disassemble_code(self.filename, self.startlineno) - assert self.code is not None, 'could not find source file' + assert self.filename is None or self.code is not None, 'could not find source file' def repr(self): if self.filename is None: @@ -147,7 +147,7 @@ # factory method TraceForOpcode = TraceForOpcode - + def __init__(self, chunks, path, storage): self.path = path self.chunks = chunks @@ -238,7 +238,7 @@ return "Unknown" return "%s, file '%s', line %d" % (self.name, self.filename, self.startlineno) - + def __repr__(self): return "[%s]" % ", ".join([repr(chunk) for chunk in self.chunks]) From commits-noreply at bitbucket.org Mon Mar 14 02:41:56 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 02:41:56 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110314014156.C99AA282BD4@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42588:e4c01e3c8364 Date: 2011-03-13 21:41 -0400 http://bitbucket.org/pypy/pypy/changeset/e4c01e3c8364/ Log: merged upstream From commits-noreply at bitbucket.org Mon Mar 14 03:19:22 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 03:19:22 +0100 (CET) Subject: 
[pypy-svn] pypy default: (alex, fijal): remove assert that breaks tests Message-ID: <20110314021922.DDBD1282BD8@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42589:65a45901bef3 Date: 2011-03-13 22:19 -0400 http://bitbucket.org/pypy/pypy/changeset/65a45901bef3/ Log: (alex, fijal): remove assert that breaks tests diff --git a/pypy/tool/jitlogparser/parser.py b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ b/pypy/tool/jitlogparser/parser.py @@ -108,7 +108,6 @@ self.operations = operations self.storage = storage self.code = storage.disassemble_code(self.filename, self.startlineno) - assert self.filename is None or self.code is not None, 'could not find source file' def repr(self): if self.filename is None: diff --git a/pypy/tool/jitlogparser/test/test_parser.py b/pypy/tool/jitlogparser/test/test_parser.py --- a/pypy/tool/jitlogparser/test/test_parser.py +++ b/pypy/tool/jitlogparser/test/test_parser.py @@ -68,7 +68,7 @@ assert isinstance(res.chunks[2], TraceForOpcode) assert res.chunks[1].path == "1" assert len(res.chunks[1].chunks) == 3 - + def test_name(): ops = parse(''' [i0] From commits-noreply at bitbucket.org Mon Mar 14 03:38:15 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 03:38:15 +0100 (CET) Subject: [pypy-svn] jitviewer default: (alex, fijal): Raise a useful error if the file is not found. Message-ID: <20110314023815.C7CAD36C20C@codespeak.net> Author: Alex Gaynor Branch: Changeset: r106:001fb4a85491 Date: 2011-03-13 22:36 -0400 http://bitbucket.org/pypy/jitviewer/changeset/001fb4a85491/ Log: (alex, fijal): Raise a useful error if the file is not found. 
diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -41,6 +41,9 @@ CUTOFF = 30 +class CannotFindFile(Exception): + pass + class Server(object): def __init__(self, storage): self.storage = storage @@ -123,6 +126,13 @@ self._root_path = kwargs.pop('root_path') flask.Flask.__init__(self, *args, **kwargs) +class CheckingLoopStorage(LoopStorage): + def disassemble_code(self, fname, startlineno): + result = super(CheckingLoopStorage, self).disassemble_code(fname, startlineno) + if result is None and fname is not None: + raise CannotFindFile(fname) + return result + def main(): PATH = os.path.join(os.path.dirname( os.path.dirname(_jitviewer.__file__))) @@ -139,7 +149,7 @@ port = 5000 else: port = int(sys.argv[2]) - storage = LoopStorage(extra_path) + storage = CheckingLoopStorage(extra_path) loops = [ParserWithHtmlRepr.parse_from_input(l) for l in extract_category(log, "jit-log-opt-")] parse_log_counts(extract_category(log, 'jit-backend-count'), loops) From commits-noreply at bitbucket.org Mon Mar 14 03:38:16 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 03:38:16 +0100 (CET) Subject: [pypy-svn] jitviewer default: merged upstream. Message-ID: <20110314023816.26600282BD8@codespeak.net> Author: Alex Gaynor Branch: Changeset: r107:aa0319272065 Date: 2011-03-13 22:38 -0400 http://bitbucket.org/pypy/jitviewer/changeset/aa0319272065/ Log: merged upstream. 
diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -19,6 +19,17 @@ except ImportError: sys.path.insert(0, os.path.abspath(os.path.join(__file__, '..', '..'))) +try: + import pypy +except ImportError: + import __pypy__ + sys.path.append(os.path.join(__pypy__.__file__, '..', '..', '..')) + try: + import pypy + except ImportError: + raise ImportError('Could not import pypy module, make sure to ' + 'add the pypy module to PYTHONPATH') + import cgi import flask import inspect From commits-noreply at bitbucket.org Mon Mar 14 04:19:15 2011 From: commits-noreply at bitbucket.org (carljm) Date: Mon, 14 Mar 2011 04:19:15 +0100 (CET) Subject: [pypy-svn] pypy merge-stdlib: Removed references to modified-* stdlib directories. Message-ID: <20110314031915.D3F1D282BD8@codespeak.net> Author: Carl Meyer Branch: merge-stdlib Changeset: r42591:dd43f492db87 Date: 2011-03-13 22:21 -0400 http://bitbucket.org/pypy/pypy/changeset/dd43f492db87/ Log: Removed references to modified-* stdlib directories. diff --git a/lib-python/2.7.0/test/regrtest.py b/lib-python/2.7.0/test/regrtest.py --- a/lib-python/2.7.0/test/regrtest.py +++ b/lib-python/2.7.0/test/regrtest.py @@ -676,13 +676,8 @@ def findtests(testdir=None, stdtests=STDTESTS, nottests=NOTTESTS): """Return a list of all applicable test modules.""" - if testdir: - testdirs = [testdir] - else: - testdirs = findtestdirs() - names = {} - for testdir in testdirs: - names.update(dict.fromkeys(os.listdir(testdir))) + testdir = findtestdir(testdir) + names = os.listdir(testdir) tests = [] others = set(stdtests) | nottests for name in names: @@ -852,6 +847,7 @@ def runtest_inner(test, verbose, quiet, testdir=None, huntrleaks=False): test_support.unload(test) + testdir = findtestdir(testdir) if verbose: capture_stdout = None else: @@ -1084,19 +1080,8 @@ # Collect cyclic trash. 
gc.collect() -def findtestdirs(): - # XXX hacking: returns a list of both the '2.7.0/test' and the - # 'modified-2.7.0/test' directories, as full paths. - testdir = os.path.abspath(os.path.dirname(__file__) or os.curdir) - assert os.path.basename(testdir).lower() == 'test' - maindir = os.path.dirname(testdir) - libpythondir = os.path.dirname(maindir) - maindirname = os.path.basename(maindir).lower() - if maindirname.startswith('modified-'): - maindirname = maindirname[len('modified-'):] - testdir1 = os.path.join(libpythondir, maindirname, 'test') - testdir2 = os.path.join(libpythondir, 'modified-'+maindirname, 'test') - return [testdir1, testdir2] +def findtestdir(path=None): + return path or os.path.dirname(__file__) or os.curdir def removepy(names): if not names: @@ -1518,7 +1503,13 @@ return self.expected if __name__ == '__main__': - # Simplification for findtestdir(). + # findtestdir() gets the dirname out of __file__, so we have to make it + # absolute before changing the working directory. + # For example __file__ may be relative when running trace or profile. + # See issue #9323. 
+ __file__ = os.path.abspath(__file__) + + # sanity check assert __file__ == os.path.abspath(sys.argv[0]) # When tests are run from the Python build directory, it is best practice diff --git a/pypy/tool/lib_pypy.py b/pypy/tool/lib_pypy.py --- a/pypy/tool/lib_pypy.py +++ b/pypy/tool/lib_pypy.py @@ -7,7 +7,6 @@ LIB_PYPY = LIB_ROOT.join('lib_pypy') LIB_PYTHON = LIB_ROOT.join('lib-python') LIB_PYTHON_VANILLA = LIB_PYTHON.join('%d.%d.%d' % CPYTHON_VERSION[:3]) -LIB_PYTHON_MODIFIED = LIB_PYTHON.join('modified-%d.%d.%d' % CPYTHON_VERSION[:3]) def import_from_lib_pypy(modname): diff --git a/pypy/tool/pytest/confpath.py b/pypy/tool/pytest/confpath.py --- a/pypy/tool/pytest/confpath.py +++ b/pypy/tool/pytest/confpath.py @@ -9,4 +9,3 @@ assert pypydir.check(dir=1) libpythondir = lib_pypy.LIB_PYTHON regrtestdir = lib_pypy.LIB_PYTHON_VANILLA.join('test') -modregrtestdir = lib_pypy.LIB_PYTHON_MODIFIED.join('test') diff --git a/pypy/tool/test/test_lib_pypy.py b/pypy/tool/test/test_lib_pypy.py --- a/pypy/tool/test/test_lib_pypy.py +++ b/pypy/tool/test/test_lib_pypy.py @@ -8,7 +8,6 @@ def test_lib_python_exists(): assert lib_pypy.LIB_PYTHON.check(dir=1) assert lib_pypy.LIB_PYTHON_VANILLA.check(dir=1) - assert lib_pypy.LIB_PYTHON_MODIFIED.check(dir=1) def test_import_from_lib_pypy(): binascii = lib_pypy.import_from_lib_pypy('binascii') diff --git a/pypy/translator/goal/test2/test_app_main.py b/pypy/translator/goal/test2/test_app_main.py --- a/pypy/translator/goal/test2/test_app_main.py +++ b/pypy/translator/goal/test2/test_app_main.py @@ -23,8 +23,8 @@ def getscript(source): p = _get_next_path() p.write(str(py.code.Source(source))) - # return relative path for testing purposes - return py.path.local().bestrelpath(p) + # return relative path for testing purposes + return py.path.local().bestrelpath(p) def getscript_pyc(space, source): p = _get_next_path() @@ -49,7 +49,7 @@ pdir = _get_next_path(ext='') p = pdir.ensure(dir=1).join('__main__.py') p.write(str(py.code.Source(source))) - 
# return relative path for testing purposes + # return relative path for testing purposes return py.path.local().bestrelpath(pdir) demo_script = getscript(""" @@ -192,7 +192,7 @@ monkeypatch.setenv('PYTHONNOUSERSITE', '1') expected = {"no_user_site": True} self.check(['-c', 'pass'], sys_argv=['-c'], run_command='pass', **expected) - + class TestInteraction: """ @@ -780,9 +780,9 @@ # ------------------------------------ # setup code for test_get_library_path # ------------------------------------ - from pypy.module.sys.version import CPYTHON_VERSION, PYPY_VERSION + from pypy.module.sys.version import CPYTHON_VERSION cpy_ver = '%d.%d.%d' % CPYTHON_VERSION[:3] - + goal_dir = os.path.dirname(app_main) # build a directory hierarchy like which contains both bin/pypy-c and # lib/pypy1.2/* @@ -790,9 +790,8 @@ fake_exe = prefix.join('bin/pypy-c').ensure(file=1) expected_path = [str(prefix.join(subdir).ensure(dir=1)) for subdir in ('lib_pypy', - 'lib-python/modified-%s' % cpy_ver, 'lib-python/%s' % cpy_ver)] - + self.w_goal_dir = self.space.wrap(goal_dir) self.w_fake_exe = self.space.wrap(str(fake_exe)) self.w_expected_path = self.space.wrap(expected_path) @@ -822,7 +821,7 @@ app_main.os = os pypy_c = os.path.join(self.trunkdir, 'pypy', 'translator', 'goal', 'pypy-c') newpath = app_main.get_library_path(pypy_c) - # we get at least lib_pypy, lib-python/modified-X.Y.Z, + # we get at least lib_pypy, # lib-python/X.Y.Z, and maybe more (e.g. 
plat-linux2) assert len(newpath) >= 3 for p in newpath: diff --git a/pypy/translator/sandbox/test/test_pypy_interact.py b/pypy/translator/sandbox/test/test_pypy_interact.py --- a/pypy/translator/sandbox/test/test_pypy_interact.py +++ b/pypy/translator/sandbox/test/test_pypy_interact.py @@ -1,5 +1,5 @@ import autopath -import os, sys, stat, errno +import os, stat, errno from pypy.translator.sandbox.pypy_interact import PyPySandboxedProc from pypy.translator.interactive import Translation from pypy.module.sys.version import CPYTHON_VERSION @@ -8,7 +8,7 @@ SITE_PY_CONTENT = open(os.path.join(autopath.pypydir, '..', 'lib-python', - 'modified-' + VERSION, 'site.py'), + VERSION, 'site.py'), 'rb').read() ERROR_TEXT = os.strerror(errno.ENOENT) @@ -39,15 +39,15 @@ pass else: assert_(False, "os.stat('site') should have failed") - st = os.stat('/bin/lib-python/modified-%s/site.py' % VERSION) + st = os.stat('/bin/lib-python/%s/site.py' % VERSION) assert_(stat.S_ISREG(st.st_mode), "bad st_mode for .../site.py") try: - os.stat('/bin/lib-python/modified-%s/site.pyc' % VERSION) + os.stat('/bin/lib-python/%s/site.pyc' % VERSION) except OSError: pass else: assert_(False, "os.stat('....pyc') should have failed") - fd = os.open('/bin/lib-python/modified-%s/site.py' % VERSION, + fd = os.open('/bin/lib-python/%s/site.py' % VERSION, os.O_RDONLY, 0666) length = 8192 ofs = 0 diff --git a/lib-python/2.7.0/site.py b/lib-python/2.7.0/site.py --- a/lib-python/2.7.0/site.py +++ b/lib-python/2.7.0/site.py @@ -89,10 +89,7 @@ if hasattr(m, '__loader__'): continue # don't mess with a PEP 302-supplied __file__ try: - prev = m.__file__ - new = os.path.abspath(m.__file__) - if prev != new: - m.__file__ = new + m.__file__ = os.path.abspath(m.__file__) except (AttributeError, OSError): pass diff --git a/pypy/module/sys/test/test_initialpath.py b/pypy/module/sys/test/test_initialpath.py --- a/pypy/module/sys/test/test_initialpath.py +++ b/pypy/module/sys/test/test_initialpath.py @@ -7,9 +7,8 @@ 
CPYTHON_VERSION[1], CPYTHON_VERSION[2]) a = prefix.join('lib_pypy').ensure(dir=1) - b = prefix.join('lib-python', 'modified-%s' % dirname).ensure(dir=1) - c = prefix.join('lib-python', dirname).ensure(dir=1) - return a, b, c + b = prefix.join('lib-python', dirname).ensure(dir=1) + return a, b def test_stdlib_in_prefix(tmpdir): diff --git a/pypy/tool/stdlib_opcode.py b/pypy/tool/stdlib_opcode.py --- a/pypy/tool/stdlib_opcode.py +++ b/pypy/tool/stdlib_opcode.py @@ -106,8 +106,8 @@ opmap as host_opmap, HAVE_ARGUMENT as host_HAVE_ARGUMENT) def load_pypy_opcode(): - from pypy.tool.lib_pypy import LIB_PYTHON_MODIFIED - opcode_path = LIB_PYTHON_MODIFIED.join('opcode.py') + from pypy.tool.lib_pypy import LIB_PYTHON_VANILLA + opcode_path = LIB_PYTHON_VANILLA.join('opcode.py') d = {} execfile(str(opcode_path), d) for name in __all__: diff --git a/pypy/module/sys/state.py b/pypy/module/sys/state.py --- a/pypy/module/sys/state.py +++ b/pypy/module/sys/state.py @@ -2,7 +2,6 @@ Implementation of interpreter-level 'sys' routines. 
""" import pypy -from pypy.interpreter.error import OperationError from pypy.interpreter.gateway import unwrap_spec import sys, os, stat, errno @@ -10,17 +9,17 @@ # ____________________________________________________________ # -class State: - def __init__(self, space): - self.space = space +class State: + def __init__(self, space): + self.space = space self.w_modules = space.newdict(module=True) self.w_warnoptions = space.newlist([]) self.w_argv = space.newlist([]) - self.setinitialpath(space) + self.setinitialpath(space) - def setinitialpath(self, space): + def setinitialpath(self, space): # Initialize the default path pypydir = os.path.dirname(os.path.abspath(pypy.__file__)) srcdir = os.path.dirname(pypydir) @@ -43,15 +42,12 @@ lib_python = os.path.join(prefix, 'lib-python') python_std_lib = os.path.join(lib_python, dirname) checkdir(python_std_lib) - python_std_lib_modified = os.path.join(lib_python, 'modified-' + dirname) - checkdir(python_std_lib_modified) - + lib_pypy = os.path.join(prefix, 'lib_pypy') checkdir(lib_pypy) importlist = [] importlist.append(lib_pypy) - importlist.append(python_std_lib_modified) importlist.append(python_std_lib) # # List here the extra platform-specific paths. From commits-noreply at bitbucket.org Mon Mar 14 04:19:16 2011 From: commits-noreply at bitbucket.org (carljm) Date: Mon, 14 Mar 2011 04:19:16 +0100 (CET) Subject: [pypy-svn] pypy merge-stdlib: merged from default Message-ID: <20110314031916.36925282BD9@codespeak.net> Author: Carl Meyer Branch: merge-stdlib Changeset: r42592:690d6c7070b6 Date: 2011-03-13 22:27 -0400 http://bitbucket.org/pypy/pypy/changeset/690d6c7070b6/ Log: merged from default From commits-noreply at bitbucket.org Mon Mar 14 04:19:18 2011 From: commits-noreply at bitbucket.org (carljm) Date: Mon, 14 Mar 2011 04:19:18 +0100 (CET) Subject: [pypy-svn] pypy merge-stdlib: Removed some additional modified- references. 
Message-ID: <20110314031918.CD2B636C20D@codespeak.net> Author: Carl Meyer Branch: merge-stdlib Changeset: r42593:52db46c411ae Date: 2011-03-13 23:09 -0400 http://bitbucket.org/pypy/pypy/changeset/52db46c411ae/ Log: Removed some additional modified- references. diff --git a/lib-python/conftest.py b/lib-python/conftest.py --- a/lib-python/conftest.py +++ b/lib-python/conftest.py @@ -18,7 +18,7 @@ from pypy.tool.pytest import appsupport from pypy.tool.pytest.confpath import pypydir, libpythondir, \ - regrtestdir, modregrtestdir, testresultdir + regrtestdir, testresultdir pytest_plugins = "resultlog", rsyncdirs = ['.', '../pypy/'] @@ -75,13 +75,7 @@ return self._compiler #or pypy_option.compiler compiler = property(compiler) - def ismodified(self): - return modregrtestdir.join(self.basename).check() - def getfspath(self): - fn = modregrtestdir.join(self.basename) - if fn.check(): - return fn fn = regrtestdir.join(self.basename) return fn @@ -546,7 +540,7 @@ regrtest = parent.config._basename2spec.get(path.basename, None) if regrtest is None: return - if path.dirpath() not in (modregrtestdir, regrtestdir): + if path.dirpath() != regrtestdir: return return RunFileExternal(path.basename, parent=parent, regrtest=regrtest) @@ -557,11 +551,7 @@ self.fspath = regrtest.getfspath() def collect(self): - if self.regrtest.ismodified(): - name = 'modified' - else: - name = 'unmodified' - return [ReallyRunFileExternal(name, parent=self)] + return [ReallyRunFileExternal("module", parent=self)] # # testmethod: @@ -711,15 +701,3 @@ if regrtest.core: lst.append('core') return lst - -# -# Sanity check (could be done more nicely too) -# -import os -samefile = getattr(os.path, 'samefile', - lambda x,y : str(x) == str(y)) -if samefile(os.getcwd(), str(regrtestdir.dirpath())): - raise NotImplementedError( - "Cannot run py.test with this current directory:\n" - "the app-level sys.path will contain %s before %s)." 
% ( - regrtestdir.dirpath(), modregrtestdir.dirpath())) diff --git a/pypy/tool/pytest/result.py b/pypy/tool/pytest/result.py --- a/pypy/tool/pytest/result.py +++ b/pypy/tool/pytest/result.py @@ -173,10 +173,6 @@ unicode(content, candidate) self.addnamedtext(fn, text) - def ismodifiedtest(self): - # XXX we need proper cross-platform paths! - return 'modified' in self.fspath - def __repr__(self): return '<%s (%s) %r rev=%s>' %(self.__class__.__name__, self['outcome'], diff --git a/pypy/tool/pytest/htmlreport.py b/pypy/tool/pytest/htmlreport.py --- a/pypy/tool/pytest/htmlreport.py +++ b/pypy/tool/pytest/htmlreport.py @@ -75,8 +75,7 @@ def render_test_references(self, result): dest = self.make_single_test_result(result) - modified = result.ismodifiedtest() and " [mod]" or "" - return html.div(html.a(result.path.purebasename + modified, + return html.div(html.a(result.path.purebasename, href=self.getrelpath(dest)), style="background-color: transparent") From commits-noreply at bitbucket.org Mon Mar 14 04:35:31 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 04:35:31 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: (alex, fijal):remove things that are done Message-ID: <20110314033531.4A148282BD8@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3376:cf6fe5769cc6 Date: 2011-03-13 23:35 -0400 http://bitbucket.org/pypy/extradoc/changeset/cf6fe5769cc6/ Log: (alex, fijal):remove things that are done diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -17,10 +17,6 @@ current exception from the struct in memory, followed by a regular GUARD_CLASS. (Armin: Look like a simplification, but it's a bit messy too) -- write a document that says what you cannot expect the jit to optimize. - E.g. http://paste.pocoo.org/show/181319/ with B being old-style and - C being new-style, or vice-versa. 
- - maybe refactor a bit the x86 backend, particularly the register allocation @@ -42,9 +38,6 @@ - support casting from Signed to an opaque pointer -- geninterp fun :-( geninterp'ed functions are not JITted, - unlike plain app-level functions. How about we just kill geninterp? - - local imports should be jitted more efficiently, right now they produce a long trace and they are rather common (e.g. in translate.py) @@ -78,9 +71,6 @@ Should be just a matter of synthesizing reverse operations in rewrite.py -- Call to ll_find right after allocating a ``newstr(1)`` should just change - the call to ll_find_char and remove the allocation if it can. - PYTHON EXAMPLES --------------- From commits-noreply at bitbucket.org Mon Mar 14 04:55:25 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 04:55:25 +0100 (CET) Subject: [pypy-svn] pypy default: merge enable-opts. it fails test_zrpy_gc in backend/x86 for not very good Message-ID: <20110314035525.22148282BD8@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42594:de39afd35f4b Date: 2011-03-13 23:54 -0400 http://bitbucket.org/pypy/pypy/changeset/de39afd35f4b/ Log: merge enable-opts. it fails test_zrpy_gc in backend/x86 for not very good reasons, but we need this on other branches to debug more pressing things. 
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5,7 +5,7 @@ BaseTest) import pypy.jit.metainterp.optimizeopt.optimizer as optimizeopt import pypy.jit.metainterp.optimizeopt.virtualize as virtualize -from pypy.jit.metainterp.optimizeopt import optimize_loop_1 +from pypy.jit.metainterp.optimizeopt import optimize_loop_1, ALL_OPTS_DICT from pypy.jit.metainterp.optimizeutil import InvalidLoop from pypy.jit.metainterp.history import AbstractDescr, ConstInt, BoxInt from pypy.jit.metainterp.history import TreeLoop, LoopToken @@ -188,7 +188,7 @@ def clone_if_mutable(self): return self loop.preamble.start_resumedescr = FakeDescr() - optimize_loop_1(metainterp_sd, loop) + optimize_loop_1(metainterp_sd, loop, ALL_OPTS_DICT) # print diff --git a/pypy/jit/metainterp/test/test_loop.py b/pypy/jit/metainterp/test/test_loop.py --- a/pypy/jit/metainterp/test/test_loop.py +++ b/pypy/jit/metainterp/test/test_loop.py @@ -1,5 +1,5 @@ import py -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, OPTIMIZER_FULL +from pypy.rlib.jit import JitDriver from pypy.rlib.objectmodel import compute_hash from pypy.jit.metainterp.warmspot import ll_meta_interp, get_stats from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin @@ -8,14 +8,15 @@ from pypy.jit.metainterp import history class LoopTest(object): - optimizer = OPTIMIZER_SIMPLE + enable_opts = '' + automatic_promotion_result = { 'int_add' : 6, 'int_gt' : 1, 'guard_false' : 1, 'jump' : 1, 'guard_value' : 3 } def meta_interp(self, f, args, policy=None): - return ll_meta_interp(f, args, optimizer=self.optimizer, + return ll_meta_interp(f, args, enable_opts=self.enable_opts, policy=policy, CPUClass=self.CPUClass, type_system=self.type_system) @@ -58,7 +59,7 @@ res = self.meta_interp(f, [6, 13]) assert res == f(6, 13) self.check_loop_count(1) - if self.optimizer == 
OPTIMIZER_FULL: + if self.enable_opts: self.check_loops(getfield_gc = 0, setfield_gc = 1) def test_loop_with_two_paths(self): @@ -87,7 +88,7 @@ return res * 2 res = self.meta_interp(f, [6, 33], policy=StopAtXPolicy(l)) assert res == f(6, 33) - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(3) else: self.check_loop_count(2) @@ -105,7 +106,7 @@ pattern >>= 1 return 42 self.meta_interp(f, [0xF0F0F0]) - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(3) else: self.check_loop_count(2) @@ -595,7 +596,7 @@ res = self.meta_interp(f, [100, 5], policy=StopAtXPolicy(externfn)) assert res == expected - if self.optimizer == OPTIMIZER_FULL: + if self.enable_opts: self.check_loop_count(2) self.check_tree_loop_count(2) # 1 loop, 1 bridge from interp else: @@ -799,7 +800,6 @@ res = self.meta_interp(f, [200]) - class TestOOtype(LoopTest, OOJitMixin): pass diff --git a/pypy/jit/metainterp/test/test_send_simple.py b/pypy/jit/metainterp/test/test_send_simple.py deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_simple.py +++ /dev/null @@ -1,29 +0,0 @@ -# xxx mostly pointless - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_SIMPLE -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopDummyTest(test_send.SendTests): - optimizer=OPTIMIZER_SIMPLE - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, *args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - -class TestLLtype(LoopDummyTest, LLJitMixin): - pass - -class TestOOtype(LoopDummyTest, OOJitMixin): - pass diff --git a/pypy/jit/metainterp/test/test_send_nounroll.py b/pypy/jit/metainterp/test/test_send_nounroll.py 
deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_nounroll.py +++ /dev/null @@ -1,30 +0,0 @@ - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_NO_UNROLL -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopNoPSpecTest(test_send.SendTests): - optimizer=OPTIMIZER_NO_UNROLL - - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, *args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - - -class TestLLtype(LoopNoPSpecTest, LLJitMixin): - pass - -class TestOOtype(LoopNoPSpecTest, OOJitMixin): - pass diff --git a/pypy/jit/metainterp/nounroll_optimize.py b/pypy/jit/metainterp/nounroll_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/nounroll_optimize.py +++ /dev/null @@ -1,36 +0,0 @@ - -from pypy.rlib.debug import debug_start, debug_stop -from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1 - -def optimize_loop(metainterp_sd, old_loop_tokens, loop): - debug_start("jit-optimize") - try: - return _optimize_loop(metainterp_sd, old_loop_tokens, loop) - finally: - debug_stop("jit-optimize") - -def _optimize_loop(metainterp_sd, old_loop_tokens, loop): - cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) - if old_loop_tokens: - return old_loop_tokens[0] - optimize_loop_1(metainterp_sd, loop, False) - return None - -def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, - inline_short_preamble, retraced=False): - debug_start("jit-optimize") - try: - return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge) - finally: - debug_stop("jit-optimize") - -def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge): - cpu = metainterp_sd.cpu - 
metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) - if old_loop_tokens: - old_loop_token = old_loop_tokens[0] - bridge.operations[-1].setdescr(old_loop_token) # patch jump target - optimize_bridge_1(metainterp_sd, bridge) - return old_loop_token - return None From commits-noreply at bitbucket.org Mon Mar 14 04:55:25 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 04:55:25 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110314035525.5C09C282BDA@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42595:63e875c8adc8 Date: 2011-03-13 23:55 -0400 http://bitbucket.org/pypy/pypy/changeset/63e875c8adc8/ Log: merge From commits-noreply at bitbucket.org Mon Mar 14 10:38:41 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 10:38:41 +0100 (CET) Subject: [pypy-svn] pypy default: port test_method_call from test_pypy_c Message-ID: <20110314093841.91726282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42596:6e20cdd51702 Date: 2011-03-11 10:06 +0100 http://bitbucket.org/pypy/pypy/changeset/6e20cdd51702/ Log: port test_method_call from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -196,6 +196,46 @@ jump(p0, p1, p2, p3, p4, i21, i6, i7, p8, p9, p10, p11, descr=) """) + def test_method_call(self): + def fn(n): + class A(object): + def __init__(self, a): + self.a = a + def f(self, i): + return self.a + i + i = 0 + a = A(1) + while i < n: + x = a.f(i) # ID: meth1 + i = a.f(x) # ID: meth2 + return i + # + log = self.run(fn, [1000], threshold=400) + # + # first, we test the entry bridge + # ------------------------------- + entry_bridge, = log.loops_by_filename(self.filepath, is_entry_bridge=True) + ops = entry_bridge.ops_by_id('meth1', opcode='LOOKUP_METHOD') + assert 
log.opnames(ops) == ['guard_value', 'getfield_gc', 'guard_value', + 'getfield_gc', 'guard_value'] + # the second LOOKUP_METHOD is folded away + assert list(entry_bridge.ops_by_id('meth2', opcode='LOOKUP_METHOD')) == [] + # + # then, the actual loop + # ---------------------- + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i15 = int_lt(i6, i9) + guard_true(i15, descr=) + i16 = force_token() + i17 = int_add_ovf(i10, i6) + guard_no_overflow(descr=) + i18 = force_token() + i19 = int_add_ovf(i10, i17) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, i19, p7, i17, i9, i10, p11, p12, p13, p14, descr=) + """) def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Mon Mar 14 10:38:42 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 10:38:42 +0100 (CET) Subject: [pypy-svn] pypy default: port test_static_classmethod_call from test_pypy_c Message-ID: <20110314093842.29282282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42597:7a5aa5087043 Date: 2011-03-11 10:11 +0100 http://bitbucket.org/pypy/pypy/changeset/7a5aa5087043/ Log: port test_static_classmethod_call from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -211,6 +211,7 @@ return i # log = self.run(fn, [1000], threshold=400) + assert log.result == 1000 # # first, we test the entry bridge # ------------------------------- @@ -237,6 +238,39 @@ jump(p0, p1, p2, p3, p4, p5, i19, p7, i17, i9, i10, p11, p12, p13, p14, descr=) """) + def test_static_classmethod_call(self): + def fn(n): + class A(object): + @classmethod + def f(cls, i): + return i + (cls is A) + 1 + @staticmethod + def g(i): + return i - 1 + # + i = 0 + a = A() + while i < n: + x = a.f(i) + i = a.g(x) + return i + # + log = self.run(fn, [1000], threshold=400) + 
assert log.result == 1000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i14 = int_lt(i6, i9) + guard_true(i14, descr=) + i15 = force_token() + i17 = int_add_ovf(i8, 1) + guard_no_overflow(descr=) + i18 = force_token() + i20 = int_sub(i17, 1) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, i20, p7, i17, i9, p10, p11, p12, p13, descr=) + """) + + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Mon Mar 14 10:38:43 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 10:38:43 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110314093843.A71D7282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42598:b5db96b7183c Date: 2011-03-14 10:01 +0100 http://bitbucket.org/pypy/pypy/changeset/b5db96b7183c/ Log: merge heads diff --git a/pypy/jit/metainterp/nounroll_optimize.py b/pypy/jit/metainterp/nounroll_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/nounroll_optimize.py +++ /dev/null @@ -1,36 +0,0 @@ - -from pypy.rlib.debug import debug_start, debug_stop -from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1 - -def optimize_loop(metainterp_sd, old_loop_tokens, loop): - debug_start("jit-optimize") - try: - return _optimize_loop(metainterp_sd, old_loop_tokens, loop) - finally: - debug_stop("jit-optimize") - -def _optimize_loop(metainterp_sd, old_loop_tokens, loop): - cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) - if old_loop_tokens: - return old_loop_tokens[0] - optimize_loop_1(metainterp_sd, loop, False) - return None - -def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, - inline_short_preamble, retraced=False): - debug_start("jit-optimize") - try: - return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge) - finally: - debug_stop("jit-optimize") - -def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge): - cpu = metainterp_sd.cpu - 
metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) - if old_loop_tokens: - old_loop_token = old_loop_tokens[0] - bridge.operations[-1].setdescr(old_loop_token) # patch jump target - optimize_bridge_1(metainterp_sd, bridge) - return old_loop_token - return None diff --git a/pypy/jit/metainterp/test/test_send_nounroll.py b/pypy/jit/metainterp/test/test_send_nounroll.py deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_nounroll.py +++ /dev/null @@ -1,30 +0,0 @@ - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_NO_UNROLL -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopNoPSpecTest(test_send.SendTests): - optimizer=OPTIMIZER_NO_UNROLL - - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, *args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - - -class TestLLtype(LoopNoPSpecTest, LLJitMixin): - pass - -class TestOOtype(LoopNoPSpecTest, OOJitMixin): - pass diff --git a/pypy/jit/metainterp/test/test_send_simple.py b/pypy/jit/metainterp/test/test_send_simple.py deleted file mode 100644 --- a/pypy/jit/metainterp/test/test_send_simple.py +++ /dev/null @@ -1,29 +0,0 @@ -# xxx mostly pointless - -from pypy.jit.metainterp.test import test_loop, test_send -from pypy.jit.metainterp.warmspot import ll_meta_interp -from pypy.rlib.jit import OPTIMIZER_SIMPLE -from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin - -class LoopDummyTest(test_send.SendTests): - optimizer=OPTIMIZER_SIMPLE - def meta_interp(self, func, args, **kwds): - return ll_meta_interp(func, args, optimizer=self.optimizer, - CPUClass=self.CPUClass, - type_system=self.type_system, - **kwds) - - def check_loops(self, 
*args, **kwds): - pass - - def check_loop_count(self, count): - pass - - def check_jumps(self, maxcount): - pass - -class TestLLtype(LoopDummyTest, LLJitMixin): - pass - -class TestOOtype(LoopDummyTest, OOJitMixin): - pass From commits-noreply at bitbucket.org Mon Mar 14 10:38:44 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 10:38:44 +0100 (CET) Subject: [pypy-svn] pypy default: fix an obscure case in test_pypy_c: if by chance the JIT traces code in nanos.py, the jitlogparser crashed because the co_firstlineno attribute is not accurate Message-ID: <20110314093844.A19E0282BD9@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42599:b37436954860 Date: 2011-03-14 10:38 +0100 http://bitbucket.org/pypy/pypy/changeset/b37436954860/ Log: fix an obscure case in test_pypy_c: if by chance the JIT traces code in nanos.py, the jitlogparser crashed because the co_firstlineno attribute is not accurate diff --git a/pypy/tool/jitlogparser/storage.py b/pypy/tool/jitlogparser/storage.py --- a/pypy/tool/jitlogparser/storage.py +++ b/pypy/tool/jitlogparser/storage.py @@ -37,7 +37,17 @@ try: return self.disassembled_codes[key] except KeyError: - res = dis(self.load_code(fname)[startlineno]) + codeobjs = self.load_code(fname) + if startlineno not in codeobjs: + # cannot find the code obj at this line: this can happen for + # various reasons, e.g. 
because the .py files changed since + # the log was produced, or because the co_firstlineno + # attribute of the code object is wrong (e.g., code objects + # produced by gateway.applevel(), such as the ones found in + # nanos.py) + return None + code = codeobjs[startlineno] + res = dis(code) self.disassembled_codes[key] = res return res From commits-noreply at bitbucket.org Mon Mar 14 11:38:46 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 11:38:46 +0100 (CET) Subject: [pypy-svn] pypy default: make sure that we do not generate bridges because of checkinterval Message-ID: <20110314103846.6644E36C20C@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42600:7a8c9e389116 Date: 2011-03-14 11:38 +0100 http://bitbucket.org/pypy/pypy/changeset/7a8c9e389116/ Log: make sure that we do not generate bridges because of checkinterval diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -29,6 +29,10 @@ # write the snippet arglist = ', '.join(map(repr, args)) with self.filepath.open("w") as f: + # we don't want to see the small bridges created + # by the checkinterval reaching the limit + f.write("import sys\n") + f.write("sys.setcheckinterval(10000000)\n") f.write(str(src) + "\n") f.write("print %s(%s)\n" % (funcname, arglist)) # From commits-noreply at bitbucket.org Mon Mar 14 12:21:23 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 12:21:23 +0100 (CET) Subject: [pypy-svn] pypy default: finally fix test_f1 after the merge of jit-virtual_state. Thanks hakan Message-ID: <20110314112123.6E8F5282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42601:c59bf3abd6a1 Date: 2011-03-14 12:21 +0100 http://bitbucket.org/pypy/pypy/changeset/c59bf3abd6a1/ Log: finally fix test_f1 after the merge of jit-virtual_state. 
Thanks hakan diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -22,8 +22,12 @@ return x log = self.run(f1, [2117]) assert log.result == 1083876708 - loop, = log.loops_by_filename(self.filepath) - assert loop.match(""" + # we get two loops: in the initial one "i" is only read and thus is + # not virtual, then "i" is written and thus we get a new loop where + # "i" is virtual. However, in this specific case the two loops happen + # to contain the very same operations + loop0, loop1 = log.loops_by_filename(self.filepath) + expected = """ i9 = int_le(i7, i8) guard_true(i9, descr=...) i11 = int_add_ovf(i7, 1) @@ -33,7 +37,9 @@ guard_no_overflow(descr=...) --TICK-- jump(p0, p1, p2, p3, p4, p5, i13, i11, i8, descr=...) - """) + """ + assert loop0.match(expected) + assert loop1.match(expected) def test_factorial(self): def fact(n): From commits-noreply at bitbucket.org Mon Mar 14 12:55:14 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 12:55:14 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: This test passes now :-) Message-ID: <20110314115514.84C85282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42602:ed561da3142d Date: 2011-03-14 11:57 +0100 http://bitbucket.org/pypy/pypy/changeset/ed561da3142d/ Log: This test passes now :-) diff --git a/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py b/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py --- a/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py @@ -125,7 +125,6 @@ prototype = self.functype.im_func(object) self.assertRaises(TypeError, prototype, lambda: None) - @xfail def test_issue_7959(self): proto = self.functype.im_func(None) From commits-noreply at bitbucket.org Mon Mar 14 12:55:14 
2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 12:55:14 +0100 (CET) Subject: [pypy-svn] pypy extend-rweakdict: Close branch Message-ID: <20110314115514.BF98D282BD4@codespeak.net> Author: Amaury Forgeot d'Arc Branch: extend-rweakdict Changeset: r42603:1cbb50591d3f Date: 2011-03-14 12:50 +0100 http://bitbucket.org/pypy/pypy/changeset/1cbb50591d3f/ Log: Close branch From commits-noreply at bitbucket.org Mon Mar 14 12:55:15 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 12:55:15 +0100 (CET) Subject: [pypy-svn] pypy default: Merge extend-rweakdict: rweakref.RWeakValueDictionary now works with integer keys. Message-ID: <20110314115515.15DC6282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42604:53046cd0b405 Date: 2011-03-14 12:52 +0100 http://bitbucket.org/pypy/pypy/changeset/53046cd0b405/ Log: Merge extend-rweakdict: rweakref.RWeakValueDictionary now works with integer keys. Use it in _rawffi callbacks, and fix one test. 
From commits-noreply at bitbucket.org Mon Mar 14 12:55:15 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 12:55:15 +0100 (CET) Subject: [pypy-svn] pypy default: Merge heads Message-ID: <20110314115515.6718C282BD4@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42605:57e39a13c2b9 Date: 2011-03-14 12:54 +0100 http://bitbucket.org/pypy/pypy/changeset/57e39a13c2b9/ Log: Merge heads From commits-noreply at bitbucket.org Mon Mar 14 12:56:21 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 12:56:21 +0100 (CET) Subject: [pypy-svn] pypy default: Remove useless import Message-ID: <20110314115621.7F4AF282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42606:2a05a00afd77 Date: 2011-03-14 12:55 +0100 http://bitbucket.org/pypy/pypy/changeset/2a05a00afd77/ Log: Remove useless import diff --git a/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py b/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py --- a/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_callbacks.py @@ -1,7 +1,6 @@ import unittest from ctypes import * import _ctypes_test -from ctypes.test import xfail class Callbacks(unittest.TestCase): functype = CFUNCTYPE From commits-noreply at bitbucket.org Mon Mar 14 14:55:47 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 14:55:47 +0100 (CET) Subject: [pypy-svn] pypy default: try to print relevant information when the loop does not match Message-ID: <20110314135547.DB113282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42607:09dbc4efa036 Date: 2011-03-14 14:45 +0100 http://bitbucket.org/pypy/pypy/changeset/09dbc4efa036/ Log: try to print relevant information when the loop does not match diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py 
@@ -1,4 +1,5 @@ import py +import sys import re import os.path from pypy.tool.jitlogparser.parser import SimpleParser, Function, TraceForOpcode @@ -132,12 +133,15 @@ for op in self._ops_for_chunk(chunk, include_debug_merge_points): yield op - def print_ops(self, id=None, **kwds): + def format_ops(self, id=None, **kwds): if id is None: ops = self.allops() else: ops = self.ops_by_id(id, **kwds) - print '\n'.join(map(str, ops)) + return '\n'.join(map(str, ops)) + + def print_ops(self, *args, **kwds): + print self.format_ops(*args, **kwds) def ops_by_id(self, id, include_debug_merge_points=False, opcode=None): opcode_name = opcode @@ -151,7 +155,7 @@ def match(self, expected_src): ops = list(self.allops()) - matcher = OpMatcher(ops) + matcher = OpMatcher(ops, src=self.format_ops()) return matcher.match(expected_src) def match_by_id(self, id, expected_src): @@ -160,12 +164,36 @@ return matcher.match(expected_src) class InvalidMatch(Exception): - pass + + def __init__(self, message, frame): + Exception.__init__(self, message) + # copied and adapted from pytest's magic AssertionError + f = py.code.Frame(frame) + try: + source = f.code.fullsource + if source is not None: + try: + source = source.getstatement(f.lineno) + except IndexError: + source = None + else: + source = str(source.deindent()).strip() + except py.error.ENOENT: + source = None + if source and source.startswith('self._assert('): + # transform self._assert(x, 'foo') into assert x, 'foo' + source = source.replace('self._assert(', 'assert ') + source = source[:-1] # remove the trailing ')' + self.msg = py.code._reinterpret(source, f, should_fail=True) + else: + self.msg = "" + class OpMatcher(object): - def __init__(self, ops): + def __init__(self, ops, src=None): self.ops = ops + self.src = src self.alpha_map = {} @classmethod @@ -195,9 +223,7 @@ args = args[:-1] args = args.split(',') args = map(str.strip, args) - if args == ['']: - args = [] - if args and args[-1].startswith('descr='): + if 
args[-1].startswith('descr='): descr = args.pop() descr = descr[len('descr='):] else: @@ -236,7 +262,7 @@ def _assert(self, cond, message): if not cond: - raise InvalidMatch(message) + raise InvalidMatch(message, frame=sys._getframe(1)) def match_op(self, op, (exp_opname, exp_res, exp_args, exp_descr)): self._assert(op.name == exp_opname, "operation mismatch") @@ -302,8 +328,18 @@ expected_ops = self.parse_ops(expected_src) try: self.match_loop(expected_ops) - except InvalidMatch: + except InvalidMatch, e: #raise # uncomment this and use py.test --pdb for better debugging + print '@' * 40 + print "Loops don't match" + print "=================" + print e.msg + print + print "Got:" + print py.code.Source(self.src).deindent().indent() + print + print "Expected:" + print py.code.Source(expected_src).deindent().indent() return False else: return True diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -98,7 +98,7 @@ def match(self, src1, src2): from pypy.tool.jitlogparser.parser import SimpleParser loop = SimpleParser.parse_from_input(src1) - matcher = OpMatcher(loop.operations) + matcher = OpMatcher(loop.operations, src=src1) return matcher.match(src2) def test_match_var(self): From commits-noreply at bitbucket.org Mon Mar 14 14:55:48 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 14:55:48 +0100 (CET) Subject: [pypy-svn] pypy default: re-revert this change, which was reverted by mistake Message-ID: <20110314135548.6ED44282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42608:9ed88cf1ba2a Date: 2011-03-14 14:52 +0100 http://bitbucket.org/pypy/pypy/changeset/9ed88cf1ba2a/ Log: re-revert this change, which was reverted by mistake diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py 
+++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -223,7 +223,9 @@ args = args[:-1] args = args.split(',') args = map(str.strip, args) - if args[-1].startswith('descr='): + if args == ['']: + args = [] + if args and args[-1].startswith('descr='): descr = args.pop() descr = descr[len('descr='):] else: @@ -324,6 +326,11 @@ self._next_op(iter_ops, assert_raises=True) def match(self, expected_src): + def format(src): + if src is None: + return '' + return py.code.Source(src).deindent().indent() + # expected_src = self.preprocess_expected_src(expected_src) expected_ops = self.parse_ops(expected_src) try: @@ -336,10 +343,10 @@ print e.msg print print "Got:" - print py.code.Source(self.src).deindent().indent() + print format(self.src) print print "Expected:" - print py.code.Source(expected_src).deindent().indent() + print format(expected_src) return False else: return True From commits-noreply at bitbucket.org Mon Mar 14 14:55:48 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 14:55:48 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110314135548.A99BE282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42609:18b15bde2dbc Date: 2011-03-14 14:55 +0100 http://bitbucket.org/pypy/pypy/changeset/18b15bde2dbc/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 14 16:16:47 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 16:16:47 +0100 (CET) Subject: [pypy-svn] pypy default: do not crash in case the file is there but we do not have rights to open it Message-ID: <20110314151647.1FB7F282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42610:94c4f4d19961 Date: 2011-03-14 16:16 +0100 http://bitbucket.org/pypy/pypy/changeset/94c4f4d19961/ Log: do not crash in case the file is there but we do not have rights to open it diff --git a/pypy/tool/jitlogparser/storage.py b/pypy/tool/jitlogparser/storage.py --- a/pypy/tool/jitlogparser/storage.py +++ 
b/pypy/tool/jitlogparser/storage.py @@ -31,8 +31,11 @@ return res def disassemble_code(self, fname, startlineno): - if py.path.local(fname).check(file=False): - return None # cannot find source file + try: + if py.path.local(fname).check(file=False): + return None # cannot find source file + except py.error.EACCES: + return None # cannot open the file key = (fname, startlineno) try: return self.disassembled_codes[key] From commits-noreply at bitbucket.org Mon Mar 14 16:36:14 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 16:36:14 +0100 (CET) Subject: [pypy-svn] pypy default: Fix translation hopefully Message-ID: <20110314153614.C551B282B90@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42611:411e1e001c5c Date: 2011-03-14 11:35 -0400 http://bitbucket.org/pypy/pypy/changeset/411e1e001c5c/ Log: Fix translation hopefully diff --git a/pypy/rpython/module/ll_os.py b/pypy/rpython/module/ll_os.py --- a/pypy/rpython/module/ll_os.py +++ b/pypy/rpython/module/ll_os.py @@ -845,7 +845,8 @@ def os_read_oofakeimpl(fd, count): return OOSupport.to_rstr(os.read(fd, count)) - return extdef([int, int], str, "ll_os.ll_os_read", + return extdef([int, int], SomeString(can_be_None=True), + "ll_os.ll_os_read", llimpl=os_read_llimpl, oofakeimpl=os_read_oofakeimpl) @registering(os.write) diff --git a/pypy/rpython/module/ll_strtod.py b/pypy/rpython/module/ll_strtod.py --- a/pypy/rpython/module/ll_strtod.py +++ b/pypy/rpython/module/ll_strtod.py @@ -8,6 +8,7 @@ from pypy.rlib import rposix from pypy.translator.tool.cbuild import ExternalCompilationInfo from pypy.tool.autopath import pypydir +from pypy.annotation.model import SomeString class CConfig: _compilation_info_ = ExternalCompilationInfo( @@ -62,7 +63,9 @@ def oofakeimpl(x, code, precision, flags): return ootype.oostring(rfloat.formatd(x, code, precision, flags), -1) - return extdef([float, lltype.Char, int, int], str, 'll_strtod.ll_strtod_formatd', + return extdef([float, lltype.Char, int, 
int], + SomeString(can_be_None=True), + 'll_strtod.ll_strtod_formatd', llimpl=llimpl, oofakeimpl=oofakeimpl, sandboxsafe=True) @@ -83,7 +86,8 @@ return rfloat.parts_to_float(sign._str, beforept._str, afterpt._str, exponent._str) - return extdef([str, str, str, str], float, + tp = SomeString(can_be_None=True) + return extdef([tp, tp, tp, tp], float, 'll_strtod.ll_strtod_parts_to_float', llimpl=llimpl, oofakeimpl=oofakeimpl, sandboxsafe=True) From commits-noreply at bitbucket.org Mon Mar 14 16:36:15 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 16:36:15 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110314153615.12B1C282BD6@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42612:05904287bbbd Date: 2011-03-14 11:35 -0400 http://bitbucket.org/pypy/pypy/changeset/05904287bbbd/ Log: merge From commits-noreply at bitbucket.org Mon Mar 14 17:00:01 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 17:00:01 +0100 (CET) Subject: [pypy-svn] pypy str-cmp-opt: Turn a call to strcmp where both strings are known length 1 into a subtraction. Message-ID: <20110314160001.15288282B90@codespeak.net> Author: Alex Gaynor Branch: str-cmp-opt Changeset: r42613:82a6054c3316 Date: 2011-03-14 11:59 -0400 http://bitbucket.org/pypy/pypy/changeset/82a6054c3316/ Log: Turn a call to strcmp where both strings are known length 1 into a subtraction.
diff --git a/pypy/jit/metainterp/optimizeopt/string.py b/pypy/jit/metainterp/optimizeopt/string.py --- a/pypy/jit/metainterp/optimizeopt/string.py +++ b/pypy/jit/metainterp/optimizeopt/string.py @@ -370,7 +370,7 @@ def reconstruct_for_next_iteration(self, optimizer, valuemap): self.enabled = True return self - + def make_vstring_plain(self, box, source_op, mode): vvalue = VStringPlainValue(self.optimizer, box, source_op, mode) self.make_equal_to(box, vvalue) @@ -640,6 +640,27 @@ return True return False + def opt_call_stroruni_STR_CMP(self, op, mode): + v1 = self.getvalue(op.getarg(1)) + v2 = self.getvalue(op.getarg(2)) + + l1box = v1.getstrlen(None, mode) + l2box = v2.getstrlen(None, mode) + if (l1box is not None and l2box is not None and + isinstance(l1box, ConstInt) and isinstance(l2box, ConstInt) and + l1box.value == 1 and l2box.value == 1): + + vchar1 = self.strgetitem(v1, optimizer.CVAL_ZERO, mode) + vchar2 = self.strgetitem(v2, optimizer.CVAL_ZERO, mode) + self.optimizer.send_extra_operation( + ResOperation( + rop.INT_SUB, [vchar1.force_box(), vchar2.force_box()], op.result + ) + ) + return True + return False + + def generate_modified_call(self, oopspecindex, args, result, mode): oopspecindex += mode.OS_offset cic = self.optimizer.metainterp_sd.callinfocollection @@ -652,7 +673,7 @@ if not self.enabled: self.emit_operation(op) return - + opnum = op.getopnum() for value, func in optimize_ops: if opnum == value: diff --git a/pypy/jit/codewriter/effectinfo.py b/pypy/jit/codewriter/effectinfo.py --- a/pypy/jit/codewriter/effectinfo.py +++ b/pypy/jit/codewriter/effectinfo.py @@ -30,6 +30,7 @@ OS_STREQ_NONNULL_CHAR = 29 # s1 == char (assert s1!=NULL) OS_STREQ_CHECKNULL_CHAR = 30 # s1!=NULL and s1==char OS_STREQ_LENGTHOK = 31 # s1 == s2 (assert len(s1)==len(s2)) + OS_STR_CMP = 32 # OS_UNI_CONCAT = 42 # OS_UNI_SLICE = 43 # @@ -41,6 +42,7 @@ OS_UNIEQ_NONNULL_CHAR = 49 # (must be the same amount as for OS_UNIEQ_CHECKNULL_CHAR = 50 # STR, in the same order) 
OS_UNIEQ_LENGTHOK = 51 # + OS_UNI_CMP = 52 _OS_offset_uni = OS_UNI_CONCAT - OS_STR_CONCAT # OS_LIBFFI_PREPARE = 60 @@ -87,7 +89,7 @@ result = object.__new__(cls) result.readonly_descrs_fields = readonly_descrs_fields if extraeffect == EffectInfo.EF_LOOPINVARIANT or \ - extraeffect == EffectInfo.EF_PURE: + extraeffect == EffectInfo.EF_PURE: result.write_descrs_fields = [] result.write_descrs_arrays = [] else: diff --git a/pypy/jit/metainterp/test/test_optimizeutil.py b/pypy/jit/metainterp/test/test_optimizeutil.py --- a/pypy/jit/metainterp/test/test_optimizeutil.py +++ b/pypy/jit/metainterp/test/test_optimizeutil.py @@ -122,7 +122,8 @@ ('streq_nonnull_char_descr', 'OS_STREQ_NONNULL_CHAR'), ('streq_checknull_char_descr', 'OS_STREQ_CHECKNULL_CHAR'), ('streq_lengthok_descr', 'OS_STREQ_LENGTHOK'), - ]: + ('strcmpdescr', 'OS_STR_CMP'), + ]: _oopspecindex = getattr(EffectInfo, _os) locals()[_name] = \ cpu.calldescrof(FUNC, FUNC.ARGS, FUNC.RESULT, @@ -165,7 +166,7 @@ ## def get_class_of_box(self, box): ## root = box.getref(ootype.ROOT) ## return ootype.classof(root) - + ## cpu = runner.OOtypeCPU(None) ## NODE = ootype.Instance('NODE', ootype.ROOT, {}) ## NODE._add_fields({'value': ootype.Signed, diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py --- a/pypy/rpython/lltypesystem/rstr.py +++ b/pypy/rpython/lltypesystem/rstr.py @@ -425,6 +425,7 @@ return diff i += 1 return len1 - len2 + ll_strcmp.oopspec = 'stroruni.cmp(s1, s2)' @purefunction def ll_streq(s1, s2): diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py --- a/pypy/jit/codewriter/jtransform.py +++ b/pypy/jit/codewriter/jtransform.py @@ -1268,12 +1268,14 @@ dict = {"stroruni.concat": EffectInfo.OS_STR_CONCAT, "stroruni.slice": EffectInfo.OS_STR_SLICE, "stroruni.equal": EffectInfo.OS_STR_EQUAL, + "stroruni.cmp": EffectInfo.OS_STR_CMP, } CHR = lltype.Char elif SoU.TO == rstr.UNICODE: dict = {"stroruni.concat": EffectInfo.OS_UNI_CONCAT, "stroruni.slice": 
EffectInfo.OS_UNI_SLICE, "stroruni.equal": EffectInfo.OS_UNI_EQUAL, + "stroruni.cmp": EffectInfo.OS_UNI_CMP, } CHR = lltype.UniChar else: diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5464,6 +5464,26 @@ """ self.optimize_strunicode_loop_extradescrs(ops, expected) + def test_str_cmp_char(self): + ops = """ + [i0] + p0 = newstr(1) + strsetitem(p0, 0, i0) + i1 = call(0, p0, s"0", descr=strcmpdescr) + i2 = int_le(i1, 0) + guard_true(i2) [] + jump(i0) + """ + expected = """ + [i0] + # ord("0") + i1 = int_sub(i0, 48) + i2 = int_le(i1, 0) + guard_true(i2) [] + jump(i0) + """ + self.optimize_strunicode_loop_extradescrs(ops, expected) + def test_str2unicode_constant(self): ops = """ [] From commits-noreply at bitbucket.org Mon Mar 14 17:07:57 2011 From: commits-noreply at bitbucket.org (tav) Date: Mon, 14 Mar 2011 17:07:57 +0100 (CET) Subject: [pypy-svn] pypy default: Removed misleading refs to pypy/doc and svn from the README. Message-ID: <20110314160757.7CBEA282B90@codespeak.net> Author: tav Branch: Changeset: r42614:a98716e2324e Date: 2011-03-14 16:07 +0000 http://bitbucket.org/pypy/pypy/changeset/a98716e2324e/ Log: Removed misleading refs to pypy/doc and svn from the README. diff --git a/README b/README --- a/README +++ b/README @@ -1,6 +1,6 @@ -====================================== -PyPy: Python in Python implementation -====================================== +===================================== +PyPy: Python in Python Implementation +===================================== Welcome to PyPy! 
@@ -13,18 +13,12 @@ http://pypy.org/ -We invite you to head over to our detailed getting-started document: - - pypy/doc/getting-started.html or - pypy/doc/getting-started.txt - (local if you got a source tarball or svn checkout) +The getting-started document will help guide you: http://codespeak.net/pypy/dist/pypy/doc/getting-started.html -which gives you many good starting and entry points into playing with -PyPy. It will also point you to our documentation section which is -generated from information in the pypy/doc directory. - -Enjoy and send us feedback! +It will also point you to the rest of the documentation which is generated +from files in the pypy/doc directory within the source repositories. Enjoy +and send us feedback! the pypy-dev team From commits-noreply at bitbucket.org Mon Mar 14 17:12:02 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 17:12:02 +0100 (CET) Subject: [pypy-svn] pypy default: fix translation Message-ID: <20110314161202.9DC7A282B90@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42615:09cb6b33b0ae Date: 2011-03-14 12:11 -0400 http://bitbucket.org/pypy/pypy/changeset/09cb6b33b0ae/ Log: fix translation diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -171,6 +171,9 @@ class Optimization(object): next_optimization = None + + def __init__(self): + pass # make rpython happy def propagate_forward(self, op): raise NotImplementedError From commits-noreply at bitbucket.org Mon Mar 14 17:12:02 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 17:12:02 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110314161202.E51DD282BD6@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42616:8e6140722f82 Date: 2011-03-14 12:11 -0400 http://bitbucket.org/pypy/pypy/changeset/8e6140722f82/ Log: merge From 
commits-noreply at bitbucket.org Mon Mar 14 17:24:06 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 17:24:06 +0100 (CET) Subject: [pypy-svn] pypy default: improve the output if the test fails Message-ID: <20110314162406.4CD5A282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42617:7ceeee61eb4c Date: 2011-03-14 17:23 +0100 http://bitbucket.org/pypy/pypy/changeset/7ceeee61eb4c/ Log: improve the output if the test fails diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -160,7 +160,7 @@ def match_by_id(self, id, expected_src): ops = list(self.ops_by_id(id)) - matcher = OpMatcher(ops) + matcher = OpMatcher(ops, src=self.format_ops(id)) return matcher.match(expected_src) class InvalidMatch(Exception): From commits-noreply at bitbucket.org Mon Mar 14 17:38:55 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 14 Mar 2011 17:38:55 +0100 (CET) Subject: [pypy-svn] pypy default: use an iterative instead of a recursive algorithm here Message-ID: <20110314163855.C8EBB282B90@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42618:ac10779c0cf6 Date: 2011-03-14 15:44 +0100 http://bitbucket.org/pypy/pypy/changeset/ac10779c0cf6/ Log: use an iterative instead of a recursive algorithm here diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -94,6 +94,10 @@ return index def _index(self, selector): + while isinstance(self, PlainAttribute): + if selector == self.selector: + return self.position + self = self.back return -1 def copy(self, obj): @@ -274,11 +278,6 @@ self._copy_attr(obj, new_obj) return new_obj - def _index(self, selector): - if selector == self.selector: - return self.position - return self.back._index(selector) - def copy(self, obj): new_obj = self.back.copy(obj) 
self._copy_attr(obj, new_obj) diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -51,6 +51,13 @@ assert aa.get_terminator() is aa.back.back +def test_huge_chain(): + current = Terminator(space, "cls") + for i in range(20000): + current = PlainAttribute((str(i), DICT), current) + assert current.index(("0", DICT)) == 0 + + def test_search(): aa = PlainAttribute(("b", DICT), PlainAttribute(("a", DICT), Terminator(None, None))) assert aa.search(DICT) is aa From commits-noreply at bitbucket.org Mon Mar 14 17:38:56 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 14 Mar 2011 17:38:56 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110314163856.0DD93282BD6@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42619:35bf8d02c1e1 Date: 2011-03-14 17:38 +0100 http://bitbucket.org/pypy/pypy/changeset/35bf8d02c1e1/ Log: merge From commits-noreply at bitbucket.org Mon Mar 14 17:48:22 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 17:48:22 +0100 (CET) Subject: [pypy-svn] pypy default: make it possible to use regexps to match descrs Message-ID: <20110314164822.735B3282B90@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42624:88faf282394b Date: 2011-03-14 17:42 +0100 http://bitbucket.org/pypy/pypy/changeset/88faf282394b/ Log: make it possible to use regexps to match descrs diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -262,6 +262,12 @@ self.alpha_map[v1] = exp_v2 return self.alpha_map[v1] == exp_v2 + def match_descr(self, descr, exp_descr): + if descr == exp_descr or exp_descr == '...': + return True + match = exp_descr is not None and re.match(exp_descr, descr) + self._assert(match, "descr mismatch") + def _assert(self, 
cond, message): if not cond: raise InvalidMatch(message, frame=sys._getframe(1)) @@ -272,7 +278,8 @@ self._assert(len(op.args) == len(exp_args), "wrong number of arguments") for arg, exp_arg in zip(op.args, exp_args): self._assert(self.match_var(arg, exp_arg), "variable mismatch") - self._assert(op.descr == exp_descr or exp_descr == '...', "descr mismatch") + self.match_descr(op.descr, exp_descr) + def _next_op(self, iter_ops, assert_raises=False): try: diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -173,6 +173,7 @@ """ assert self.match(loop, "setfield_gc(p0, 1, descr=)") assert self.match(loop, "setfield_gc(p0, 1, descr=...)") + assert self.match(loop, "setfield_gc(p0, 1, descr=<.*bar>)") assert not self.match(loop, "setfield_gc(p0, 1)") assert not self.match(loop, "setfield_gc(p0, 1, descr=)") From commits-noreply at bitbucket.org Mon Mar 14 17:48:24 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 17:48:24 +0100 (CET) Subject: [pypy-svn] pypy default: don't hardcode the field offset, we don't care for its exact value Message-ID: <20110314164824.C46BC282BE3@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42625:747a43db7681 Date: 2011-03-14 17:47 +0100 http://bitbucket.org/pypy/pypy/changeset/747a43db7681/ Log: don't hardcode the field offset, we don't care for its exact value diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -239,9 +239,9 @@ # replaced with the corresponding operations, so that tests don't have # to repeat it every time ticker_check = """ - ticker0 = getfield_raw(ticker_address, descr=) + ticker0 = getfield_raw(ticker_address, descr=) ticker1 = int_sub(ticker0, 1) - setfield_raw(ticker_address, ticker1, 
descr=) + setfield_raw(ticker_address, ticker1, descr=) ticker_cond = int_lt(ticker1, 0) guard_false(ticker_cond, descr=...) """ diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -164,20 +164,20 @@ assert log.opnames(ops) == ["getfield_gc", "guard_nonnull_class"] # assert entry_bridge.match_by_id('call', """ - p29 = getfield_gc(ConstPtr(ptr28), descr=) + p29 = getfield_gc(ConstPtr(ptr28), descr=) guard_nonnull_class(p29, ConstClass(Function), descr=) - i32 = getfield_gc(p0, descr=) + i32 = getfield_gc(p0, descr=) guard_false(i32, descr=) - p33 = getfield_gc(p29, descr=) + p33 = getfield_gc(p29, descr=) guard_value(p33, ConstPtr(ptr34), descr=) - p35 = getfield_gc(p29, descr=) - p36 = getfield_gc(p29, descr=) + p35 = getfield_gc(p29, descr=) + p36 = getfield_gc(p29, descr=) p38 = call(ConstClass(getexecutioncontext), descr=) - p39 = getfield_gc(p38, descr=) + p39 = getfield_gc(p38, descr=) i40 = force_token() - p41 = getfield_gc(p38, descr=) + p41 = getfield_gc(p38, descr=) guard_isnull(p41, descr=) - i42 = getfield_gc(p38, descr=) + i42 = getfield_gc(p38, descr=) i43 = int_is_zero(i42) guard_true(i43, descr=) i50 = force_token() diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -370,9 +370,9 @@ guard_true(i6, descr=...) i8 = int_add(i4, 1) # signal checking stuff - i10 = getfield_raw(37212896, descr=) + i10 = getfield_raw(37212896, descr=<.* pypysig_long_struct.c_value .*>) i12 = int_sub(i10, 1) - setfield_raw(37212896, i12, descr=) + setfield_raw(37212896, i12, descr=<.* pypysig_long_struct.c_value .*>) i14 = int_lt(i12, 0) guard_false(i14, descr=...) jump(p0, p1, p2, p3, i8, descr=...) 
From commits-noreply at bitbucket.org Mon Mar 14 17:48:25 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 14 Mar 2011 17:48:25 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110314164825.19D4B282BE7@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42626:9ab6cbbe0a49 Date: 2011-03-14 17:48 +0100 http://bitbucket.org/pypy/pypy/changeset/9ab6cbbe0a49/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 14 18:30:29 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 18:30:29 +0100 (CET) Subject: [pypy-svn] pypy default: #672 -- fix a NameError in translation on osx Message-ID: <20110314173029.410FF36C20C@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42627:04257ccf2c0a Date: 2011-03-14 13:29 -0400 http://bitbucket.org/pypy/pypy/changeset/04257ccf2c0a/ Log: #672 -- fix a NameError in translation on osx diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -192,10 +192,8 @@ sandboxsafe=True) def get_darwin_cache_size(cache_key): - cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') - try: - len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') - try: + with lltype.scoped_alloc(rffi.LONGLONGP.TO, 1) as cache_p: + with lltype.scoped_alloc(rffi.SIZE_TP.TO, 1) as len_p: size = rffi.sizeof(rffi.LONGLONG) cache_p[0] = rffi.cast(rffi.LONGLONG, 0) len_p[0] = rffi.cast(rffi.SIZE_T, size) @@ -205,16 +203,13 @@ len_p, lltype.nullptr(rffi.VOIDP.TO), rffi.cast(rffi.SIZE_T, 0)) + cache = 0 if (rffi.cast(lltype.Signed, result) == 0 and rffi.cast(lltype.Signed, len_p[0]) == size): cache = rffi.cast(lltype.Signed, cache_p[0]) if rffi.cast(rffi.LONGLONG, cache) != cache_p[0]: cache = 0 # overflow! 
return cache - finally: - lltype.free(len_p, flavor='raw') - finally: - lltype.free(cache_p, flavor='raw') def get_L2cache_darwin(): From commits-noreply at bitbucket.org Mon Mar 14 18:30:29 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 18:30:29 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110314173029.72161282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42628:733ffa428874 Date: 2011-03-14 13:30 -0400 http://bitbucket.org/pypy/pypy/changeset/733ffa428874/ Log: merged upstream From commits-noreply at bitbucket.org Mon Mar 14 18:51:18 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 18:51:18 +0100 (CET) Subject: [pypy-svn] pypy default: jit_ffi is no longer a translation option. Message-ID: <20110314175118.6FF78282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42629:c651b6e165ea Date: 2011-03-14 13:51 -0400 http://bitbucket.org/pypy/pypy/changeset/c651b6e165ea/ Log: jit_ffi is no longer a translation option. 
diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -80,8 +80,7 @@ "_rawffi": [("objspace.usemodules.struct", True)], "cpyext": [("translation.secondaryentrypoints", "cpyext"), ("translation.shared", sys.platform == "win32")], - "_ffi": [("translation.jit_ffi", True)], - } +} module_import_dependencies = { # no _rawffi if importing pypy.rlib.clibffi raises ImportError @@ -352,7 +351,7 @@ config.objspace.std.suggest(builtinshortcut=True) config.objspace.std.suggest(optimized_list_getitem=True) config.objspace.std.suggest(getattributeshortcut=True) - config.objspace.std.suggest(newshortcut=True) + config.objspace.std.suggest(newshortcut=True) if not IS_64_BITS: config.objspace.std.suggest(withsmalllong=True) From commits-noreply at bitbucket.org Mon Mar 14 18:54:53 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 18:54:53 +0100 (CET) Subject: [pypy-svn] pypy default: fix the test enough to get a different failure (which fijal needs to fix ; )) Message-ID: <20110314175453.112F8282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42630:b7c7ed175556 Date: 2011-03-14 13:54 -0400 http://bitbucket.org/pypy/pypy/changeset/b7c7ed175556/ Log: fix the test enough to get a different failure (which fijal needs to fix ;)) diff --git a/pypy/jit/metainterp/test/test_ztranslation.py b/pypy/jit/metainterp/test/test_ztranslation.py --- a/pypy/jit/metainterp/test/test_ztranslation.py +++ b/pypy/jit/metainterp/test/test_ztranslation.py @@ -128,9 +128,9 @@ assert res == main(40) res = rpython_ll_meta_interp(main, [40], CPUClass=self.CPUClass, type_system=self.type_system, - optimizer=OPTIMIZER_FULL, + enable_opts='', ProfilerClass=Profiler) - assert res == main(40) + assert res == main(40) class TestTranslationLLtype(TranslationTest): From commits-noreply at bitbucket.org Mon Mar 14 19:29:41 2011 From: commits-noreply at bitbucket.org (migueldvb) Date: Mon, 14 Mar 
2011 19:29:41 +0100 (CET) Subject: [pypy-svn] pypy default: add from __future__ import with_statement line Message-ID: <20110314182941.C51AB282B90@codespeak.net> Author: Miguel de Val Borro Branch: Changeset: r42631:0902f8205eb2 Date: 2011-03-14 19:29 +0100 http://bitbucket.org/pypy/pypy/changeset/0902f8205eb2/ Log: add from __future__ import with_statement line diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -6,6 +6,7 @@ from pypy.rlib.debug import debug_print, debug_start, debug_stop from pypy.rpython.lltypesystem import lltype, rffi from pypy.rpython.lltypesystem.lloperation import llop +from __future__ import with_statement # ____________________________________________________________ # Reading env vars. Supports returning ints, uints or floats, From commits-noreply at bitbucket.org Mon Mar 14 19:51:03 2011 From: commits-noreply at bitbucket.org (migueldvb) Date: Mon, 14 Mar 2011 19:51:03 +0100 (CET) Subject: [pypy-svn] pypy default: Fix previous commit Message-ID: <20110314185103.51378282B90@codespeak.net> Author: Miguel de Val Borro Branch: Changeset: r42632:5700ff03fdfa Date: 2011-03-14 19:50 +0100 http://bitbucket.org/pypy/pypy/changeset/5700ff03fdfa/ Log: Fix previous commit diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -1,12 +1,12 @@ """ Utilities to get environ variables and platform-specific memory-related values. """ +from __future__ import with_statement import os, sys from pypy.rlib.rarithmetic import r_uint from pypy.rlib.debug import debug_print, debug_start, debug_stop from pypy.rpython.lltypesystem import lltype, rffi from pypy.rpython.lltypesystem.lloperation import llop -from __future__ import with_statement # ____________________________________________________________ # Reading env vars. 
Supports returning ints, uints or floats, From commits-noreply at bitbucket.org Mon Mar 14 19:52:18 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 19:52:18 +0100 (CET) Subject: [pypy-svn] pypy default: Merged upstream. Message-ID: <20110314185218.AC5AF282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42633:f579aa0e9a27 Date: 2011-03-14 14:52 -0400 http://bitbucket.org/pypy/pypy/changeset/f579aa0e9a27/ Log: Merged upstream. From commits-noreply at bitbucket.org Mon Mar 14 20:21:56 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:21:56 +0100 (CET) Subject: [pypy-svn] pypy default: ctypes should not call a Structure user-defined __new__ and __init__, Message-ID: <20110314192156.791B2282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42634:e53e985d7523 Date: 2011-03-14 13:08 +0100 http://bitbucket.org/pypy/pypy/changeset/e53e985d7523/ Log: ctypes should not call a Structure user-defined __new__ and __init__, we may not know the signature. 
diff --git a/lib_pypy/_ctypes/structure.py b/lib_pypy/_ctypes/structure.py --- a/lib_pypy/_ctypes/structure.py +++ b/lib_pypy/_ctypes/structure.py @@ -134,7 +134,7 @@ __setattr__ = struct_setattr def from_address(self, address): - instance = self.__new__(self) + instance = StructOrUnion.__new__(self) instance.__dict__['_buffer'] = self._ffistruct.fromaddress(address) return instance @@ -156,7 +156,7 @@ return _CDataMeta.from_param(self, value) def _CData_output(self, resarray, base=None, index=-1): - res = self.__new__(self) + res = StructOrUnion.__new__(self) ffistruct = self._ffistruct.fromaddress(resarray.buffer) res.__dict__['_buffer'] = ffistruct res.__dict__['_base'] = base @@ -164,7 +164,7 @@ return res def _CData_retval(self, resbuffer): - res = self.__new__(self) + res = StructOrUnion.__new__(self) res.__dict__['_buffer'] = resbuffer res.__dict__['_base'] = None res.__dict__['_index'] = -1 diff --git a/lib-python/modified-2.7.0/ctypes/test/test_cast.py b/lib-python/modified-2.7.0/ctypes/test/test_cast.py --- a/lib-python/modified-2.7.0/ctypes/test/test_cast.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_cast.py @@ -2,8 +2,6 @@ import unittest import sys -from ctypes.test import xfail - class Test(unittest.TestCase): def test_array2pointer(self): diff --git a/lib-python/modified-2.7.0/ctypes/test/test_init.py b/lib-python/modified-2.7.0/ctypes/test/test_init.py --- a/lib-python/modified-2.7.0/ctypes/test/test_init.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_init.py @@ -1,6 +1,5 @@ from ctypes import * import unittest -from ctypes.test import xfail class X(Structure): _fields_ = [("a", c_int), @@ -21,7 +20,6 @@ class InitTest(unittest.TestCase): - @xfail def test_get(self): # make sure the only accessing a nested structure # doesn't call the structure's __new__ and __init__ diff --git a/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py b/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py --- 
a/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_prototypes.py @@ -1,6 +1,5 @@ from ctypes import * import unittest -from ctypes.test import xfail # IMPORTANT INFO: # From commits-noreply at bitbucket.org Mon Mar 14 20:21:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:21:57 +0100 (CET) Subject: [pypy-svn] pypy default: Implement _ctypes.call_function() and fix on test in test_random_things Message-ID: <20110314192157.D5FF4282BE3@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42635:869b900efc11 Date: 2011-03-14 13:53 +0100 http://bitbucket.org/pypy/pypy/changeset/869b900efc11/ Log: Implement _ctypes.call_function() and fix on test in test_random_things diff --git a/lib_pypy/_ctypes/__init__.py b/lib_pypy/_ctypes/__init__.py --- a/lib_pypy/_ctypes/__init__.py +++ b/lib_pypy/_ctypes/__init__.py @@ -4,7 +4,7 @@ from _ctypes.primitive import _SimpleCData from _ctypes.pointer import _Pointer, _cast_addr from _ctypes.pointer import POINTER, pointer, _pointer_type_cache -from _ctypes.function import CFuncPtr +from _ctypes.function import CFuncPtr, call_function from _ctypes.dll import dlopen from _ctypes.structure import Structure from _ctypes.array import Array diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -29,6 +29,12 @@ from _ctypes import COMError return COMError(errcode, None, None) +def call_function(func, args): + "Only for debugging so far: So that we can call CFunction instances" + funcptr = CFuncPtr(func) + funcptr.restype = int + return funcptr(*args) + class CFuncPtrType(_CDataMeta): # XXX write down here defaults and such things @@ -160,6 +166,8 @@ errcheck = property(_geterrcheck, _seterrcheck, _delerrcheck) def _ffishapes(self, args, restype): + if args is None: + args = [] argtypes = [arg._ffiargshape for arg in args] if restype is not None: if not 
isinstance(restype, SimpleType): From commits-noreply at bitbucket.org Mon Mar 14 20:21:58 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:21:58 +0100 (CET) Subject: [pypy-svn] pypy default: COM methods are boolean True. Fix one test in test_pointers.py Message-ID: <20110314192158.DFC1F282BE3@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42636:6981cdab2ad4 Date: 2011-03-14 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/6981cdab2ad4/ Log: COM methods are boolean True. Fix one test in test_pointers.py diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -509,7 +509,7 @@ return callargs, outargs def __nonzero__(self): - return bool(self._buffer[0]) + return self._com_index is not None or bool(self._buffer[0]) def __del__(self): if self._needs_free: From commits-noreply at bitbucket.org Mon Mar 14 20:21:59 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:21:59 +0100 (CET) Subject: [pypy-svn] pypy default: add a gc.collect() to fix the test Message-ID: <20110314192159.D14B3282BEC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42637:84cb7b18e494 Date: 2011-03-14 19:36 +0100 http://bitbucket.org/pypy/pypy/changeset/84cb7b18e494/ Log: add a gc.collect() to fix the test diff --git a/lib-python/modified-2.7.0/ctypes/test/test_refcounts.py b/lib-python/modified-2.7.0/ctypes/test/test_refcounts.py --- a/lib-python/modified-2.7.0/ctypes/test/test_refcounts.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_refcounts.py @@ -90,6 +90,7 @@ return a * b * 2 f = proto(func) + gc.collect() a = sys.getrefcount(ctypes.c_int) f(1, 2) self.assertEqual(sys.getrefcount(ctypes.c_int), a) From commits-noreply at bitbucket.org Mon Mar 14 20:22:02 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:22:02 +0100 (CET) Subject: [pypy-svn] pypy default: AST classes are not 
abstract anymore, and can be subclassed. Message-ID: <20110314192202.B4AD0282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42638:f540780193fd Date: 2011-03-14 19:50 +0100 http://bitbucket.org/pypy/pypy/changeset/f540780193fd/ Log: AST classes are not abstract anymore, and can be subclassed. Also update the error message to better match CPython. diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -349,16 +349,10 @@ setter = "%s_set_%s" % (name, field.name) config = (field.name, getter, setter, name) self.emit("%s=typedef.GetSetProperty(%s, %s, cls=%s)," % config, 1) - # CPython lets you create instances of "abstract" AST nodes - # like ast.expr or even ast.AST. This doesn't seem to useful - # and would be a pain to implement safely, so we don't allow - # it. - if concrete: - self.emit("__new__=interp2app(get_AST_new(%s))," % (name,), 1) - if needs_init: - self.emit("__init__=interp2app(%s_init)," % (name,), 1) + self.emit("__new__=interp2app(get_AST_new(%s))," % (name,), 1) + if needs_init: + self.emit("__init__=interp2app(%s_init)," % (name,), 1) self.emit(")") - self.emit("%s.typedef.acceptable_as_base_class = False" % (name,)) self.emit("") def make_init(self, name, fields): @@ -376,8 +370,9 @@ arity = len(fields) if arity: self.emit("if len(args_w) != %i:" % (arity,), 2) - self.emit("w_err = space.wrap(\"%s constructor takes 0 or %i " \ - "positional arguments\")" % (name, arity), 3) + plural = arity > 1 and "s" or "" + self.emit("w_err = space.wrap(\"%s constructor takes either 0 or %i " \ + "positional argument%s\")" % (name, arity, plural), 3) self.emit("raise OperationError(space.w_TypeError, w_err)", 3) self.emit("i = 0", 2) self.emit("for field in _%s_field_unroller:" % (name,), 2) @@ -609,6 +604,13 @@ return space.wrap(node) return func_with_new_name(generic_AST_new, 
"new_%s" % node_class.__name__) +def AST_init(space, w_self, __args__): + args_w, kwargs_w = __args__.unpack() + if args_w and len(args_w) != 0: + w_err = space.wrap("_ast.AST constructor takes 0 positional arguments") + raise OperationError(space.w_TypeError, w_err) + for field, w_value in kwargs_w.iteritems(): + space.setattr(w_self, space.wrap(field), w_value) AST.typedef = typedef.TypeDef("AST", _fields=_FieldsWrapper([]), @@ -618,8 +620,9 @@ __setstate__=interp2app(AST.setstate_w), __dict__ = typedef.GetSetProperty(typedef.descr_get_dict, typedef.descr_set_dict, cls=AST), + __new__=interp2app(get_AST_new(AST)), + __init__=interp2app(AST_init), ) -AST.typedef.acceptable_as_base_class = False def missing_field(space, state, required, host): diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -73,6 +73,13 @@ return space.wrap(node) return func_with_new_name(generic_AST_new, "new_%s" % node_class.__name__) +def AST_init(space, w_self, __args__): + args_w, kwargs_w = __args__.unpack() + if args_w and len(args_w) != 0: + w_err = space.wrap("_ast.AST constructor takes 0 positional arguments") + raise OperationError(space.w_TypeError, w_err) + for field, w_value in kwargs_w.iteritems(): + space.setattr(w_self, space.wrap(field), w_value) AST.typedef = typedef.TypeDef("AST", _fields=_FieldsWrapper([]), @@ -82,8 +89,9 @@ __setstate__=interp2app(AST.setstate_w), __dict__ = typedef.GetSetProperty(typedef.descr_get_dict, typedef.descr_set_dict, cls=AST), + __new__=interp2app(get_AST_new(AST)), + __init__=interp2app(AST_init), ) -AST.typedef.acceptable_as_base_class = False def missing_field(space, state, required, host): @@ -2953,8 +2961,8 @@ AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(mod)), ) -mod.typedef.acceptable_as_base_class = False def Module_get_body(space, w_self): if not 
w_self.initialization_state & 1: @@ -2981,7 +2989,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Module constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Module constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Module_field_unroller: @@ -2998,7 +3006,6 @@ __new__=interp2app(get_AST_new(Module)), __init__=interp2app(Module_init), ) -Module.typedef.acceptable_as_base_class = False def Interactive_get_body(space, w_self): if not w_self.initialization_state & 1: @@ -3025,7 +3032,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Interactive constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Interactive constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Interactive_field_unroller: @@ -3042,7 +3049,6 @@ __new__=interp2app(get_AST_new(Interactive)), __init__=interp2app(Interactive_init), ) -Interactive.typedef.acceptable_as_base_class = False def Expression_get_body(space, w_self): if w_self.w_dict is not None: @@ -3071,7 +3077,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Expression constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Expression constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Expression_field_unroller: @@ -3088,7 +3094,6 @@ __new__=interp2app(get_AST_new(Expression)), __init__=interp2app(Expression_init), ) -Expression.typedef.acceptable_as_base_class = False def Suite_get_body(space, w_self): if not w_self.initialization_state & 1: @@ -3115,7 +3120,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Suite constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Suite constructor takes either 
0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Suite_field_unroller: @@ -3132,7 +3137,6 @@ __new__=interp2app(get_AST_new(Suite)), __init__=interp2app(Suite_init), ) -Suite.typedef.acceptable_as_base_class = False def stmt_get_lineno(space, w_self): if w_self.w_dict is not None: @@ -3182,8 +3186,8 @@ _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(stmt_get_lineno, stmt_set_lineno, cls=stmt), col_offset=typedef.GetSetProperty(stmt_get_col_offset, stmt_set_col_offset, cls=stmt), + __new__=interp2app(get_AST_new(stmt)), ) -stmt.typedef.acceptable_as_base_class = False def FunctionDef_get_name(space, w_self): if w_self.w_dict is not None: @@ -3271,7 +3275,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 4: - w_err = space.wrap("FunctionDef constructor takes 0 or 4 positional arguments") + w_err = space.wrap("FunctionDef constructor takes either 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _FunctionDef_field_unroller: @@ -3291,7 +3295,6 @@ __new__=interp2app(get_AST_new(FunctionDef)), __init__=interp2app(FunctionDef_init), ) -FunctionDef.typedef.acceptable_as_base_class = False def ClassDef_get_name(space, w_self): if w_self.w_dict is not None: @@ -3377,7 +3380,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 4: - w_err = space.wrap("ClassDef constructor takes 0 or 4 positional arguments") + w_err = space.wrap("ClassDef constructor takes either 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ClassDef_field_unroller: @@ -3397,7 +3400,6 @@ __new__=interp2app(get_AST_new(ClassDef)), __init__=interp2app(ClassDef_init), ) -ClassDef.typedef.acceptable_as_base_class = False def Return_get_value(space, w_self): if w_self.w_dict is not None: @@ -3426,7 +3428,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = 
space.wrap("Return constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Return constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Return_field_unroller: @@ -3443,7 +3445,6 @@ __new__=interp2app(get_AST_new(Return)), __init__=interp2app(Return_init), ) -Return.typedef.acceptable_as_base_class = False def Delete_get_targets(space, w_self): if not w_self.initialization_state & 1: @@ -3470,7 +3471,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Delete constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Delete constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Delete_field_unroller: @@ -3487,7 +3488,6 @@ __new__=interp2app(get_AST_new(Delete)), __init__=interp2app(Delete_init), ) -Delete.typedef.acceptable_as_base_class = False def Assign_get_targets(space, w_self): if not w_self.initialization_state & 1: @@ -3535,7 +3535,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("Assign constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Assign constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Assign_field_unroller: @@ -3553,7 +3553,6 @@ __new__=interp2app(get_AST_new(Assign)), __init__=interp2app(Assign_init), ) -Assign.typedef.acceptable_as_base_class = False def AugAssign_get_target(space, w_self): if w_self.w_dict is not None: @@ -3625,7 +3624,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("AugAssign constructor takes 0 or 3 positional arguments") + w_err = space.wrap("AugAssign constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _AugAssign_field_unroller: @@ -3644,7 +3643,6 @@ 
__new__=interp2app(get_AST_new(AugAssign)), __init__=interp2app(AugAssign_init), ) -AugAssign.typedef.acceptable_as_base_class = False def Print_get_dest(space, w_self): if w_self.w_dict is not None: @@ -3713,7 +3711,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Print constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Print constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Print_field_unroller: @@ -3732,7 +3730,6 @@ __new__=interp2app(get_AST_new(Print)), __init__=interp2app(Print_init), ) -Print.typedef.acceptable_as_base_class = False def For_get_target(space, w_self): if w_self.w_dict is not None: @@ -3820,7 +3817,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 4: - w_err = space.wrap("For constructor takes 0 or 4 positional arguments") + w_err = space.wrap("For constructor takes either 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _For_field_unroller: @@ -3840,7 +3837,6 @@ __new__=interp2app(get_AST_new(For)), __init__=interp2app(For_init), ) -For.typedef.acceptable_as_base_class = False def While_get_test(space, w_self): if w_self.w_dict is not None: @@ -3907,7 +3903,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("While constructor takes 0 or 3 positional arguments") + w_err = space.wrap("While constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _While_field_unroller: @@ -3926,7 +3922,6 @@ __new__=interp2app(get_AST_new(While)), __init__=interp2app(While_init), ) -While.typedef.acceptable_as_base_class = False def If_get_test(space, w_self): if w_self.w_dict is not None: @@ -3993,7 +3988,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("If constructor takes 0 or 3 positional arguments") + 
w_err = space.wrap("If constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _If_field_unroller: @@ -4012,7 +4007,6 @@ __new__=interp2app(get_AST_new(If)), __init__=interp2app(If_init), ) -If.typedef.acceptable_as_base_class = False def With_get_context_expr(space, w_self): if w_self.w_dict is not None: @@ -4081,7 +4075,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("With constructor takes 0 or 3 positional arguments") + w_err = space.wrap("With constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _With_field_unroller: @@ -4100,7 +4094,6 @@ __new__=interp2app(get_AST_new(With)), __init__=interp2app(With_init), ) -With.typedef.acceptable_as_base_class = False def Raise_get_type(space, w_self): if w_self.w_dict is not None: @@ -4171,7 +4164,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Raise constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Raise constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Raise_field_unroller: @@ -4190,7 +4183,6 @@ __new__=interp2app(get_AST_new(Raise)), __init__=interp2app(Raise_init), ) -Raise.typedef.acceptable_as_base_class = False def TryExcept_get_body(space, w_self): if not w_self.initialization_state & 1: @@ -4255,7 +4247,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("TryExcept constructor takes 0 or 3 positional arguments") + w_err = space.wrap("TryExcept constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _TryExcept_field_unroller: @@ -4274,7 +4266,6 @@ __new__=interp2app(get_AST_new(TryExcept)), __init__=interp2app(TryExcept_init), ) -TryExcept.typedef.acceptable_as_base_class = False def 
TryFinally_get_body(space, w_self): if not w_self.initialization_state & 1: @@ -4320,7 +4311,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("TryFinally constructor takes 0 or 2 positional arguments") + w_err = space.wrap("TryFinally constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _TryFinally_field_unroller: @@ -4338,7 +4329,6 @@ __new__=interp2app(get_AST_new(TryFinally)), __init__=interp2app(TryFinally_init), ) -TryFinally.typedef.acceptable_as_base_class = False def Assert_get_test(space, w_self): if w_self.w_dict is not None: @@ -4388,7 +4378,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("Assert constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Assert constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Assert_field_unroller: @@ -4406,7 +4396,6 @@ __new__=interp2app(get_AST_new(Assert)), __init__=interp2app(Assert_init), ) -Assert.typedef.acceptable_as_base_class = False def Import_get_names(space, w_self): if not w_self.initialization_state & 1: @@ -4433,7 +4422,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Import constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Import constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Import_field_unroller: @@ -4450,7 +4439,6 @@ __new__=interp2app(get_AST_new(Import)), __init__=interp2app(Import_init), ) -Import.typedef.acceptable_as_base_class = False def ImportFrom_get_module(space, w_self): if w_self.w_dict is not None: @@ -4522,7 +4510,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("ImportFrom constructor takes 0 or 3 positional arguments") + w_err = space.wrap("ImportFrom 
constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ImportFrom_field_unroller: @@ -4541,7 +4529,6 @@ __new__=interp2app(get_AST_new(ImportFrom)), __init__=interp2app(ImportFrom_init), ) -ImportFrom.typedef.acceptable_as_base_class = False def Exec_get_body(space, w_self): if w_self.w_dict is not None: @@ -4612,7 +4599,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Exec constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Exec constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Exec_field_unroller: @@ -4631,7 +4618,6 @@ __new__=interp2app(get_AST_new(Exec)), __init__=interp2app(Exec_init), ) -Exec.typedef.acceptable_as_base_class = False def Global_get_names(space, w_self): if not w_self.initialization_state & 1: @@ -4658,7 +4644,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Global constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Global constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Global_field_unroller: @@ -4675,7 +4661,6 @@ __new__=interp2app(get_AST_new(Global)), __init__=interp2app(Global_init), ) -Global.typedef.acceptable_as_base_class = False def Expr_get_value(space, w_self): if w_self.w_dict is not None: @@ -4704,7 +4689,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Expr constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Expr constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Expr_field_unroller: @@ -4721,7 +4706,6 @@ __new__=interp2app(get_AST_new(Expr)), __init__=interp2app(Expr_init), ) -Expr.typedef.acceptable_as_base_class = False def Pass_init(space, w_self, __args__): 
w_self = space.descr_self_interp_w(Pass, w_self) @@ -4739,7 +4723,6 @@ __new__=interp2app(get_AST_new(Pass)), __init__=interp2app(Pass_init), ) -Pass.typedef.acceptable_as_base_class = False def Break_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Break, w_self) @@ -4757,7 +4740,6 @@ __new__=interp2app(get_AST_new(Break)), __init__=interp2app(Break_init), ) -Break.typedef.acceptable_as_base_class = False def Continue_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Continue, w_self) @@ -4775,7 +4757,6 @@ __new__=interp2app(get_AST_new(Continue)), __init__=interp2app(Continue_init), ) -Continue.typedef.acceptable_as_base_class = False def expr_get_lineno(space, w_self): if w_self.w_dict is not None: @@ -4825,8 +4806,8 @@ _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(expr_get_lineno, expr_set_lineno, cls=expr), col_offset=typedef.GetSetProperty(expr_get_col_offset, expr_set_col_offset, cls=expr), + __new__=interp2app(get_AST_new(expr)), ) -expr.typedef.acceptable_as_base_class = False def BoolOp_get_op(space, w_self): if w_self.w_dict is not None: @@ -4875,7 +4856,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("BoolOp constructor takes 0 or 2 positional arguments") + w_err = space.wrap("BoolOp constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _BoolOp_field_unroller: @@ -4893,7 +4874,6 @@ __new__=interp2app(get_AST_new(BoolOp)), __init__=interp2app(BoolOp_init), ) -BoolOp.typedef.acceptable_as_base_class = False def BinOp_get_left(space, w_self): if w_self.w_dict is not None: @@ -4965,7 +4945,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("BinOp constructor takes 0 or 3 positional arguments") + w_err = space.wrap("BinOp constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for 
field in _BinOp_field_unroller: @@ -4984,7 +4964,6 @@ __new__=interp2app(get_AST_new(BinOp)), __init__=interp2app(BinOp_init), ) -BinOp.typedef.acceptable_as_base_class = False def UnaryOp_get_op(space, w_self): if w_self.w_dict is not None: @@ -5035,7 +5014,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("UnaryOp constructor takes 0 or 2 positional arguments") + w_err = space.wrap("UnaryOp constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _UnaryOp_field_unroller: @@ -5053,7 +5032,6 @@ __new__=interp2app(get_AST_new(UnaryOp)), __init__=interp2app(UnaryOp_init), ) -UnaryOp.typedef.acceptable_as_base_class = False def Lambda_get_args(space, w_self): if w_self.w_dict is not None: @@ -5103,7 +5081,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("Lambda constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Lambda constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Lambda_field_unroller: @@ -5121,7 +5099,6 @@ __new__=interp2app(get_AST_new(Lambda)), __init__=interp2app(Lambda_init), ) -Lambda.typedef.acceptable_as_base_class = False def IfExp_get_test(space, w_self): if w_self.w_dict is not None: @@ -5192,7 +5169,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("IfExp constructor takes 0 or 3 positional arguments") + w_err = space.wrap("IfExp constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _IfExp_field_unroller: @@ -5211,7 +5188,6 @@ __new__=interp2app(get_AST_new(IfExp)), __init__=interp2app(IfExp_init), ) -IfExp.typedef.acceptable_as_base_class = False def Dict_get_keys(space, w_self): if not w_self.initialization_state & 1: @@ -5257,7 +5233,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if 
len(args_w) != 2: - w_err = space.wrap("Dict constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Dict constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Dict_field_unroller: @@ -5275,7 +5251,6 @@ __new__=interp2app(get_AST_new(Dict)), __init__=interp2app(Dict_init), ) -Dict.typedef.acceptable_as_base_class = False def Set_get_elts(space, w_self): if not w_self.initialization_state & 1: @@ -5302,7 +5277,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Set constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Set constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Set_field_unroller: @@ -5319,7 +5294,6 @@ __new__=interp2app(get_AST_new(Set)), __init__=interp2app(Set_init), ) -Set.typedef.acceptable_as_base_class = False def ListComp_get_elt(space, w_self): if w_self.w_dict is not None: @@ -5367,7 +5341,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("ListComp constructor takes 0 or 2 positional arguments") + w_err = space.wrap("ListComp constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ListComp_field_unroller: @@ -5385,7 +5359,6 @@ __new__=interp2app(get_AST_new(ListComp)), __init__=interp2app(ListComp_init), ) -ListComp.typedef.acceptable_as_base_class = False def SetComp_get_elt(space, w_self): if w_self.w_dict is not None: @@ -5433,7 +5406,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("SetComp constructor takes 0 or 2 positional arguments") + w_err = space.wrap("SetComp constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _SetComp_field_unroller: @@ -5451,7 +5424,6 @@ __new__=interp2app(get_AST_new(SetComp)), 
__init__=interp2app(SetComp_init), ) -SetComp.typedef.acceptable_as_base_class = False def DictComp_get_key(space, w_self): if w_self.w_dict is not None: @@ -5520,7 +5492,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("DictComp constructor takes 0 or 3 positional arguments") + w_err = space.wrap("DictComp constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _DictComp_field_unroller: @@ -5539,7 +5511,6 @@ __new__=interp2app(get_AST_new(DictComp)), __init__=interp2app(DictComp_init), ) -DictComp.typedef.acceptable_as_base_class = False def GeneratorExp_get_elt(space, w_self): if w_self.w_dict is not None: @@ -5587,7 +5558,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("GeneratorExp constructor takes 0 or 2 positional arguments") + w_err = space.wrap("GeneratorExp constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _GeneratorExp_field_unroller: @@ -5605,7 +5576,6 @@ __new__=interp2app(get_AST_new(GeneratorExp)), __init__=interp2app(GeneratorExp_init), ) -GeneratorExp.typedef.acceptable_as_base_class = False def Yield_get_value(space, w_self): if w_self.w_dict is not None: @@ -5634,7 +5604,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Yield constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Yield constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Yield_field_unroller: @@ -5651,7 +5621,6 @@ __new__=interp2app(get_AST_new(Yield)), __init__=interp2app(Yield_init), ) -Yield.typedef.acceptable_as_base_class = False def Compare_get_left(space, w_self): if w_self.w_dict is not None: @@ -5718,7 +5687,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Compare 
constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Compare constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Compare_field_unroller: @@ -5737,7 +5706,6 @@ __new__=interp2app(get_AST_new(Compare)), __init__=interp2app(Compare_init), ) -Compare.typedef.acceptable_as_base_class = False def Call_get_func(space, w_self): if w_self.w_dict is not None: @@ -5846,7 +5814,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 5: - w_err = space.wrap("Call constructor takes 0 or 5 positional arguments") + w_err = space.wrap("Call constructor takes either 0 or 5 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Call_field_unroller: @@ -5867,7 +5835,6 @@ __new__=interp2app(get_AST_new(Call)), __init__=interp2app(Call_init), ) -Call.typedef.acceptable_as_base_class = False def Repr_get_value(space, w_self): if w_self.w_dict is not None: @@ -5896,7 +5863,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Repr constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Repr constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Repr_field_unroller: @@ -5913,7 +5880,6 @@ __new__=interp2app(get_AST_new(Repr)), __init__=interp2app(Repr_init), ) -Repr.typedef.acceptable_as_base_class = False def Num_get_n(space, w_self): if w_self.w_dict is not None: @@ -5942,7 +5908,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Num constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Num constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Num_field_unroller: @@ -5959,7 +5925,6 @@ __new__=interp2app(get_AST_new(Num)), __init__=interp2app(Num_init), ) -Num.typedef.acceptable_as_base_class = False def 
Str_get_s(space, w_self): if w_self.w_dict is not None: @@ -5988,7 +5953,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Str constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Str constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Str_field_unroller: @@ -6005,7 +5970,6 @@ __new__=interp2app(get_AST_new(Str)), __init__=interp2app(Str_init), ) -Str.typedef.acceptable_as_base_class = False def Attribute_get_value(space, w_self): if w_self.w_dict is not None: @@ -6077,7 +6041,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Attribute constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Attribute constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Attribute_field_unroller: @@ -6096,7 +6060,6 @@ __new__=interp2app(get_AST_new(Attribute)), __init__=interp2app(Attribute_init), ) -Attribute.typedef.acceptable_as_base_class = False def Subscript_get_value(space, w_self): if w_self.w_dict is not None: @@ -6168,7 +6131,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("Subscript constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Subscript constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Subscript_field_unroller: @@ -6187,7 +6150,6 @@ __new__=interp2app(get_AST_new(Subscript)), __init__=interp2app(Subscript_init), ) -Subscript.typedef.acceptable_as_base_class = False def Name_get_id(space, w_self): if w_self.w_dict is not None: @@ -6238,7 +6200,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("Name constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Name constructor takes either 0 or 2 positional arguments") raise 
OperationError(space.w_TypeError, w_err) i = 0 for field in _Name_field_unroller: @@ -6256,7 +6218,6 @@ __new__=interp2app(get_AST_new(Name)), __init__=interp2app(Name_init), ) -Name.typedef.acceptable_as_base_class = False def List_get_elts(space, w_self): if not w_self.initialization_state & 1: @@ -6305,7 +6266,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("List constructor takes 0 or 2 positional arguments") + w_err = space.wrap("List constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _List_field_unroller: @@ -6323,7 +6284,6 @@ __new__=interp2app(get_AST_new(List)), __init__=interp2app(List_init), ) -List.typedef.acceptable_as_base_class = False def Tuple_get_elts(space, w_self): if not w_self.initialization_state & 1: @@ -6372,7 +6332,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("Tuple constructor takes 0 or 2 positional arguments") + w_err = space.wrap("Tuple constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Tuple_field_unroller: @@ -6390,7 +6350,6 @@ __new__=interp2app(get_AST_new(Tuple)), __init__=interp2app(Tuple_init), ) -Tuple.typedef.acceptable_as_base_class = False def Const_get_value(space, w_self): if w_self.w_dict is not None: @@ -6419,7 +6378,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Const constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Const constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Const_field_unroller: @@ -6436,14 +6395,13 @@ __new__=interp2app(get_AST_new(Const)), __init__=interp2app(Const_init), ) -Const.typedef.acceptable_as_base_class = False expr_context.typedef = typedef.TypeDef("expr_context", AST.typedef, __module__='_ast', 
_attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(expr_context)), ) -expr_context.typedef.acceptable_as_base_class = False _Load.typedef = typedef.TypeDef("Load", expr_context.typedef, @@ -6451,7 +6409,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Load)), ) -_Load.typedef.acceptable_as_base_class = False _Store.typedef = typedef.TypeDef("Store", expr_context.typedef, @@ -6459,7 +6416,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Store)), ) -_Store.typedef.acceptable_as_base_class = False _Del.typedef = typedef.TypeDef("Del", expr_context.typedef, @@ -6467,7 +6423,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Del)), ) -_Del.typedef.acceptable_as_base_class = False _AugLoad.typedef = typedef.TypeDef("AugLoad", expr_context.typedef, @@ -6475,7 +6430,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_AugLoad)), ) -_AugLoad.typedef.acceptable_as_base_class = False _AugStore.typedef = typedef.TypeDef("AugStore", expr_context.typedef, @@ -6483,7 +6437,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_AugStore)), ) -_AugStore.typedef.acceptable_as_base_class = False _Param.typedef = typedef.TypeDef("Param", expr_context.typedef, @@ -6491,14 +6444,13 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Param)), ) -_Param.typedef.acceptable_as_base_class = False slice.typedef = typedef.TypeDef("slice", AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(slice)), ) -slice.typedef.acceptable_as_base_class = False def Ellipsis_init(space, w_self, __args__): w_self = space.descr_self_interp_w(Ellipsis, w_self) @@ -6516,7 +6468,6 @@ __new__=interp2app(get_AST_new(Ellipsis)), __init__=interp2app(Ellipsis_init), ) -Ellipsis.typedef.acceptable_as_base_class = False def Slice_get_lower(space, w_self): if w_self.w_dict is not None: @@ -6587,7 +6538,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = 
space.wrap("Slice constructor takes 0 or 3 positional arguments") + w_err = space.wrap("Slice constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Slice_field_unroller: @@ -6606,7 +6557,6 @@ __new__=interp2app(get_AST_new(Slice)), __init__=interp2app(Slice_init), ) -Slice.typedef.acceptable_as_base_class = False def ExtSlice_get_dims(space, w_self): if not w_self.initialization_state & 1: @@ -6633,7 +6583,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("ExtSlice constructor takes 0 or 1 positional arguments") + w_err = space.wrap("ExtSlice constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ExtSlice_field_unroller: @@ -6650,7 +6600,6 @@ __new__=interp2app(get_AST_new(ExtSlice)), __init__=interp2app(ExtSlice_init), ) -ExtSlice.typedef.acceptable_as_base_class = False def Index_get_value(space, w_self): if w_self.w_dict is not None: @@ -6679,7 +6628,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 1: - w_err = space.wrap("Index constructor takes 0 or 1 positional arguments") + w_err = space.wrap("Index constructor takes either 0 or 1 positional argument") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _Index_field_unroller: @@ -6696,14 +6645,13 @@ __new__=interp2app(get_AST_new(Index)), __init__=interp2app(Index_init), ) -Index.typedef.acceptable_as_base_class = False boolop.typedef = typedef.TypeDef("boolop", AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(boolop)), ) -boolop.typedef.acceptable_as_base_class = False _And.typedef = typedef.TypeDef("And", boolop.typedef, @@ -6711,7 +6659,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_And)), ) -_And.typedef.acceptable_as_base_class = False _Or.typedef = typedef.TypeDef("Or", boolop.typedef, @@ -6719,14 +6666,13 @@ 
_fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Or)), ) -_Or.typedef.acceptable_as_base_class = False operator.typedef = typedef.TypeDef("operator", AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(operator)), ) -operator.typedef.acceptable_as_base_class = False _Add.typedef = typedef.TypeDef("Add", operator.typedef, @@ -6734,7 +6680,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Add)), ) -_Add.typedef.acceptable_as_base_class = False _Sub.typedef = typedef.TypeDef("Sub", operator.typedef, @@ -6742,7 +6687,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Sub)), ) -_Sub.typedef.acceptable_as_base_class = False _Mult.typedef = typedef.TypeDef("Mult", operator.typedef, @@ -6750,7 +6694,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Mult)), ) -_Mult.typedef.acceptable_as_base_class = False _Div.typedef = typedef.TypeDef("Div", operator.typedef, @@ -6758,7 +6701,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Div)), ) -_Div.typedef.acceptable_as_base_class = False _Mod.typedef = typedef.TypeDef("Mod", operator.typedef, @@ -6766,7 +6708,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Mod)), ) -_Mod.typedef.acceptable_as_base_class = False _Pow.typedef = typedef.TypeDef("Pow", operator.typedef, @@ -6774,7 +6715,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Pow)), ) -_Pow.typedef.acceptable_as_base_class = False _LShift.typedef = typedef.TypeDef("LShift", operator.typedef, @@ -6782,7 +6722,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_LShift)), ) -_LShift.typedef.acceptable_as_base_class = False _RShift.typedef = typedef.TypeDef("RShift", operator.typedef, @@ -6790,7 +6729,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_RShift)), ) -_RShift.typedef.acceptable_as_base_class = False _BitOr.typedef = typedef.TypeDef("BitOr", operator.typedef, @@ -6798,7 +6736,6 @@ 
_fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitOr)), ) -_BitOr.typedef.acceptable_as_base_class = False _BitXor.typedef = typedef.TypeDef("BitXor", operator.typedef, @@ -6806,7 +6743,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitXor)), ) -_BitXor.typedef.acceptable_as_base_class = False _BitAnd.typedef = typedef.TypeDef("BitAnd", operator.typedef, @@ -6814,7 +6750,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_BitAnd)), ) -_BitAnd.typedef.acceptable_as_base_class = False _FloorDiv.typedef = typedef.TypeDef("FloorDiv", operator.typedef, @@ -6822,14 +6757,13 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_FloorDiv)), ) -_FloorDiv.typedef.acceptable_as_base_class = False unaryop.typedef = typedef.TypeDef("unaryop", AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(unaryop)), ) -unaryop.typedef.acceptable_as_base_class = False _Invert.typedef = typedef.TypeDef("Invert", unaryop.typedef, @@ -6837,7 +6771,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Invert)), ) -_Invert.typedef.acceptable_as_base_class = False _Not.typedef = typedef.TypeDef("Not", unaryop.typedef, @@ -6845,7 +6778,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Not)), ) -_Not.typedef.acceptable_as_base_class = False _UAdd.typedef = typedef.TypeDef("UAdd", unaryop.typedef, @@ -6853,7 +6785,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_UAdd)), ) -_UAdd.typedef.acceptable_as_base_class = False _USub.typedef = typedef.TypeDef("USub", unaryop.typedef, @@ -6861,14 +6792,13 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_USub)), ) -_USub.typedef.acceptable_as_base_class = False cmpop.typedef = typedef.TypeDef("cmpop", AST.typedef, __module__='_ast', _attributes=_FieldsWrapper([]), + __new__=interp2app(get_AST_new(cmpop)), ) -cmpop.typedef.acceptable_as_base_class = False _Eq.typedef = typedef.TypeDef("Eq", cmpop.typedef, @@ 
-6876,7 +6806,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Eq)), ) -_Eq.typedef.acceptable_as_base_class = False _NotEq.typedef = typedef.TypeDef("NotEq", cmpop.typedef, @@ -6884,7 +6813,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_NotEq)), ) -_NotEq.typedef.acceptable_as_base_class = False _Lt.typedef = typedef.TypeDef("Lt", cmpop.typedef, @@ -6892,7 +6820,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Lt)), ) -_Lt.typedef.acceptable_as_base_class = False _LtE.typedef = typedef.TypeDef("LtE", cmpop.typedef, @@ -6900,7 +6827,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_LtE)), ) -_LtE.typedef.acceptable_as_base_class = False _Gt.typedef = typedef.TypeDef("Gt", cmpop.typedef, @@ -6908,7 +6834,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Gt)), ) -_Gt.typedef.acceptable_as_base_class = False _GtE.typedef = typedef.TypeDef("GtE", cmpop.typedef, @@ -6916,7 +6841,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_GtE)), ) -_GtE.typedef.acceptable_as_base_class = False _Is.typedef = typedef.TypeDef("Is", cmpop.typedef, @@ -6924,7 +6848,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_Is)), ) -_Is.typedef.acceptable_as_base_class = False _IsNot.typedef = typedef.TypeDef("IsNot", cmpop.typedef, @@ -6932,7 +6855,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_IsNot)), ) -_IsNot.typedef.acceptable_as_base_class = False _In.typedef = typedef.TypeDef("In", cmpop.typedef, @@ -6940,7 +6862,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_In)), ) -_In.typedef.acceptable_as_base_class = False _NotIn.typedef = typedef.TypeDef("NotIn", cmpop.typedef, @@ -6948,7 +6869,6 @@ _fields=_FieldsWrapper([]), __new__=interp2app(get_AST_new(_NotIn)), ) -_NotIn.typedef.acceptable_as_base_class = False def comprehension_get_target(space, w_self): if w_self.w_dict is not None: @@ -7017,7 +6937,7 @@ args_w, kwargs_w = __args__.unpack() if 
args_w: if len(args_w) != 3: - w_err = space.wrap("comprehension constructor takes 0 or 3 positional arguments") + w_err = space.wrap("comprehension constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _comprehension_field_unroller: @@ -7036,7 +6956,6 @@ __new__=interp2app(get_AST_new(comprehension)), __init__=interp2app(comprehension_init), ) -comprehension.typedef.acceptable_as_base_class = False def excepthandler_get_lineno(space, w_self): if w_self.w_dict is not None: @@ -7086,8 +7005,8 @@ _attributes=_FieldsWrapper(['lineno', 'col_offset']), lineno=typedef.GetSetProperty(excepthandler_get_lineno, excepthandler_set_lineno, cls=excepthandler), col_offset=typedef.GetSetProperty(excepthandler_get_col_offset, excepthandler_set_col_offset, cls=excepthandler), + __new__=interp2app(get_AST_new(excepthandler)), ) -excepthandler.typedef.acceptable_as_base_class = False def ExceptHandler_get_type(space, w_self): if w_self.w_dict is not None: @@ -7156,7 +7075,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 3: - w_err = space.wrap("ExceptHandler constructor takes 0 or 3 positional arguments") + w_err = space.wrap("ExceptHandler constructor takes either 0 or 3 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _ExceptHandler_field_unroller: @@ -7175,7 +7094,6 @@ __new__=interp2app(get_AST_new(ExceptHandler)), __init__=interp2app(ExceptHandler_init), ) -ExceptHandler.typedef.acceptable_as_base_class = False def arguments_get_args(space, w_self): if not w_self.initialization_state & 1: @@ -7269,7 +7187,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 4: - w_err = space.wrap("arguments constructor takes 0 or 4 positional arguments") + w_err = space.wrap("arguments constructor takes either 0 or 4 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _arguments_field_unroller: @@ -7289,7 +7207,6 
@@ __new__=interp2app(get_AST_new(arguments)), __init__=interp2app(arguments_init), ) -arguments.typedef.acceptable_as_base_class = False def keyword_get_arg(space, w_self): if w_self.w_dict is not None: @@ -7339,7 +7256,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("keyword constructor takes 0 or 2 positional arguments") + w_err = space.wrap("keyword constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _keyword_field_unroller: @@ -7357,7 +7274,6 @@ __new__=interp2app(get_AST_new(keyword)), __init__=interp2app(keyword_init), ) -keyword.typedef.acceptable_as_base_class = False def alias_get_name(space, w_self): if w_self.w_dict is not None: @@ -7410,7 +7326,7 @@ args_w, kwargs_w = __args__.unpack() if args_w: if len(args_w) != 2: - w_err = space.wrap("alias constructor takes 0 or 2 positional arguments") + w_err = space.wrap("alias constructor takes either 0 or 2 positional arguments") raise OperationError(space.w_TypeError, w_err) i = 0 for field in _alias_field_unroller: @@ -7428,5 +7344,4 @@ __new__=interp2app(get_AST_new(alias)), __init__=interp2app(alias_init), ) -alias.typedef.acceptable_as_base_class = False - + diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -135,10 +135,16 @@ def test_abstract_ast_types(self): ast = self.ast - raises(TypeError, ast.expr) - raises(TypeError, ast.AST) - raises(TypeError, type, "X", (ast.AST,), {}) - raises(TypeError, type, "Y", (ast.expr,), {}) + ast.expr() + ast.AST() + class X(ast.AST): + pass + X() + class Y(ast.expr): + pass + Y() + exc = raises(TypeError, ast.AST, 2) + assert exc.value.args[0] == "_ast.AST constructor takes 0 positional arguments" def test_constructor(self): ast = self.ast @@ -165,7 +171,7 @@ assert fr.col_offset == 1 exc = raises(TypeError, ast.Module, 1, 2).value msg = 
str(exc) - assert msg == "Module constructor takes 0 or 1 positional arguments" + assert msg == "Module constructor takes either 0 or 1 positional argument" ast.Module(nothing=23) def test_future(self): From commits-noreply at bitbucket.org Mon Mar 14 20:22:03 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 14 Mar 2011 20:22:03 +0100 (CET) Subject: [pypy-svn] pypy default: Skip part of the test: PyPy now adds a __dict__ to ast.AST objects. Message-ID: <20110314192203.630CA282BDE@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42639:4f55bea2b969 Date: 2011-03-14 19:54 +0100 http://bitbucket.org/pypy/pypy/changeset/4f55bea2b969/ Log: Skip part of the test: PyPy now adds a __dict__ to ast.AST objects. diff --git a/lib-python/modified-2.7.0/test/test_ast.py b/lib-python/modified-2.7.0/test/test_ast.py --- a/lib-python/modified-2.7.0/test/test_ast.py +++ b/lib-python/modified-2.7.0/test/test_ast.py @@ -195,22 +195,26 @@ self._assertTrueorder(value, parent_pos) def test_AST_objects(self): - x = ast.AST() - try: - x.foobar = 21 - except AttributeError, e: - self.assertEquals(e.args[0], - "'_ast.AST' object has no attribute 'foobar'") - else: - self.assert_(False) + if test_support.check_impl_detail(): + # PyPy also provides a __dict__ to the ast.AST base class. 
- try: - ast.AST(lineno=2) - except AttributeError, e: - self.assertEquals(e.args[0], - "'_ast.AST' object has no attribute 'lineno'") - else: - self.assert_(False) + x = ast.AST() + try: + x.foobar = 21 + except AttributeError, e: + self.assertEquals(e.args[0], + "'_ast.AST' object has no attribute 'foobar'") + else: + self.assert_(False) + + try: + ast.AST(lineno=2) + except AttributeError, e: + self.assertEquals(e.args[0], + "'_ast.AST' object has no attribute 'lineno'") + else: + self.assert_(False) + try: ast.AST(2) except TypeError, e: From commits-noreply at bitbucket.org Mon Mar 14 20:55:23 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 20:55:23 +0100 (CET) Subject: [pypy-svn] pypy default: (fijal, alex, greg): When you have 2 jitdrivers, if the first one gets into a function of the second one, but the trace gets too long, don't find a function inside the second jitdriver as the longest function. Message-ID: <20110314195523.12CD9282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42640:a7c9fe6b99d1 Date: 2011-03-14 15:54 -0400 http://bitbucket.org/pypy/pypy/changeset/a7c9fe6b99d1/ Log: (fijal, alex, greg): When you have 2 jitdrivers, if the first one gets into a function of the second one, but the trace gets too long, don't find a function inside the second jitdriver as the longest function. 
diff --git a/pypy/jit/metainterp/test/test_pyjitpl.py b/pypy/jit/metainterp/test/test_pyjitpl.py --- a/pypy/jit/metainterp/test/test_pyjitpl.py +++ b/pypy/jit/metainterp/test/test_pyjitpl.py @@ -18,8 +18,9 @@ class FakeStaticData: cpu = None warmrunnerdesc = None + mainjitcode = portal - metainterp = pyjitpl.MetaInterp(FakeStaticData(), None) + metainterp = pyjitpl.MetaInterp(FakeStaticData(), FakeStaticData()) metainterp.framestack = [] class FakeHistory: operations = [] diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py --- a/pypy/jit/metainterp/history.py +++ b/pypy/jit/metainterp/history.py @@ -522,7 +522,7 @@ def forget_value(self): self.value = 0 - + def clonebox(self): return BoxInt(self.value) @@ -938,6 +938,9 @@ def add_new_loop(self, loop): pass + def record_aborted(self, greenkey): + pass + def view(self, **kwds): pass @@ -952,6 +955,7 @@ def __init__(self): self.loops = [] self.locations = [] + self.aborted_keys = [] def set_history(self, history): self.history = history @@ -974,6 +978,9 @@ def add_new_loop(self, loop): self.loops.append(loop) + def record_aborted(self, greenkey): + self.aborted_keys.append(greenkey) + # test read interface def get_all_loops(self): diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -834,7 +834,7 @@ jcposition, redboxes): resumedescr = compile.ResumeAtPositionDescr() self.capture_resumedata(resumedescr, orgpc) - + any_operation = len(self.metainterp.history.operations) > 0 jitdriver_sd = self.metainterp.staticdata.jitdrivers_sd[jdindex] self.verify_green_args(jitdriver_sd, greenboxes) @@ -852,7 +852,7 @@ "found a loop_header for a JitDriver that does not match " "the following jit_merge_point's") self.metainterp.seen_loop_header_for_jdindex = -1 - + # if not self.metainterp.in_recursion: assert jitdriver_sd is self.metainterp.jitdriver_sd @@ -1417,10 +1417,13 @@ f.setup_call(boxes) raise 
ChangeFrame + def is_main_jitcode(self, jitcode): + return self.jitdriver_sd is not None and jitcode is self.jitdriver_sd.mainjitcode + def newframe(self, jitcode, greenkey=None): if jitcode.is_portal: self.in_recursion += 1 - if greenkey is not None: + if greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (greenkey, len(self.history.operations))) if len(self.free_frames_list) > 0: @@ -1433,9 +1436,10 @@ def popframe(self): frame = self.framestack.pop() - if frame.jitcode.is_portal: + jitcode = frame.jitcode + if jitcode.is_portal: self.in_recursion -= 1 - if frame.greenkey is not None: + if frame.greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (None, len(self.history.operations))) # we save the freed MIFrames to avoid needing to re-create new @@ -1626,6 +1630,7 @@ warmrunnerstate = self.jitdriver_sd.warmstate if len(self.history.operations) > warmrunnerstate.trace_limit: greenkey_of_huge_function = self.find_biggest_function() + self.staticdata.stats.record_aborted(greenkey_of_huge_function) self.portal_trace_positions = None if greenkey_of_huge_function is not None: warmrunnerstate.disable_noninlinable_function( @@ -1713,7 +1718,7 @@ dont_change_position = True else: dont_change_position = False - try: + try: self.prepare_resume_from_failure(key.guard_opnum, dont_change_position) if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(ABORT_BRIDGE) @@ -1918,7 +1923,7 @@ self.history.inputargs = original_inputargs self.history.operations = self.history.operations[:start] - + self.history.record(rop.JUMP, bridge_arg_boxes[num_green_args:], None) try: target_loop_token = compile.compile_new_bridge(self, diff --git a/pypy/jit/metainterp/test/test_jitdriver.py b/pypy/jit/metainterp/test/test_jitdriver.py --- a/pypy/jit/metainterp/test/test_jitdriver.py +++ b/pypy/jit/metainterp/test/test_jitdriver.py @@ -1,6 +1,7 @@ """Tests for multiple JitDrivers.""" 
-from pypy.rlib.jit import JitDriver +from pypy.rlib.jit import JitDriver, unroll_safe from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin +from pypy.jit.metainterp.warmspot import get_stats def getloc1(): @@ -99,6 +100,43 @@ # we expect no int_sub, but a residual call self.check_loops(int_sub=0, call=1) + def test_multiple_jits_trace_too_long(self): + myjitdriver1 = JitDriver(greens=["n"], reds=["i", "box"]) + myjitdriver2 = JitDriver(greens=["n"], reds=["i"]) + + class IntBox(object): + def __init__(self, val): + self.val = val + + def loop1(n): + i = 0 + box = IntBox(10) + while i < n: + myjitdriver1.can_enter_jit(n=n, i=i, box=box) + myjitdriver1.jit_merge_point(n=n, i=i, box=box) + i += 1 + loop2(box) + return i + + def loop2(n): + i = 0 + f(10) + while i < n.val: + myjitdriver2.can_enter_jit(n=n, i=i) + myjitdriver2.jit_merge_point(n=n, i=i) + i += 1 + + @unroll_safe + def f(n): + i = 0 + while i < n: + i += 1 + + res = self.meta_interp(loop1, [10], inline=True, trace_limit=6) + assert res == 10 + stats = get_stats() + assert stats.aborted_keys == [None, None] + class TestLLtype(MultipleJitDriversTests, LLJitMixin): pass From commits-noreply at bitbucket.org Mon Mar 14 20:55:23 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 20:55:23 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream. Message-ID: <20110314195523.4801D282BD4@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42641:893a3f84e86b Date: 2011-03-14 15:55 -0400 http://bitbucket.org/pypy/pypy/changeset/893a3f84e86b/ Log: merged upstream. 
From commits-noreply at bitbucket.org Mon Mar 14 21:42:45 2011 From: commits-noreply at bitbucket.org (migueldvb) Date: Mon, 14 Mar 2011 21:42:45 +0100 (CET) Subject: [pypy-svn] pypy default: Fix assertRaises error in test_iadd function Message-ID: <20110314204245.BE842282B90@codespeak.net> Author: Miguel de Val Borro Branch: Changeset: r42642:4f22ef95b38c Date: 2011-03-14 21:42 +0100 http://bitbucket.org/pypy/pypy/changeset/4f22ef95b38c/ Log: Fix assertRaises error in test_iadd function diff --git a/lib-python/modified-2.7.0/test/list_tests.py b/lib-python/modified-2.7.0/test/list_tests.py --- a/lib-python/modified-2.7.0/test/list_tests.py +++ b/lib-python/modified-2.7.0/test/list_tests.py @@ -476,7 +476,11 @@ u += "eggs" self.assertEqual(u, self.type2test("spameggs")) - self.assertRaises(TypeError, u.__iadd__, None) + def f_iadd(u, x): + u += x + return u + + self.assertRaises(TypeError, f_iadd, u, None) def test_imul(self): u = self.type2test([0, 1]) From commits-noreply at bitbucket.org Mon Mar 14 21:56:03 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 21:56:03 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Add strlen task Message-ID: <20110314205603.7C2AA282B90@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3377:802f10340e75 Date: 2011-03-14 16:55 -0400 http://bitbucket.org/pypy/extradoc/changeset/802f10340e75/ Log: Add strlen task diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -99,6 +99,8 @@ - xxx (find more examples :-) +- strlen should return int >= 0 + BACKEND TASKS ------------- diff --git a/talk/pycon2011/whyslow/examples/tracing.py b/talk/pycon2011/whyslow/examples/tracing.py --- a/talk/pycon2011/whyslow/examples/tracing.py +++ b/talk/pycon2011/whyslow/examples/tracing.py @@ -3,7 +3,7 @@ i = 0 s = 0 while i < 3000: - if i % 3 == 0: + if i & 4: s += 1 else: s += 2 From commits-noreply at bitbucket.org Mon Mar 14 22:02:44 2011 From: commits-noreply 
at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 22:02:44 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: already done. Message-ID: <20110314210244.AF7D8282B90@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3378:d1bc07934db1 Date: 2011-03-14 17:02 -0400 http://bitbucket.org/pypy/extradoc/changeset/d1bc07934db1/ Log: already done. diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -99,8 +99,6 @@ - xxx (find more examples :-) -- strlen should return int >= 0 - BACKEND TASKS ------------- From commits-noreply at bitbucket.org Mon Mar 14 22:03:06 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 22:03:06 +0100 (CET) Subject: [pypy-svn] pypy default: Add another test for this it was already working. Message-ID: <20110314210306.B948D282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42643:10ee8ee24302 Date: 2011-03-14 17:01 -0400 http://bitbucket.org/pypy/pypy/changeset/10ee8ee24302/ Log: Add another test for this it was already working. 
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5130,6 +5130,23 @@ """ self.optimize_loop(ops, expected) + def test_strlen_positive(self): + ops = """ + [p0] + i0 = strlen(p0) + i1 = int_ge(i0, 0) + guard_true(i1) [] + i2 = int_gt(i0, -1) + guard_true(i2) [] + jump(p0) + """ + expected = """ + [p0] + i0 = strlen(p0) + jump(p0) + """ + self.optimize_loop(ops, expected) + # ---------- def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): from pypy.jit.metainterp.optimizeopt import string diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -269,15 +269,14 @@ v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) + optimize_STRLEN = optimize_UNICODELEN = optimize_ARRAYLEN_GC + def optimize_STRGETITEM(self, op): self.emit_operation(op) v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) v1.intbound.make_lt(IntUpperBound(256)) - optimize_STRLEN = optimize_ARRAYLEN_GC - optimize_UNICODELEN = optimize_ARRAYLEN_GC - def make_int_lt(self, box1, box2): v1 = self.getvalue(box1) v2 = self.getvalue(box2) From commits-noreply at bitbucket.org Mon Mar 14 22:03:07 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 14 Mar 2011 22:03:07 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110314210307.0132D282BD4@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42644:824f38fc8a6e Date: 2011-03-14 17:02 -0400 http://bitbucket.org/pypy/pypy/changeset/824f38fc8a6e/ Log: merged upstream From commits-noreply at bitbucket.org Mon Mar 14 22:04:56 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:04:56 +0100 (CET) Subject: [pypy-svn] pypy default: 
Backout cc834a32740a. Message-ID: <20110314210456.1FB0A282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r42645:dd514bd8320c Date: 2011-03-14 15:14 -0400 http://bitbucket.org/pypy/pypy/changeset/dd514bd8320c/ Log: Backout cc834a32740a. diff --git a/pypy/jit/backend/x86/regloc.py b/pypy/jit/backend/x86/regloc.py --- a/pypy/jit/backend/x86/regloc.py +++ b/pypy/jit/backend/x86/regloc.py @@ -80,9 +80,6 @@ else: return rx86.R.names[self.value] - def _getregkey(self): - return (self.value << 1) | self.is_xmm - def is_xmm_location(self): return self.is_xmm From commits-noreply at bitbucket.org Mon Mar 14 22:04:57 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:04:57 +0100 (CET) Subject: [pypy-svn] pypy default: Backend ac8ce2af7ec5. Message-ID: <20110314210457.8B400282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r42646:bcef2739f906 Date: 2011-03-14 15:14 -0400 http://bitbucket.org/pypy/pypy/changeset/bcef2739f906/ Log: Backend ac8ce2af7ec5. diff --git a/pypy/jit/backend/x86/jump.py b/pypy/jit/backend/x86/jump.py --- a/pypy/jit/backend/x86/jump.py +++ b/pypy/jit/backend/x86/jump.py @@ -82,43 +82,16 @@ class ConcreteJumpRemapper(JumpRemapper): - def get_tmp_reg(self, dst): - """Get a temporary register suitable for copying into dst.""" + def get_tmp_reg(self, src): + """Get a temporary register suitable for copying src.""" raise NotImplementedError def move(self, src, dst): if dst.is_memory_reference() and src.is_memory_reference(): - tmpreg = self.get_tmp_reg(dst) + tmpreg = self.get_tmp_reg(src) self.simple_move(src, tmpreg) src = tmpreg self.simple_move(src, dst) def simple_move(self, src, dst): raise NotImplementedError - - -class X86JumpRemapper(ConcreteJumpRemapper): - def __init__(self, assembler, tmp, xmmtmp): - self.assembler = assembler - self.tmpreg = tmp - self.xmmtmpreg = xmmtmp - - def get_tmp_reg(self, dst): - if dst.is_xmm_location(): - return self.xmmtmpreg - else: - return self.tmpreg - - def 
simple_move(self, src, dst): - self.assembler.regalloc_mov(src, dst) - - def push(self, src): - self.assembler.regalloc_push(src) - - def pop(self, dst): - self.assembler.regalloc_pop(dst) - -def remap_frame_layout(assembler, src_locations, dst_locations, tmp, xmmtmp): - # X86-specific logic - remapper = X86JumpRemapper(assembler, tmp, xmmtmp) - remapper.remap_frame_layout(src_locations, dst_locations) diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -649,10 +649,8 @@ else: get_from_stack.append((floc, True)) - remap_frame_layout(self, - src_locs + xmm_src_locs, - dst_locs + xmm_dst_locs, - X86_64_SCRATCH_REG, X86_64_XMM_SCRATCH_REG) + remap_frame_layout(self, src_locs, dst_locs, X86_64_SCRATCH_REG) + remap_frame_layout(self, xmm_src_locs, xmm_dst_locs, X86_64_XMM_SCRATCH_REG) for i in range(len(get_from_stack)): loc, is_xmm = get_from_stack[i] @@ -981,10 +979,8 @@ self.mc.MOV_sr(i*WORD, loc.value) # Handle register arguments - remap_frame_layout(self, - src_locs + xmm_src_locs, - dst_locs + xmm_dst_locs, - X86_64_SCRATCH_REG, X86_64_XMM_SCRATCH_REG) + remap_frame_layout(self, src_locs, dst_locs, X86_64_SCRATCH_REG) + remap_frame_layout(self, xmm_src_locs, xmm_dst_locs, X86_64_XMM_SCRATCH_REG) self._regalloc.reserve_param(len(pass_on_stack)) self.mc.CALL(x) @@ -1957,7 +1953,7 @@ # arglocs[2:] too, so they are saved on the stack above and # restored below. remap_frame_layout(self, arglocs[:2], [edi, esi], - X86_64_SCRATCH_REG, X86_64_XMM_SCRATCH_REG) + X86_64_SCRATCH_REG) # misaligned stack in the call, but it's ok because the write barrier # is not going to call anything more. 
Also, this assumes that the diff --git a/pypy/jit/backend/x86/regloc.py b/pypy/jit/backend/x86/regloc.py --- a/pypy/jit/backend/x86/regloc.py +++ b/pypy/jit/backend/x86/regloc.py @@ -36,8 +36,6 @@ def find_unused_reg(self): return eax - def is_xmm_location(self): raise NotImplementedError - class StackLoc(AssemblerLocation): _immutable_ = True def __init__(self, position, ebp_offset, num_words, type): @@ -48,9 +46,6 @@ # One of INT, REF, FLOAT self.type = type - def is_xmm_location(self): - return self.type == FLOAT - def frame_size(self): return self.width // WORD @@ -73,16 +68,12 @@ self.width = 8 else: self.width = WORD - def __repr__(self): if self.is_xmm: return rx86.R.xmmnames[self.value] else: return rx86.R.names[self.value] - def is_xmm_location(self): - return self.is_xmm - def lowest8bits(self): assert not self.is_xmm return RegLoc(rx86.low_byte(self.value), False) diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -11,7 +11,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib import rgc from pypy.jit.backend.llsupport import symbolic -from pypy.jit.backend.x86.jump import remap_frame_layout +#from pypy.jit.backend.x86.jump import remap_frame_layout from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.metainterp.resoperation import rop @@ -1172,19 +1172,16 @@ xmmtmploc = self.xrm.force_allocate_reg(box1, selected_reg=xmmtmp) # Part about non-floats # XXX we don't need a copy, we only just the original list - src_locations1 = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type != FLOAT] assert tmploc not in nonfloatlocs - dst_locations1 = [loc for loc in nonfloatlocs if loc is not None] + dst_locations = [loc for loc in nonfloatlocs if loc is not None] + 
remap_frame_layout(assembler, src_locations, dst_locations, tmploc) # Part about floats - src_locations2 = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type == FLOAT] - dst_locations2 = [loc for loc in floatlocs if loc is not None] - # Do the remapping - remap_frame_layout(assembler, - src_locations1 + src_locations2, - dst_locations1 + dst_locations2, - tmploc, xmmtmp) + dst_locations = [loc for loc in floatlocs if loc is not None] + remap_frame_layout(assembler, src_locations, dst_locations, xmmtmp) self.rm.possibly_free_var(box) self.xrm.possibly_free_var(box1) self.possibly_free_vars_for_op(op) From commits-noreply at bitbucket.org Mon Mar 14 22:04:59 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:04:59 +0100 (CET) Subject: [pypy-svn] pypy default: Backout 87a0fa635b09. Message-ID: <20110314210459.E5616282BD8@codespeak.net> Author: Armin Rigo Branch: Changeset: r42647:2ab27f57a0b5 Date: 2011-03-14 15:15 -0400 http://bitbucket.org/pypy/pypy/changeset/2ab27f57a0b5/ Log: Backout 87a0fa635b09. diff --git a/pypy/jit/backend/x86/jump.py b/pypy/jit/backend/x86/jump.py --- a/pypy/jit/backend/x86/jump.py +++ b/pypy/jit/backend/x86/jump.py @@ -2,96 +2,70 @@ from pypy.tool.pairtype import extendabletype from pypy.jit.backend.x86.regloc import ImmedLoc, StackLoc +def remap_frame_layout(assembler, src_locations, dst_locations, tmpreg): + pending_dests = len(dst_locations) + srccount = {} # maps dst_locations to how many times the same + # location appears in src_locations + for dst in dst_locations: + key = dst._getregkey() + assert key not in srccount, "duplicate value in dst_locations!" 
+ srccount[key] = 0 + for i in range(len(dst_locations)): + src = src_locations[i] + if isinstance(src, ImmedLoc): + continue + key = src._getregkey() + if key in srccount: + if key == dst_locations[i]._getregkey(): + srccount[key] = -sys.maxint # ignore a move "x = x" + pending_dests -= 1 + else: + srccount[key] += 1 -class JumpRemapper(object): - - def move(self, src, dst): - """Called to generate a move from src to dst.""" - raise NotImplementedError - - def push(self, src): - """Called to temporarily save away the value of src.""" - raise NotImplementedError - - def pop(self, dst): - """Called after push() to restore the saved value into dst.""" - raise NotImplementedError - - def remap_frame_layout(self, src_locations, dst_locations): - pending_dests = len(dst_locations) - srccount = {} # maps dst_locations to how many times the same - # location appears in src_locations - for dst in dst_locations: + while pending_dests > 0: + progress = False + for i in range(len(dst_locations)): + dst = dst_locations[i] key = dst._getregkey() - assert key not in srccount, "duplicate value in dst_locations!" 
- srccount[key] = 0 - for i in range(len(dst_locations)): - src = src_locations[i] - if isinstance(src, ImmedLoc): - continue - key = src._getregkey() - if key in srccount: - if key == dst_locations[i]._getregkey(): - srccount[key] = -sys.maxint # ignore a move "x = x" - pending_dests -= 1 - else: - srccount[key] += 1 - - while pending_dests > 0: - progress = False + if srccount[key] == 0: + srccount[key] = -1 # means "it's done" + pending_dests -= 1 + src = src_locations[i] + if not isinstance(src, ImmedLoc): + key = src._getregkey() + if key in srccount: + srccount[key] -= 1 + _move(assembler, src, dst, tmpreg) + progress = True + if not progress: + # we are left with only pure disjoint cycles + sources = {} # maps dst_locations to src_locations + for i in range(len(dst_locations)): + src = src_locations[i] + dst = dst_locations[i] + sources[dst._getregkey()] = src + # for i in range(len(dst_locations)): dst = dst_locations[i] - key = dst._getregkey() - if srccount[key] == 0: - srccount[key] = -1 # means "it's done" - pending_dests -= 1 - src = src_locations[i] - if not isinstance(src, ImmedLoc): - key = src._getregkey() - if key in srccount: - srccount[key] -= 1 - self.move(src, dst) - progress = True - if not progress: - # we are left with only pure disjoint cycles - sources = {} # maps dst_locations to src_locations - for i in range(len(dst_locations)): - src = src_locations[i] - dst = dst_locations[i] - sources[dst._getregkey()] = src - # - for i in range(len(dst_locations)): - dst = dst_locations[i] - originalkey = dst._getregkey() - if srccount[originalkey] >= 0: - self.push(dst) - while True: - key = dst._getregkey() - assert srccount[key] == 1 - # ^^^ because we are in a simple cycle - srccount[key] = -1 - pending_dests -= 1 - src = sources[key] - if src._getregkey() == originalkey: - break - self.move(src, dst) - dst = src - self.pop(dst) - assert pending_dests == 0 + originalkey = dst._getregkey() + if srccount[originalkey] >= 0: + 
assembler.regalloc_push(dst) + while True: + key = dst._getregkey() + assert srccount[key] == 1 + # ^^^ because we are in a simple cycle + srccount[key] = -1 + pending_dests -= 1 + src = sources[key] + if src._getregkey() == originalkey: + break + _move(assembler, src, dst, tmpreg) + dst = src + assembler.regalloc_pop(dst) + assert pending_dests == 0 - -class ConcreteJumpRemapper(JumpRemapper): - - def get_tmp_reg(self, src): - """Get a temporary register suitable for copying src.""" - raise NotImplementedError - - def move(self, src, dst): - if dst.is_memory_reference() and src.is_memory_reference(): - tmpreg = self.get_tmp_reg(src) - self.simple_move(src, tmpreg) - src = tmpreg - self.simple_move(src, dst) - - def simple_move(self, src, dst): - raise NotImplementedError +def _move(assembler, src, dst, tmpreg): + if dst.is_memory_reference() and src.is_memory_reference(): + assembler.regalloc_mov(src, tmpreg) + src = tmpreg + assembler.regalloc_mov(src, dst) diff --git a/pypy/jit/backend/x86/test/test_jump.py b/pypy/jit/backend/x86/test/test_jump.py --- a/pypy/jit/backend/x86/test/test_jump.py +++ b/pypy/jit/backend/x86/test/test_jump.py @@ -1,25 +1,21 @@ from pypy.jit.backend.x86.regloc import * from pypy.jit.backend.x86.regalloc import X86FrameManager -from pypy.jit.backend.x86.jump import ConcreteJumpRemapper +from pypy.jit.backend.x86.jump import remap_frame_layout from pypy.jit.metainterp.history import INT frame_pos = X86FrameManager.frame_pos -class MockAssembler(ConcreteJumpRemapper): - def __init__(self, tmpreg='?'): +class MockAssembler: + def __init__(self): self.ops = [] - self.tmpreg = tmpreg - def get_tmp_reg(self, src): - return self.tmpreg - - def simple_move(self, from_loc, to_loc): + def regalloc_mov(self, from_loc, to_loc): self.ops.append(('mov', from_loc, to_loc)) - def push(self, loc): + def regalloc_push(self, loc): self.ops.append(('push', loc)) - def pop(self, loc): + def regalloc_pop(self, loc): self.ops.append(('pop', loc)) def got(self, 
expected): @@ -41,32 +37,33 @@ def test_trivial(): assembler = MockAssembler() - assembler.remap_frame_layout([], []) + remap_frame_layout(assembler, [], [], '?') assert assembler.ops == [] - assembler.remap_frame_layout([eax, ebx, ecx, edx, esi, edi], - [eax, ebx, ecx, edx, esi, edi]) + remap_frame_layout(assembler, [eax, ebx, ecx, edx, esi, edi], + [eax, ebx, ecx, edx, esi, edi], '?') assert assembler.ops == [] s8 = frame_pos(1, INT) s12 = frame_pos(31, INT) s20 = frame_pos(6, INT) - assembler.remap_frame_layout([eax, ebx, ecx, s20, s8, edx, s12, esi, edi], - [eax, ebx, ecx, s20, s8, edx, s12, esi, edi]) + remap_frame_layout(assembler, [eax, ebx, ecx, s20, s8, edx, s12, esi, edi], + [eax, ebx, ecx, s20, s8, edx, s12, esi, edi], + '?') assert assembler.ops == [] def test_simple_registers(): assembler = MockAssembler() - assembler.remap_frame_layout([eax, ebx, ecx], [edx, esi, edi]) + remap_frame_layout(assembler, [eax, ebx, ecx], [edx, esi, edi], '?') assert assembler.ops == [('mov', eax, edx), ('mov', ebx, esi), ('mov', ecx, edi)] def test_simple_framelocs(): - assembler = MockAssembler(edx) + assembler = MockAssembler() s8 = frame_pos(0, INT) s12 = frame_pos(13, INT) s20 = frame_pos(20, INT) s24 = frame_pos(221, INT) - assembler.remap_frame_layout([s8, eax, s12], [s20, s24, edi]) + remap_frame_layout(assembler, [s8, eax, s12], [s20, s24, edi], edx) assert assembler.ops == [('mov', s8, edx), ('mov', edx, s20), ('mov', eax, s24), @@ -78,8 +75,8 @@ s12 = frame_pos(12, INT) s20 = frame_pos(19, INT) s24 = frame_pos(1, INT) - assembler.remap_frame_layout([eax, s8, s20, ebx], - [s8, ebx, eax, edi]) + remap_frame_layout(assembler, [eax, s8, s20, ebx], + [s8, ebx, eax, edi], '?') assert assembler.got([('mov', ebx, edi), ('mov', s8, ebx), ('mov', eax, s8), @@ -91,8 +88,8 @@ s12 = frame_pos(12, INT) s20 = frame_pos(19, INT) s24 = frame_pos(1, INT) - assembler.remap_frame_layout([eax, s8, s20, ebx], - [s8, ebx, eax, s20]) + remap_frame_layout(assembler, [eax, s8, s20, ebx], 
+ [s8, ebx, eax, s20], '?') assert assembler.got([('push', s8), ('mov', eax, s8), ('mov', s20, eax), @@ -100,16 +97,17 @@ ('pop', ebx)]) def test_cycle_2(): - assembler = MockAssembler(ecx) + assembler = MockAssembler() s8 = frame_pos(8, INT) s12 = frame_pos(12, INT) s20 = frame_pos(19, INT) s24 = frame_pos(1, INT) s2 = frame_pos(2, INT) s3 = frame_pos(3, INT) - assembler.remap_frame_layout( + remap_frame_layout(assembler, [eax, s8, edi, s20, eax, s20, s24, esi, s2, s3], - [s8, s20, edi, eax, edx, s24, ebx, s12, s3, s2]) + [s8, s20, edi, eax, edx, s24, ebx, s12, s3, s2], + ecx) assert assembler.got([('mov', eax, edx), ('mov', s24, ebx), ('mov', esi, s12), @@ -127,19 +125,19 @@ def test_constants(): assembler = MockAssembler() c3 = imm(3) - assembler.remap_frame_layout([c3], [eax]) + remap_frame_layout(assembler, [c3], [eax], '?') assert assembler.ops == [('mov', c3, eax)] assembler = MockAssembler() s12 = frame_pos(12, INT) - assembler.remap_frame_layout([c3], [s12]) + remap_frame_layout(assembler, [c3], [s12], '?') assert assembler.ops == [('mov', c3, s12)] def test_constants_and_cycle(): - assembler = MockAssembler(edi) + assembler = MockAssembler() c3 = imm(3) s12 = frame_pos(13, INT) - assembler.remap_frame_layout([ebx, c3, s12], - [s12, eax, ebx]) + remap_frame_layout(assembler, [ebx, c3, s12], + [s12, eax, ebx], edi) assert assembler.ops == [('mov', c3, eax), ('push', s12), ('mov', ebx, s12), diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -11,7 +11,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib import rgc from pypy.jit.backend.llsupport import symbolic -#from pypy.jit.backend.x86.jump import remap_frame_layout +from pypy.jit.backend.x86.jump import remap_frame_layout from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.metainterp.resoperation import rop 
From commits-noreply at bitbucket.org Mon Mar 14 22:05:00 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:05:00 +0100 (CET) Subject: [pypy-svn] pypy default: Write (painfully) a randomized test that fails using two Message-ID: <20110314210500.88E34282BD8@codespeak.net> Author: Armin Rigo Branch: Changeset: r42648:abbf5aea041a Date: 2011-03-14 16:35 -0400 http://bitbucket.org/pypy/pypy/changeset/abbf5aea041a/ Log: Write (painfully) a randomized test that fails using two independent calls to remap_frame_layout(). diff --git a/pypy/jit/backend/x86/test/test_jump.py b/pypy/jit/backend/x86/test/test_jump.py --- a/pypy/jit/backend/x86/test/test_jump.py +++ b/pypy/jit/backend/x86/test/test_jump.py @@ -1,3 +1,4 @@ +import random from pypy.jit.backend.x86.regloc import * from pypy.jit.backend.x86.regalloc import X86FrameManager from pypy.jit.backend.x86.jump import remap_frame_layout @@ -142,3 +143,149 @@ ('push', s12), ('mov', ebx, s12), ('pop', ebx)] + +def test_random_mixed(): + assembler = MockAssembler() + registers1 = [eax, ebx, ecx] + registers2 = [xmm0, xmm1, xmm2] + if IS_X86_32: + XMMWORDS = 2 + elif IS_X86_64: + XMMWORDS = 1 + # + def pick1(): + n = random.randrange(-3, 10) + if n < 0: + return registers1[n] + else: + return frame_pos(n, INT) + def pick2(): + n = random.randrange(-3 , 10 // XMMWORDS) + if n < 0: + return registers2[n] + else: + return frame_pos(n * XMMWORDS, FLOAT) + # + def pick1c(): + n = random.randrange(-2000, 500) + if n >= 0: + return imm(n) + else: + return pick1() + # + def pick_dst(fn, count, seen): + result = [] + while len(result) < count: + x = fn() + keys = [x._getregkey()] + if isinstance(x, StackLoc) and x.width > WORD: + keys.append(keys[0] + WORD) + for key in keys: + if key in seen: + break + else: + for key in keys: + seen[key] = True + result.append(x) + return result + # + def get_state(locations): + regs1 = {} + regs2 = {} + stack = {} + for i, loc in enumerate(locations): + if isinstance(loc, 
RegLoc): + if loc.is_xmm: + if loc.width > WORD: + newvalue = ('value-xmm-%d' % i, + 'value-xmm-hiword-%d' % i) + else: + newvalue = 'value-xmm-%d' % i + regs2[loc.value] = newvalue + else: + regs1[loc.value] = 'value-int-%d' % i + elif isinstance(loc, StackLoc): + stack[loc.value] = 'value-width%d-%d' % (loc.width, i) + if loc.width > WORD: + stack[loc.value+WORD] = 'value-hiword-%d' % i + else: + assert isinstance(loc, ImmedLoc) + return regs1, regs2, stack + # + for i in range(1000): + seen = {} + src_locations1 = [pick1c() for i in range(5)] + dst_locations1 = pick_dst(pick1, 5, seen) + src_locations2 = [pick2() for i in range(4)] + dst_locations2 = pick_dst(pick2, 4, seen) + assembler = MockAssembler() + #remap_frame_layout_mixed(assembler, + # src_locations1, dst_locations1, edi, + # src_locations2, dst_locations2, xmm7) + remap_frame_layout(assembler, src_locations1, dst_locations1, edi) + remap_frame_layout(assembler, src_locations2, dst_locations2, xmm7) + # + regs1, regs2, stack = get_state(src_locations1 + + src_locations2) + # + def read(loc, expected_width=None): + if expected_width is not None: + assert loc.width == expected_width + if isinstance(loc, RegLoc): + if loc.is_xmm: + return regs2[loc.value] + else: + return regs1[loc.value] + if isinstance(loc, StackLoc): + got = stack[loc.value] + if loc.width > WORD: + got = (got, stack[loc.value+WORD]) + return got + if isinstance(loc, ImmedLoc): + return 'const-%d' % loc.value + assert 0, loc + # + def write(loc, newvalue): + if isinstance(loc, RegLoc): + if loc.is_xmm: + regs2[loc.value] = newvalue + else: + regs1[loc.value] = newvalue + elif isinstance(loc, StackLoc): + if loc.width > WORD: + newval1, newval2 = newvalue + stack[loc.value] = newval1 + stack[loc.value+WORD] = newval2 + else: + stack[loc.value] = newvalue + else: + assert 0, loc + # + src_values1 = [read(loc, WORD) for loc in src_locations1] + src_values2 = [read(loc, 8) for loc in src_locations2] + # + extrapushes = [] + for op in 
assembler.ops: + if op[0] == 'mov': + src, dst = op[1:] + assert isinstance(src, (RegLoc, StackLoc, ImmedLoc)) + assert isinstance(dst, (RegLoc, StackLoc)) + assert not (isinstance(src, StackLoc) and + isinstance(dst, StackLoc)) + write(dst, read(src)) + elif op[0] == 'push': + src, = op[1:] + assert isinstance(src, (RegLoc, StackLoc)) + extrapushes.append(read(src)) + elif op[0] == 'pop': + dst, = op[1:] + assert isinstance(dst, (RegLoc, StackLoc)) + write(dst, extrapushes.pop()) + else: + assert 0, "unknown op: %r" % (op,) + assert not extrapushes + # + for i, loc in enumerate(dst_locations1): + assert read(loc, WORD) == src_values1[i] + for i, loc in enumerate(dst_locations2): + assert read(loc, 8) == src_values2[i] From commits-noreply at bitbucket.org Mon Mar 14 22:05:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:05:01 +0100 (CET) Subject: [pypy-svn] pypy default: Fix for the test. It's a bit roundabout and inefficient, but on Message-ID: <20110314210501.BA4C4282BDE@codespeak.net> Author: Armin Rigo Branch: Changeset: r42649:87ddbaa99b34 Date: 2011-03-14 16:57 -0400 http://bitbucket.org/pypy/pypy/changeset/87ddbaa99b34/ Log: Fix for the test. It's a bit roundabout and inefficient, but on the other hand it looks like a rare case anyway, otherwise we would have found the bug much earlier. 
diff --git a/pypy/jit/backend/x86/jump.py b/pypy/jit/backend/x86/jump.py --- a/pypy/jit/backend/x86/jump.py +++ b/pypy/jit/backend/x86/jump.py @@ -69,3 +69,41 @@ assembler.regalloc_mov(src, tmpreg) src = tmpreg assembler.regalloc_mov(src, dst) + +def remap_frame_layout_mixed(assembler, + src_locations1, dst_locations1, tmpreg1, + src_locations2, dst_locations2, tmpreg2): + # find and push the xmm stack locations from src_locations2 that + # are going to be overwritten by dst_locations1 + from pypy.jit.backend.x86.arch import WORD + extrapushes = [] + dst_keys = {} + for loc in dst_locations1: + dst_keys[loc._getregkey()] = None + src_locations2red = [] + dst_locations2red = [] + for i in range(len(src_locations2)): + loc = src_locations2[i] + dstloc = dst_locations2[i] + if isinstance(loc, StackLoc): + key = loc._getregkey() + if (key in dst_keys or (loc.width > WORD and + (key + WORD) in dst_keys)): + assembler.regalloc_push(loc) + extrapushes.append(dstloc) + continue + src_locations2red.append(loc) + dst_locations2red.append(dstloc) + src_locations2 = src_locations2red + dst_locations2 = dst_locations2red + # + # remap the integer and pointer registers and stack locations + remap_frame_layout(assembler, src_locations1, dst_locations1, tmpreg1) + # + # remap the xmm registers and stack locations + remap_frame_layout(assembler, src_locations2, dst_locations2, tmpreg2) + # + # finally, pop the extra xmm stack locations + while len(extrapushes) > 0: + loc = extrapushes.pop() + assembler.regalloc_pop(loc) diff --git a/pypy/jit/backend/x86/test/test_jump.py b/pypy/jit/backend/x86/test/test_jump.py --- a/pypy/jit/backend/x86/test/test_jump.py +++ b/pypy/jit/backend/x86/test/test_jump.py @@ -2,6 +2,7 @@ from pypy.jit.backend.x86.regloc import * from pypy.jit.backend.x86.regalloc import X86FrameManager from pypy.jit.backend.x86.jump import remap_frame_layout +from pypy.jit.backend.x86.jump import remap_frame_layout_mixed from pypy.jit.metainterp.history import INT 
frame_pos = X86FrameManager.frame_pos @@ -144,6 +145,64 @@ ('mov', ebx, s12), ('pop', ebx)] +def test_mixed(): + assembler = MockAssembler() + s23 = frame_pos(2, FLOAT) # non-conflicting locations + s4 = frame_pos(4, INT) + remap_frame_layout_mixed(assembler, [ebx], [s4], 'tmp', + [s23], [xmm5], 'xmmtmp') + assert assembler.ops == [('mov', ebx, s4), + ('mov', s23, xmm5)] + # + if IS_X86_32: + assembler = MockAssembler() + s23 = frame_pos(2, FLOAT) # gets stored in pos 2 and 3, with value==3 + s3 = frame_pos(3, INT) + remap_frame_layout_mixed(assembler, [ebx], [s3], 'tmp', + [s23], [xmm5], 'xmmtmp') + assert assembler.ops == [('push', s23), + ('mov', ebx, s3), + ('pop', xmm5)] + # + assembler = MockAssembler() + s23 = frame_pos(2, FLOAT) + s2 = frame_pos(2, INT) + remap_frame_layout_mixed(assembler, [ebx], [s2], 'tmp', + [s23], [xmm5], 'xmmtmp') + assert assembler.ops == [('push', s23), + ('mov', ebx, s2), + ('pop', xmm5)] + # + assembler = MockAssembler() + s4 = frame_pos(4, INT) + s45 = frame_pos(4, FLOAT) + s1 = frame_pos(1, INT) + remap_frame_layout_mixed(assembler, [s4], [s1], edi, + [s23], [s45], xmm3) + assert assembler.ops == [('mov', s4, edi), + ('mov', edi, s1), + ('mov', s23, xmm3), + ('mov', xmm3, s45)] + # + assembler = MockAssembler() + s4 = frame_pos(4, INT) + s45 = frame_pos(4, FLOAT) + remap_frame_layout_mixed(assembler, [s4], [s2], edi, + [s23], [s45], xmm3) + assert assembler.ops == [('push', s23), + ('mov', s4, edi), + ('mov', edi, s2), + ('pop', s45)] + # + if IS_X86_32: + assembler = MockAssembler() + remap_frame_layout_mixed(assembler, [s4], [s3], edi, + [s23], [s45], xmm3) + assert assembler.ops == [('push', s23), + ('mov', s4, edi), + ('mov', edi, s3), + ('pop', s45)] + def test_random_mixed(): assembler = MockAssembler() registers1 = [eax, ebx, ecx] @@ -212,18 +271,16 @@ assert isinstance(loc, ImmedLoc) return regs1, regs2, stack # - for i in range(1000): + for i in range(500): seen = {} + src_locations2 = [pick2() for i in range(4)] + 
dst_locations2 = pick_dst(pick2, 4, seen) src_locations1 = [pick1c() for i in range(5)] dst_locations1 = pick_dst(pick1, 5, seen) - src_locations2 = [pick2() for i in range(4)] - dst_locations2 = pick_dst(pick2, 4, seen) assembler = MockAssembler() - #remap_frame_layout_mixed(assembler, - # src_locations1, dst_locations1, edi, - # src_locations2, dst_locations2, xmm7) - remap_frame_layout(assembler, src_locations1, dst_locations1, edi) - remap_frame_layout(assembler, src_locations2, dst_locations2, xmm7) + remap_frame_layout_mixed(assembler, + src_locations1, dst_locations1, edi, + src_locations2, dst_locations2, xmm7) # regs1, regs2, stack = get_state(src_locations1 + src_locations2) diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -11,7 +11,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib import rgc from pypy.jit.backend.llsupport import symbolic -from pypy.jit.backend.x86.jump import remap_frame_layout +from pypy.jit.backend.x86.jump import remap_frame_layout_mixed from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.metainterp.resoperation import rop @@ -1172,16 +1172,17 @@ xmmtmploc = self.xrm.force_allocate_reg(box1, selected_reg=xmmtmp) # Part about non-floats # XXX we don't need a copy, we only just the original list - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations1 = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type != FLOAT] assert tmploc not in nonfloatlocs - dst_locations = [loc for loc in nonfloatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, tmploc) + dst_locations1 = [loc for loc in nonfloatlocs if loc is not None] # Part about floats - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations2 = [self.loc(op.getarg(i)) 
for i in range(op.numargs()) if op.getarg(i).type == FLOAT] - dst_locations = [loc for loc in floatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, xmmtmp) + dst_locations2 = [loc for loc in floatlocs if loc is not None] + remap_frame_layout_mixed(assembler, + src_locations1, dst_locations1, tmploc, + src_locations2, dst_locations2, xmmtmp) self.rm.possibly_free_var(box) self.xrm.possibly_free_var(box1) self.possibly_free_vars_for_op(op) From commits-noreply at bitbucket.org Mon Mar 14 22:05:03 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:05:03 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110314210503.023EA282BD4@codespeak.net> Author: Armin Rigo Branch: Changeset: r42650:a0dceade17d6 Date: 2011-03-14 17:04 -0400 http://bitbucket.org/pypy/pypy/changeset/a0dceade17d6/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 14 22:20:23 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:20:23 +0100 (CET) Subject: [pypy-svn] pypy default: Fix another corner case (no specific test written): what if we Message-ID: <20110314212023.3D439282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r42651:440b11884461 Date: 2011-03-14 17:19 -0400 http://bitbucket.org/pypy/pypy/changeset/440b11884461/ Log: Fix another corner case (no specific test written): what if we have to move an xmm stack location from (6,7) to (7,8)? The problem is completely avoided by always aligning such locations to multiples of 2. diff --git a/pypy/jit/backend/llsupport/regalloc.py b/pypy/jit/backend/llsupport/regalloc.py --- a/pypy/jit/backend/llsupport/regalloc.py +++ b/pypy/jit/backend/llsupport/regalloc.py @@ -26,16 +26,24 @@ res = self.get(box) if res is not None: return res + size = self.frame_size(box.type) + self.frame_depth += ((-self.frame_depth) & (size-1)) + # ^^^ frame_depth is rounded up to a multiple of 'size', assuming + # that 'size' is a power of two. 
The reason for doing so is to + # avoid obscure issues in jump.py with stack locations that try + # to move from position (6,7) to position (7,8). newloc = self.frame_pos(self.frame_depth, box.type) self.frame_bindings[box] = newloc - # Objects returned by frame_pos must support frame_size() - self.frame_depth += newloc.frame_size() + self.frame_depth += size return newloc # abstract methods that need to be overwritten for specific assemblers @staticmethod def frame_pos(loc, type): raise NotImplementedError("Purely abstract") + @staticmethod + def frame_size(type): + return 1 class RegisterManager(object): """ Class that keeps track of register allocations diff --git a/pypy/jit/backend/llsupport/test/test_regalloc.py b/pypy/jit/backend/llsupport/test/test_regalloc.py --- a/pypy/jit/backend/llsupport/test/test_regalloc.py +++ b/pypy/jit/backend/llsupport/test/test_regalloc.py @@ -34,16 +34,15 @@ self.pos = pos self.box_type = box_type - def frame_size(self): - if self.box_type == FLOAT: +class TFrameManager(FrameManager): + def frame_pos(self, i, box_type): + return FakeFramePos(i, box_type) + def frame_size(self, box_type): + if box_type == FLOAT: return 2 else: return 1 -class TFrameManager(FrameManager): - def frame_pos(self, i, box_type): - return FakeFramePos(i, box_type) - class MockAsm(object): def __init__(self): self.moves = [] diff --git a/pypy/jit/backend/x86/regloc.py b/pypy/jit/backend/x86/regloc.py --- a/pypy/jit/backend/x86/regloc.py +++ b/pypy/jit/backend/x86/regloc.py @@ -46,9 +46,6 @@ # One of INT, REF, FLOAT self.type = type - def frame_size(self): - return self.width // WORD - def __repr__(self): return '%d(%%ebp)' % (self.value,) diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -110,6 +110,12 @@ return StackLoc(i, get_ebp_ofs(i+1), 2, box_type) else: return StackLoc(i, get_ebp_ofs(i), 1, box_type) + @staticmethod + def 
frame_size(box_type): + if IS_X86_32 and box_type == FLOAT: + return 2 + else: + return 1 class RegAlloc(object): From commits-noreply at bitbucket.org Mon Mar 14 22:44:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 14 Mar 2011 22:44:01 +0100 (CET) Subject: [pypy-svn] pypy default: Translation fix for test_zrpy_gc. Message-ID: <20110314214401.1C9E9282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r42652:cefd7bb99a50 Date: 2011-03-14 17:43 -0400 http://bitbucket.org/pypy/pypy/changeset/cefd7bb99a50/ Log: Translation fix for test_zrpy_gc. diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -706,6 +706,7 @@ # Note that this may be called recursively; that's why the # allocate() methods must fill in the cache as soon as they # have the object, before they fill its fields. + assert self.virtuals_cache is not None v = self.virtuals_cache[index] if not v: v = self.rd_virtuals[index].allocate(self, index) From commits-noreply at bitbucket.org Mon Mar 14 23:25:58 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 23:25:58 +0100 (CET) Subject: [pypy-svn] pypy default: fix rpy tests I hope Message-ID: <20110314222558.E7AB8282BD4@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42653:80bd259204b5 Date: 2011-03-14 18:25 -0400 http://bitbucket.org/pypy/pypy/changeset/80bd259204b5/ Log: fix rpy tests I hope diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -8,6 +8,7 @@ from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify from pypy.rlib.jit import PARAMETERS +from pypy.rlib.unroll import unrolling_iterable ALL_OPTS = [('intbounds', OptIntBounds), ('rewrite', OptRewrite), @@ -17,6 +18,7 @@ 
('ffi', OptFfiCall), ('unroll', None)] # no direct instantiation of unroll +unroll_all_opts = unrolling_iterable(ALL_OPTS) ALL_OPTS_DICT = dict.fromkeys([name for name, _ in ALL_OPTS]) @@ -29,7 +31,7 @@ """ optimizations = [] unroll = 'unroll' in enable_opts - for name, opt in ALL_OPTS: + for name, opt in unroll_all_opts: if name in enable_opts: if opt is not None: o = opt() @@ -61,3 +63,6 @@ pass optimize_loop_1(metainterp_sd, bridge, enable_opts, inline_short_preamble, retraced) + +if __name__ == '__main__': + print ALL_OPTS_NAMES diff --git a/pypy/jit/metainterp/warmstate.py b/pypy/jit/metainterp/warmstate.py --- a/pypy/jit/metainterp/warmstate.py +++ b/pypy/jit/metainterp/warmstate.py @@ -227,6 +227,8 @@ from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT, ALL_OPTS_NAMES d = {} + if NonConstant(False): + value = 'blah' # not a constant '' if value is None: value = ALL_OPTS_NAMES for name in value.split(":"): From commits-noreply at bitbucket.org Mon Mar 14 23:58:20 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 14 Mar 2011 23:58:20 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the test Message-ID: <20110314225820.10DB136C20F@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42654:e837df3968a1 Date: 2011-03-14 18:58 -0400 http://bitbucket.org/pypy/pypy/changeset/e837df3968a1/ Log: Fix the test diff --git a/pypy/jit/backend/x86/test/test_ztranslation.py b/pypy/jit/backend/x86/test/test_ztranslation.py --- a/pypy/jit/backend/x86/test/test_ztranslation.py +++ b/pypy/jit/backend/x86/test/test_ztranslation.py @@ -45,7 +45,7 @@ reds = ['total', 'frame', 'j'], virtualizables = ['frame']) def f(i, j): - for param in unroll_parameters: + for param, _ in unroll_parameters: defl = PARAMETERS[param] jitdriver.set_param(param, defl) jitdriver.set_param("threshold", 3) From commits-noreply at bitbucket.org Tue Mar 15 18:08:14 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 18:08:14 +0100 (CET) Subject: 
[pypy-svn] pypy default: Trying a bit randomly to fix stackless. Message-ID: <20110315170814.DDF5036C20F@codespeak.net> Author: Armin Rigo Branch: Changeset: r42657:3994e5797386 Date: 2011-03-14 18:54 -0400 http://bitbucket.org/pypy/pypy/changeset/3994e5797386/ Log: Trying a bit randomly to fix stackless. diff --git a/lib_pypy/stackless.py b/lib_pypy/stackless.py --- a/lib_pypy/stackless.py +++ b/lib_pypy/stackless.py @@ -14,9 +14,10 @@ import traceback import sys try: - # If _stackless can be imported then TaskletExit and CoroutineExit are + # If _stackless can be imported then CoroutineExit is # automatically added to the builtins. from _stackless import coroutine, greenlet + TaskletExit = CoroutineExit except ImportError: # we are running from CPython from greenlet import greenlet, GreenletExit TaskletExit = CoroutineExit = GreenletExit diff --git a/pypy/module/_stackless/interp_coroutine.py b/pypy/module/_stackless/interp_coroutine.py --- a/pypy/module/_stackless/interp_coroutine.py +++ b/pypy/module/_stackless/interp_coroutine.py @@ -55,8 +55,8 @@ # Should be moved to interp_stackless.py if it's ever implemented... Currently # used by pypy/lib/stackless.py. -W_TaskletExit = _new_exception('TaskletExit', W_SystemExit, - """Tasklet killed manually.""") +#W_TaskletExit = _new_exception('TaskletExit', W_SystemExit, +# """Tasklet killed manually.""") class AppCoroutine(Coroutine): # XXX, StacklessFlags): @@ -384,14 +384,14 @@ self.w_CoroutineExit) # Should be moved to interp_stackless.py if it's ever implemented... 
- self.w_TaskletExit = space.gettypefor(W_TaskletExit) - space.setitem( - space.exceptions_module.w_dict, - space.new_interned_str('TaskletExit'), - self.w_TaskletExit) - space.setitem(space.builtin.w_dict, - space.new_interned_str('TaskletExit'), - self.w_TaskletExit) + #self.w_TaskletExit = space.gettypefor(W_TaskletExit) + #space.setitem( + # space.exceptions_module.w_dict, + # space.new_interned_str('TaskletExit'), + # self.w_TaskletExit) + #space.setitem(space.builtin.w_dict, + # space.new_interned_str('TaskletExit'), + # self.w_TaskletExit) def post_install(self): self.current = self.main = AppCoroutine(self.space, state=self) From commits-noreply at bitbucket.org Tue Mar 15 18:08:15 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 18:08:15 +0100 (CET) Subject: [pypy-svn] pypy default: Backed out changeset 3994e5797386 Message-ID: <20110315170815.B763B36C20F@codespeak.net> Author: Armin Rigo Branch: Changeset: r42658:63c5c00fe5fb Date: 2011-03-15 17:23 +0100 http://bitbucket.org/pypy/pypy/changeset/63c5c00fe5fb/ Log: Backed out changeset 3994e5797386 diff --git a/lib_pypy/stackless.py b/lib_pypy/stackless.py --- a/lib_pypy/stackless.py +++ b/lib_pypy/stackless.py @@ -14,10 +14,9 @@ import traceback import sys try: - # If _stackless can be imported then CoroutineExit is + # If _stackless can be imported then TaskletExit and CoroutineExit are # automatically added to the builtins. from _stackless import coroutine, greenlet - TaskletExit = CoroutineExit except ImportError: # we are running from CPython from greenlet import greenlet, GreenletExit TaskletExit = CoroutineExit = GreenletExit diff --git a/pypy/module/_stackless/interp_coroutine.py b/pypy/module/_stackless/interp_coroutine.py --- a/pypy/module/_stackless/interp_coroutine.py +++ b/pypy/module/_stackless/interp_coroutine.py @@ -55,8 +55,8 @@ # Should be moved to interp_stackless.py if it's ever implemented... Currently # used by pypy/lib/stackless.py. 
-#W_TaskletExit = _new_exception('TaskletExit', W_SystemExit, -# """Tasklet killed manually.""") +W_TaskletExit = _new_exception('TaskletExit', W_SystemExit, + """Tasklet killed manually.""") class AppCoroutine(Coroutine): # XXX, StacklessFlags): @@ -384,14 +384,14 @@ self.w_CoroutineExit) # Should be moved to interp_stackless.py if it's ever implemented... - #self.w_TaskletExit = space.gettypefor(W_TaskletExit) - #space.setitem( - # space.exceptions_module.w_dict, - # space.new_interned_str('TaskletExit'), - # self.w_TaskletExit) - #space.setitem(space.builtin.w_dict, - # space.new_interned_str('TaskletExit'), - # self.w_TaskletExit) + self.w_TaskletExit = space.gettypefor(W_TaskletExit) + space.setitem( + space.exceptions_module.w_dict, + space.new_interned_str('TaskletExit'), + self.w_TaskletExit) + space.setitem(space.builtin.w_dict, + space.new_interned_str('TaskletExit'), + self.w_TaskletExit) def post_install(self): self.current = self.main = AppCoroutine(self.space, state=self) From commits-noreply at bitbucket.org Tue Mar 15 18:08:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 18:08:17 +0100 (CET) Subject: [pypy-svn] pypy default: Finally fix stackless translation, by using a workaround. Message-ID: <20110315170817.5ED8A282B9E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42659:1ea6297c93d7 Date: 2011-03-15 18:07 +0100 http://bitbucket.org/pypy/pypy/changeset/1ea6297c93d7/ Log: Finally fix stackless translation, by using a workaround. 
diff --git a/pypy/module/_stackless/interp_coroutine.py b/pypy/module/_stackless/interp_coroutine.py --- a/pypy/module/_stackless/interp_coroutine.py +++ b/pypy/module/_stackless/interp_coroutine.py @@ -373,6 +373,11 @@ self.w_tempval = space.w_None self.space = space + # XXX Workaround: for now we need to instantiate these classes + # explicitly for translation to work + W_CoroutineExit(space) + W_TaskletExit(space) + # Exporting new exception to space self.w_CoroutineExit = space.gettypefor(W_CoroutineExit) space.setitem( From commits-noreply at bitbucket.org Tue Mar 15 18:08:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 18:08:17 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110315170817.E1B59282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42660:4187170af92e Date: 2011-03-15 18:08 +0100 http://bitbucket.org/pypy/pypy/changeset/4187170af92e/ Log: merge heads From commits-noreply at bitbucket.org Tue Mar 15 18:10:33 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 18:10:33 +0100 (CET) Subject: [pypy-svn] pypy default: Write an obscure refcounting workaround for a common misusage Message-ID: <20110315171033.433CA36C20F@codespeak.net> Author: Armin Rigo Branch: Changeset: r42661:5e783074ef82 Date: 2011-03-15 13:10 -0400 http://bitbucket.org/pypy/pypy/changeset/5e783074ef82/ Log: Write an obscure refcounting workaround for a common misusage of PyModule_AddObject(). 
diff --git a/pypy/module/cpyext/test/test_cpyext.py b/pypy/module/cpyext/test/test_cpyext.py --- a/pypy/module/cpyext/test/test_cpyext.py +++ b/pypy/module/cpyext/test/test_cpyext.py @@ -92,6 +92,8 @@ self.frozen_ll2callocations = set(ll2ctypes.ALLOCATED.values()) class LeakCheckingTest(object): + enable_leak_checking = True + @staticmethod def cleanup_references(space): state = space.fromcache(RefcountState) @@ -113,6 +115,10 @@ # check for sane refcnts import gc + if not self.enable_leak_checking: + leakfinder.stop_tracking_allocations(check=False) + return False + leaking = False state = self.space.fromcache(RefcountState) gc.collect() diff --git a/pypy/module/cpyext/test/test_arraymodule.py b/pypy/module/cpyext/test/test_arraymodule.py --- a/pypy/module/cpyext/test/test_arraymodule.py +++ b/pypy/module/cpyext/test/test_arraymodule.py @@ -4,6 +4,8 @@ import sys class AppTestArrayModule(AppTestCpythonExtensionBase): + enable_leak_checking = False + def test_basic(self): module = self.import_module(name='array') arr = module.array('i', [1,2,3]) diff --git a/pypy/module/cpyext/test/test_capsule.py b/pypy/module/cpyext/test/test_capsule.py --- a/pypy/module/cpyext/test/test_capsule.py +++ b/pypy/module/cpyext/test/test_capsule.py @@ -12,6 +12,7 @@ if (PyErr_Occurred()) return NULL; module = PyImport_ImportModule("foo"); PyModule_AddObject(module, "_ptr", capsule); + Py_DECREF(capsule); /* XXX <--- anti-workaround */ Py_DECREF(module); if (PyErr_Occurred()) return NULL; Py_RETURN_NONE; diff --git a/pypy/module/cpyext/src/modsupport.c b/pypy/module/cpyext/src/modsupport.c --- a/pypy/module/cpyext/src/modsupport.c +++ b/pypy/module/cpyext/src/modsupport.c @@ -665,10 +665,12 @@ return tmp; } -int -PyModule_AddObject(PyObject *m, const char *name, PyObject *o) +/* returns -1 in case of error, 0 if a new key was added, 1 if the key + was already there (and replaced) */ +static int +_PyModule_AddObject_NoConsumeRef(PyObject *m, const char *name, PyObject *o) { - PyObject 
*dict; + PyObject *dict, *prev; if (!PyModule_Check(m)) { PyErr_SetString(PyExc_TypeError, "PyModule_AddObject() needs module as first arg"); @@ -688,32 +690,47 @@ PyModule_GetName(m)); return -1; } + prev = PyDict_GetItemString(dict, name); if (PyDict_SetItemString(dict, name, o)) return -1; - Py_DECREF(o); - return 0; + return prev != NULL; +} + +int +PyModule_AddObject(PyObject *m, const char *name, PyObject *o) +{ + int result = _PyModule_AddObject_NoConsumeRef(m, name, o); + /* XXX WORKAROUND for a common misusage of PyModule_AddObject: + for the common case of adding a new key, we don't consume a + reference, but instead just leak it away. The issue is that + people generally don't realize that this function consumes a + reference, because on CPython the reference is still stored + on the dictionary. */ + if (result != 0) + Py_DECREF(o); + return result < 0 ? -1 : 0; } int PyModule_AddIntConstant(PyObject *m, const char *name, long value) { + int result; PyObject *o = PyInt_FromLong(value); if (!o) return -1; - if (PyModule_AddObject(m, name, o) == 0) - return 0; + result = _PyModule_AddObject_NoConsumeRef(m, name, o); Py_DECREF(o); - return -1; + return result < 0 ? -1 : 0; } int PyModule_AddStringConstant(PyObject *m, const char *name, const char *value) { + int result; PyObject *o = PyString_FromString(value); if (!o) return -1; - if (PyModule_AddObject(m, name, o) == 0) - return 0; + result = _PyModule_AddObject_NoConsumeRef(m, name, o); Py_DECREF(o); - return -1; + return result < 0 ? 
-1 : 0; } diff --git a/pypy/module/cpyext/test/test_pycobject.py b/pypy/module/cpyext/test/test_pycobject.py --- a/pypy/module/cpyext/test/test_pycobject.py +++ b/pypy/module/cpyext/test/test_pycobject.py @@ -12,6 +12,7 @@ if (PyErr_Occurred()) return NULL; module = PyImport_ImportModule("foo"); PyModule_AddObject(module, "_ptr", pointer); + Py_DECREF(pointer); /* XXX <--- anti-workaround */ Py_DECREF(module); if (PyErr_Occurred()) return NULL; Py_RETURN_NONE; From commits-noreply at bitbucket.org Tue Mar 15 18:23:25 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:25 +0100 (CET) Subject: [pypy-svn] pypy default: port this test from test_pypy_c Message-ID: <20110315172325.C89AD282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42662:dbdd0dbbdde0 Date: 2011-03-15 12:16 +0100 http://bitbucket.org/pypy/pypy/changeset/dbdd0dbbdde0/ Log: port this test from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -276,6 +276,24 @@ jump(p0, p1, p2, p3, p4, p5, i20, p7, i17, i9, p10, p11, p12, p13, descr=) """) + def test_default_and_kw(self): + def main(n): + def f(i, j=1): + return i + j + # + i = 0 + while i < n: + i = f(f(i), j=1) # ID: call + a = 0 + return i + # + log = self.run(main, [1000], threshold=400) + assert log.result == 1000 + loop, = log.loops_by_id('call') + assert loop.match_by_id('call', """ + i14 = force_token() + i16 = force_token() + """) def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Tue Mar 15 18:23:26 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:26 +0100 (CET) Subject: [pypy-svn] pypy default: port test_kwargs from test_pypy_c Message-ID: <20110315172326.5F557282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42663:f4c727a84552 Date: 
2011-03-15 12:27 +0100 http://bitbucket.org/pypy/pypy/changeset/f4c727a84552/ Log: port test_kwargs from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -295,6 +295,28 @@ i16 = force_token() """) + def test_kwargs(self): + # this is not a very precise test, could be improved + def main(x): + def g(**args): + return len(args) + # + s = 0 + d = {} + for i in range(x): + s += g(**d) # ID: call + d[str(i)] = i + if i % 100 == 99: + d = {} + return s + # + log = self.run(main, [1000], threshold=400) + assert log.result == 49500 + loop, = log.loops_by_id('call') + ops = log.opnames(loop.ops_by_id('call')) + guards = [ops for ops in ops if ops.startswith('guard')] + assert len(guards) <= 5 + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Tue Mar 15 18:23:27 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:27 +0100 (CET) Subject: [pypy-svn] pypy default: port test_stararg_virtual Message-ID: <20110315172327.204FE282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42664:9db616a6be51 Date: 2011-03-15 15:15 +0100 http://bitbucket.org/pypy/pypy/changeset/9db616a6be51/ Log: port test_stararg_virtual diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -347,6 +347,7 @@ print '@' * 40 print "Loops don't match" print "=================" + print e.args print e.msg print print "Got:" diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -317,6 +317,60 @@ guards = [ops for ops in ops if 
ops.startswith('guard')] assert len(guards) <= 5 + def test_stararg_virtual(self): + def main(x): + def g(*args): + return len(args) + def h(a, b, c): + return c + # + s = 0 + for i in range(x): + l = [i, x, 2] + s += g(*l) # ID: g1 + s += h(*l) # ID: h1 + s += g(i, x, 2) # ID: g2 + a = 0 + for i in range(x): + l = [x, 2] + s += g(i, *l) # ID: g3 + s += h(i, *l) # ID: h2 + a = 0 + return s + # + log = self.run(main, [1000], threshold=400) + assert log.result == 13000 + loop0, = log.loops_by_id('g1') + assert loop0.match_by_id('g1', """ + i20 = force_token() + setfield_gc(p4, i19, descr=<.*W_AbstractSeqIterObject.inst_index .*>) + i22 = int_add_ovf(i8, 3) + guard_no_overflow(descr=) + """) + assert loop0.match_by_id('h1', """ + i20 = force_token() + i22 = int_add_ovf(i8, 2) + guard_no_overflow(descr=) + """) + assert loop0.match_by_id('g2', """ + i27 = force_token() + i29 = int_add_ovf(i26, 3) + guard_no_overflow(descr=) + """) + # + loop1, = log.loops_by_id('g3') + assert loop1.match_by_id('g3', """ + i21 = force_token() + setfield_gc(p4, i20, descr=<.* .*W_AbstractSeqIterObject.inst_index .*>) + i23 = int_add_ovf(i9, 3) + guard_no_overflow(descr=) + """) + assert loop1.match_by_id('h2', """ + i25 = force_token() + i27 = int_add_ovf(i23, 2) + guard_no_overflow(descr=) + """) + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Tue Mar 15 18:23:27 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:27 +0100 (CET) Subject: [pypy-svn] pypy default: port test_stararg from test_pypy_c Message-ID: <20110315172327.E2530282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42665:2a86861e21d2 Date: 2011-03-15 15:22 +0100 http://bitbucket.org/pypy/pypy/changeset/2a86861e21d2/ Log: port test_stararg from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ 
b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -371,6 +371,33 @@ guard_no_overflow(descr=) """) + + def test_stararg(self): + def main(x): + def g(*args): + return args[-1] + def h(*args): + return len(args) + # + s = 0 + l = [] + i = 0 + while i < x: + l.append(1) + s += g(*l) # ID: g + i = h(*l) # ID: h + a = 0 + return s + # + log = self.run(main, [1000], threshold=400) + assert log.result == 1000 + loop, = log.loops_by_id('g') + ops_g = log.opnames(loop.ops_by_id('g')) + ops_h = log.opnames(loop.ops_by_id('h')) + ops = ops_g + ops_h + assert 'new_with_vtable' not in ops + assert 'call_may_force' not in ops + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Tue Mar 15 18:23:29 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:29 +0100 (CET) Subject: [pypy-svn] pypy default: port test_virtual_instance from test_pypy_c Message-ID: <20110315172329.183CA282BD8@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42666:30f0ecace0a1 Date: 2011-03-15 15:56 +0100 http://bitbucket.org/pypy/pypy/changeset/30f0ecace0a1/ Log: port test_virtual_instance from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -371,7 +371,6 @@ guard_no_overflow(descr=) """) - def test_stararg(self): def main(x): def g(*args): @@ -398,6 +397,33 @@ assert 'new_with_vtable' not in ops assert 'call_may_force' not in ops + def test_virtual_instance(self): + def main(n): + class A(object): + pass + # + i = 0 + while i < n: + a = A() + assert isinstance(a, A) + assert not isinstance(a, int) + a.x = 2 + i = i + a.x + return i + # + log = self.run(main, [1000], threshold = 400) + assert log.result == 1000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i7 = int_lt(i5, i6) + guard_true(i7, descr=) + i9 = 
int_add_ovf(i5, 2) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, i9, i6, descr=) + """) + + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Tue Mar 15 18:23:31 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:31 +0100 (CET) Subject: [pypy-svn] pypy default: port test_load_attr Message-ID: <20110315172331.10931282BD8@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42667:879737899263 Date: 2011-03-15 15:59 +0100 http://bitbucket.org/pypy/pypy/changeset/879737899263/ Log: port test_load_attr diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -423,6 +423,30 @@ jump(p0, p1, p2, p3, p4, i9, i6, descr=) """) + def test_load_attr(self): + src = ''' + class A(object): + pass + a = A() + a.x = 2 + def main(n): + i = 0 + while i < n: + i = i + a.x + return i + ''' + log = self.run(src, [1000], threshold=400) + assert log.result == 1000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i9 = int_lt(i5, i6) + guard_true(i9, descr=) + i10 = int_add_ovf(i5, i7) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, i10, i6, i7, p8, descr=) + """) + def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Tue Mar 15 18:23:31 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:31 +0100 (CET) Subject: [pypy-svn] pypy default: test_mixed_type_loop Message-ID: <20110315172331.D1315282BD8@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42668:d0b226f21900 Date: 2011-03-15 16:08 +0100 http://bitbucket.org/pypy/pypy/changeset/d0b226f21900/ Log: test_mixed_type_loop diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- 
a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -447,6 +447,25 @@ jump(p0, p1, p2, p3, p4, i10, i6, i7, p8, descr=) """) + def test_mixed_type_loop(self): + def main(n): + i = 0.0 + j = 2 + while i < n: + i = j + i + return i + # + log = self.run(main, [1000], threshold=400) + assert log.result == 1000.0 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i9 = float_lt(f5, f7) + guard_true(i9, descr=) + f10 = float_add(f8, f5) + --TICK-- + jump(p0, p1, p2, p3, p4, f10, p6, f7, f8, descr=) + """) + def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Tue Mar 15 18:23:34 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:34 +0100 (CET) Subject: [pypy-svn] pypy default: port one more test from test_pypy_c Message-ID: <20110315172334.1C8F6282C18@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42669:d8c7e18303fa Date: 2011-03-15 17:45 +0100 http://bitbucket.org/pypy/pypy/changeset/d8c7e18303fa/ Log: port one more test from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -466,6 +466,26 @@ jump(p0, p1, p2, p3, p4, f10, p6, f7, f8, descr=) """) + def test_call_builtin_function(self): + def main(n): + i = 2 + l = [] + while i < n: + i += 1 + l.append(i) # ID: append + a = 0 + return i, len(l) + # + log = self.run(main, [1000], threshold=400) + assert log.result == (1000, 998) + loop, = log.loops_by_filename(self.filepath) + assert loop.match_by_id('append', """ + p14 = new_with_vtable(ConstClass(W_IntObject)) + setfield_gc(p14, i12, descr=) + call(ConstClass(ll_append__listPtr_objectPtr), p8, p14, descr=...) 
+ guard_no_exception(descr=) + """) + def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Tue Mar 15 18:23:36 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:36 +0100 (CET) Subject: [pypy-svn] pypy default: test_range_iter Message-ID: <20110315172336.C0812282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42670:709f88579427 Date: 2011-03-15 18:11 +0100 http://bitbucket.org/pypy/pypy/changeset/709f88579427/ Log: test_range_iter diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -158,8 +158,8 @@ matcher = OpMatcher(ops, src=self.format_ops()) return matcher.match(expected_src) - def match_by_id(self, id, expected_src): - ops = list(self.ops_by_id(id)) + def match_by_id(self, id, expected_src, **kwds): + ops = list(self.ops_by_id(id, *kwds)) matcher = OpMatcher(ops, src=self.format_ops(id)) return matcher.match(expected_src) diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -486,6 +486,36 @@ guard_no_exception(descr=) """) + def test_range_iter(self): + def main(n): + def g(n): + return range(n) + s = 0 + for i in range(n): # ID: for + tmp = g(n) + s += tmp[i] # ID: getitem + return s + # + log = self.run(main, [1000], threshold=400) + assert log.result == 1000 * 999 / 2 + loop, = log.loops_by_filename(self.filepath) + loop.match_by_id('getitem', opcode='BINARY_SUBSCR', expected_src=""" + i43 = int_lt(i25, 0) + guard_false(i43, descr=) + i44 = int_ge(i25, i39) + guard_false(i44, descr=) + i45 = int_mul(i25, i33) + """) + loop.match_by_id('for', opcode='FOR_ITER', expected_src=""" + i23 = int_ge(i11, i12) + guard_false(i23, descr=) + i24 = int_mul(i11, i14) + i25 = int_add(i15, 
i24) + i27 = int_add(i11, 1) + # even if it's a the end of the loop, the jump still belongs to + # the FOR_ITER opcode + jump(p0, p1, p2, p3, p4, p5, p6, i46, i25, i39, i33, i27, i12, p13, i14, i15, p16, i17, i18, p19, p20, i21, i22, descr=) + """) def test_reraise(self): def f(n): From commits-noreply at bitbucket.org Tue Mar 15 18:23:37 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:37 +0100 (CET) Subject: [pypy-svn] pypy default: unskip these tests, they pass Message-ID: <20110315172337.DFC50282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42671:e6ec5f038dec Date: 2011-03-15 18:22 +0100 http://bitbucket.org/pypy/pypy/changeset/e6ec5f038dec/ Log: unskip these tests, they pass diff --git a/pypy/module/pypyjit/test/test_pypy_c.py b/pypy/module/pypyjit/test/test_pypy_c.py --- a/pypy/module/pypyjit/test/test_pypy_c.py +++ b/pypy/module/pypyjit/test/test_pypy_c.py @@ -636,7 +636,6 @@ ] def test_exception_inside_loop_1(self): - py.test.skip("exceptions: in-progress") self.run_source(''' def main(n): while n: @@ -646,7 +645,7 @@ pass n -= 1 return n - ''', + ''', 33, ([30], 0)) bytecode, = self.get_by_bytecode("SETUP_EXCEPT") @@ -658,7 +657,6 @@ assert not bytecode.get_opnames() def test_exception_inside_loop_2(self): - py.test.skip("exceptions: in-progress") self.run_source(''' def g(n): raise ValueError(n) @@ -672,7 +670,7 @@ pass n -= 1 return n - ''', + ''', 51, ([30], 0)) bytecode, = self.get_by_bytecode("RAISE_VARARGS") From commits-noreply at bitbucket.org Tue Mar 15 18:23:38 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 18:23:38 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110315172338.41F45282BD8@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42672:4dea4f6ce4d5 Date: 2011-03-15 18:22 +0100 http://bitbucket.org/pypy/pypy/changeset/4dea4f6ce4d5/ Log: merge heads From commits-noreply at bitbucket.org Tue Mar 15 19:10:19 2011 From: commits-noreply 
at bitbucket.org (etrepum) Date: Tue, 15 Mar 2011 19:10:19 +0100 (CET) Subject: [pypy-svn] pypy default: undo most of 04257ccf2c0a to hopefully fix darwin translation Message-ID: <20110315181019.4329236C20E@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42673:8f177a7e05f5 Date: 2011-03-15 14:10 -0400 http://bitbucket.org/pypy/pypy/changeset/8f177a7e05f5/ Log: undo most of 04257ccf2c0a to hopefully fix darwin translation diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -1,7 +1,6 @@ """ Utilities to get environ variables and platform-specific memory-related values. """ -from __future__ import with_statement import os, sys from pypy.rlib.rarithmetic import r_uint from pypy.rlib.debug import debug_print, debug_start, debug_stop @@ -193,8 +192,10 @@ sandboxsafe=True) def get_darwin_cache_size(cache_key): - with lltype.scoped_alloc(rffi.LONGLONGP.TO, 1) as cache_p: - with lltype.scoped_alloc(rffi.SIZE_TP.TO, 1) as len_p: + cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') + try: + len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') + try: size = rffi.sizeof(rffi.LONGLONG) cache_p[0] = rffi.cast(rffi.LONGLONG, 0) len_p[0] = rffi.cast(rffi.SIZE_T, size) @@ -211,6 +212,10 @@ if rffi.cast(rffi.LONGLONG, cache) != cache_p[0]: cache = 0 # overflow! 
return cache + finally: + lltype.free(len_p, flavor='raw') + finally: + lltype.free(cache_p, flavor='raw') def get_L2cache_darwin(): From commits-noreply at bitbucket.org Tue Mar 15 19:18:38 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:18:38 +0100 (CET) Subject: [pypy-svn] jitviewer default: Display purity of getfield Message-ID: <20110315181838.05B6D36C20E@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r108:d5e920612801 Date: 2011-03-15 14:17 -0400 http://bitbucket.org/pypy/jitviewer/changeset/d5e920612801/ Log: Display purity of getfield diff --git a/_jitviewer/parser.py b/_jitviewer/parser.py --- a/_jitviewer/parser.py +++ b/_jitviewer/parser.py @@ -92,7 +92,9 @@ obj = self.getarg(0) return '%s = ((%s.%s)%s).%s' % (self.getres(), namespace, classname, obj, field) - repr_getfield_gc_pure = repr_getfield_gc + + def repr_getfield_gc_pure(self): + return self.repr_getfield_gc() + " [pure]" def repr_setfield_raw(self): name, field = self.descr.split(' ')[1].rsplit('.', 1) From commits-noreply at bitbucket.org Tue Mar 15 19:18:38 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:18:38 +0100 (CET) Subject: [pypy-svn] jitviewer default: merge Message-ID: <20110315181838.3885F282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r109:eac7398594e1 Date: 2011-03-15 14:18 -0400 http://bitbucket.org/pypy/jitviewer/changeset/eac7398594e1/ Log: merge From commits-noreply at bitbucket.org Tue Mar 15 19:20:49 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 15 Mar 2011 19:20:49 +0100 (CET) Subject: [pypy-svn] pypy default: Fix one test in cpyext Message-ID: <20110315182049.D55A536C20E@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42674:0fd9a5d918f4 Date: 2011-03-15 18:42 +0100 http://bitbucket.org/pypy/pypy/changeset/0fd9a5d918f4/ Log: Fix one test in cpyext diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- 
a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -288,6 +288,7 @@ W_TypeObject.__init__(self, space, extension_name, bases_w or [space.w_object], dict_w) self.flag_cpytype = True + self.flag_heaptype = False @bootstrap_function def init_typeobject(space): From commits-noreply at bitbucket.org Tue Mar 15 19:20:50 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 15 Mar 2011 19:20:50 +0100 (CET) Subject: [pypy-svn] pypy default: Rework borrowed references, fixes a crash when an object is borrowed from two different containers Message-ID: <20110315182050.9F8DB36C20E@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42675:d4aad1653f8b Date: 2011-03-15 18:54 +0100 http://bitbucket.org/pypy/pypy/changeset/d4aad1653f8b/ Log: Rework borrowed references, fixes a crash when an object is borrowed from two different containers diff --git a/pypy/module/cpyext/pyobject.py b/pypy/module/cpyext/pyobject.py --- a/pypy/module/cpyext/pyobject.py +++ b/pypy/module/cpyext/pyobject.py @@ -144,14 +144,11 @@ # { w_container -> { w_containee -> None } } # the None entry manages references borrowed during a call to # generic_cpy_call() - self.borrowed_objects = {} - # { addr of containee -> None } # For tests self.non_heaptypes_w = [] def _freeze_(self): - assert not self.borrowed_objects assert self.borrow_mapping == {None: {}} self.py_objects_r2w.clear() # is not valid anymore after translation return False @@ -187,22 +184,19 @@ """ ref = make_ref(self.space, w_borrowed) obj_ptr = rffi.cast(ADDR, ref) - if obj_ptr not in self.borrowed_objects: - # borrowed_objects owns the reference - self.borrowed_objects[obj_ptr] = None - else: - Py_DecRef(self.space, ref) # already in borrowed list borrowees = self.borrow_mapping.setdefault(w_container, {}) - borrowees[w_borrowed] = None + if w_borrowed in borrowees: + Py_DecRef(self.space, w_borrowed) # cancel incref from make_ref() + else: + borrowees[w_borrowed] = None + return ref def 
reset_borrowed_references(self): "Used in tests" - while self.borrowed_objects: - addr, _ = self.borrowed_objects.popitem() - w_obj = self.py_objects_r2w[addr] - Py_DecRef(self.space, w_obj) + for w_container, w_borrowed in self.borrow_mapping.items(): + Py_DecRef(self.space, w_borrowed) self.borrow_mapping = {None: {}} def delete_borrower(self, w_obj): @@ -232,17 +226,10 @@ ref = self.py_objects_w2r.get(w_obj, lltype.nullptr(PyObject.TO)) if not ref: if DEBUG_REFCOUNT: - print >>sys.stderr, "Borrowed object is already gone:", \ - hex(containee) + print >>sys.stderr, "Borrowed object is already gone!" return - containee_ptr = rffi.cast(ADDR, ref) - try: - del self.borrowed_objects[containee_ptr] - except KeyError: - pass - else: - Py_DecRef(self.space, ref) + Py_DecRef(self.space, ref) class InvalidPointerException(Exception): pass @@ -290,7 +277,6 @@ if not replace: assert w_obj not in state.py_objects_w2r assert ptr not in state.py_objects_r2w - assert ptr not in state.borrowed_objects state.py_objects_w2r[w_obj] = py_obj if ptr: # init_typeobject() bootstraps with NULL references state.py_objects_r2w[ptr] = w_obj diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -39,7 +39,6 @@ assert module.test_borrowing() # the test should not leak def test_borrow_destroy(self): - skip("FIXME") module = self.import_extension('foo', [ ("test_borrow_destroy", "METH_NOARGS", """ From commits-noreply at bitbucket.org Tue Mar 15 19:20:51 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 15 Mar 2011 19:20:51 +0100 (CET) Subject: [pypy-svn] pypy default: Add a passing test about borrowing the same object twice from the same container. 
Message-ID: <20110315182051.2D32A36C20E@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42676:d8724d17e24f Date: 2011-03-15 19:18 +0100 http://bitbucket.org/pypy/pypy/changeset/d8724d17e24f/ Log: Add a passing test about borrowing the same object twice from the same container. diff --git a/pypy/module/cpyext/test/test_borrow.py b/pypy/module/cpyext/test/test_borrow.py --- a/pypy/module/cpyext/test/test_borrow.py +++ b/pypy/module/cpyext/test/test_borrow.py @@ -58,3 +58,22 @@ """), ]) assert module.test_borrow_destroy() == 42 + + def test_double_borrow(self): + module = self.import_extension('foo', [ + ("run", "METH_NOARGS", + """ + PyObject *t = PyTuple_New(1); + PyObject *s = PyRun_String("set()", Py_eval_input, + Py_None, Py_None); + PyObject *w = PyWeakref_NewRef(s, Py_None); + PyTuple_SetItem(t, 0, s); + PyTuple_GetItem(t, 0); + PyTuple_GetItem(t, 0); + Py_DECREF(t); + return w; + """), + ]) + wr = module.run() + # check that the set() object was deallocated + assert wr() is None From commits-noreply at bitbucket.org Tue Mar 15 19:25:56 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:25:56 +0100 (CET) Subject: [pypy-svn] jitviewer default: (arigo, alex, fijal) Improve jump Message-ID: <20110315182556.AA13436C20E@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r110:6553b920524a Date: 2011-03-15 14:25 -0400 http://bitbucket.org/pypy/jitviewer/changeset/6553b920524a/ Log: (arigo, alex, fijal) Improve jump diff --git a/_jitviewer/parser.py b/_jitviewer/parser.py --- a/_jitviewer/parser.py +++ b/_jitviewer/parser.py @@ -104,6 +104,10 @@ name, field = self.descr.split(' ')[1].rsplit('.', 1) return '((%s)%s).%s = %s' % (name, self.getarg(0), field, self.getarg(1)) + def repr_jump(self): + no = int(re.search("\d+", self.descr).group(0)) + return ("" % no + + self.repr() + "") class ParserWithHtmlRepr(parser.SimpleParser): Op = OpHtml From commits-noreply at bitbucket.org Tue Mar 15 19:26:51 2011 From: 
commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 19:26:51 +0100 (CET) Subject: [pypy-svn] pypy default: port yet another test from test_pypy_c Message-ID: <20110315182651.EADC736C20E@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42677:9d570283d667 Date: 2011-03-15 19:22 +0100 http://bitbucket.org/pypy/pypy/changeset/9d570283d667/ Log: port yet another test from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -242,10 +242,19 @@ ticker0 = getfield_raw(ticker_address, descr=) ticker1 = int_sub(ticker0, 1) setfield_raw(ticker_address, ticker1, descr=) - ticker_cond = int_lt(ticker1, 0) - guard_false(ticker_cond, descr=...) + ticker_cond0 = int_lt(ticker1, 0) + guard_false(ticker_cond0, descr=...) """ src = src.replace('--TICK--', ticker_check) + # + # this is the ticker check generated in PyFrame.handle_operation_error + exc_ticker_check = """ + ticker2 = getfield_raw(ticker_address, descr=) + setfield_gc(_, _, descr=) + ticker_cond1 = int_lt(ticker2, 0) + guard_false(ticker_cond1, descr=...) 
+ """ + src = src.replace('--EXC-TICK--', exc_ticker_check) return src @classmethod diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -517,6 +517,30 @@ jump(p0, p1, p2, p3, p4, p5, p6, i46, i25, i39, i33, i27, i12, p13, i14, i15, p16, i17, i18, p19, p20, i21, i22, descr=) """) + + def test_exception_inside_loop_1(self): + def main(n): + while n: + try: + raise ValueError + except ValueError: + pass + n -= 1 + return n + # + log = self.run(main, [1000], threshold=400) + assert log.result == 0 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i5 = int_is_true(i3) + guard_true(i5, descr=) + --EXC-TICK-- + i12 = int_sub_ovf(i3, 1) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, i12, p4, descr=) + """) + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Tue Mar 15 19:26:52 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Tue, 15 Mar 2011 19:26:52 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110315182652.397F2282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42678:1238da66f9fc Date: 2011-03-15 19:26 +0100 http://bitbucket.org/pypy/pypy/changeset/1238da66f9fc/ Log: merge heads From commits-noreply at bitbucket.org Tue Mar 15 19:40:44 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:40:44 +0100 (CET) Subject: [pypy-svn] pypy default: (arigo, fijal, alex) Improve parser a bit Message-ID: <20110315184044.71C4D282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42679:90095e723230 Date: 2011-03-15 14:39 -0400 http://bitbucket.org/pypy/pypy/changeset/90095e723230/ Log: (arigo, fijal, alex) Improve parser a bit diff --git a/pypy/tool/jitlogparser/parser.py b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ 
b/pypy/tool/jitlogparser/parser.py @@ -147,8 +147,9 @@ # factory method TraceForOpcode = TraceForOpcode - def __init__(self, chunks, path, storage): + def __init__(self, chunks, path, storage, inputargs): self.path = path + self.inputargs = inputargs self.chunks = chunks for chunk in self.chunks: if chunk.filename is not None: @@ -160,7 +161,7 @@ self.storage = storage @classmethod - def from_operations(cls, operations, storage, limit=None): + def from_operations(cls, operations, storage, limit=None, inputargs=''): """ Slice given operation list into a chain of TraceForOpcode chunks. Also detect inlined functions and make them Function """ @@ -196,11 +197,11 @@ # wrap stack back up if not stack: # no ops whatsoever - return cls([], getpath(stack), storage) + return cls([], getpath(stack), storage, inputargs) while True: next = stack.pop() if not stack: - return cls(next, getpath(stack), storage) + return cls(next, getpath(stack), storage, inputargs) stack[-1].append(cls(next, getpath(stack), storage)) From commits-noreply at bitbucket.org Tue Mar 15 19:40:44 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:40:44 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110315184044.A9566282B9E@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42680:45a5e65015a7 Date: 2011-03-15 14:40 -0400 http://bitbucket.org/pypy/pypy/changeset/45a5e65015a7/ Log: merge From commits-noreply at bitbucket.org Tue Mar 15 19:41:09 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:41:09 +0100 (CET) Subject: [pypy-svn] jitviewer default: (arigo, fijal, alex) improve display of loop args Message-ID: <20110315184109.1D623282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r111:874027a7d2ca Date: 2011-03-15 14:40 -0400 http://bitbucket.org/pypy/jitviewer/changeset/874027a7d2ca/ Log: (arigo, fijal, alex) improve display of loop args diff --git a/bin/jitviewer.py b/bin/jitviewer.py --- 
a/bin/jitviewer.py +++ b/bin/jitviewer.py @@ -68,7 +68,8 @@ else: is_entry = False func = FunctionHtml.from_operations(loop.operations, self.storage, - limit=1) + limit=1, + inputargs=loop.inputargs) func.count = getattr(loop, 'count', '?') loops.append((is_entry, index, func)) loops.sort(lambda a, b: cmp(b[2].count, a[2].count)) @@ -85,7 +86,8 @@ no = int(flask.request.args.get('no', '0')) orig_loop = self.storage.loops[no] ops = adjust_bridges(orig_loop, flask.request.args) - loop = FunctionHtml.from_operations(ops, self.storage) + loop = FunctionHtml.from_operations(ops, self.storage, + inputargs=orig_loop.inputargs) path = flask.request.args.get('path', '').split(',') if path: up = '"' + ','.join(path[:-1]) + '"' diff --git a/templates/loop.html b/templates/loop.html --- a/templates/loop.html +++ b/templates/loop.html @@ -1,6 +1,7 @@ {% if show_upper_path %} <-- Up {% endif %} +
{{ source.inputargs|safe }} {% for sourceline in source.lines %} {% if sourceline.in_loop %}
{{sourceline.line}}
diff --git a/_jitviewer/display.py b/_jitviewer/display.py --- a/_jitviewer/display.py +++ b/_jitviewer/display.py @@ -1,5 +1,5 @@ from pypy.jit.metainterp.resoperation import rop - +from _jitviewer.parser import cssclass class LineRepr(object): """ A representation of a single line @@ -24,6 +24,10 @@ def __init__(self, source, code, loop): lineset = loop.lineset self.lines = [] + html = [] + for v in loop.inputargs: + html.append(cssclass(v, v, onmouseover='highlight_var(this)', onmouseout='disable_var(this)')) + self.inputargs = " ".join(html) self.firstlineno = code.co_firstlineno for i, line in enumerate(source.split("\n")): no = i + code.co_firstlineno From commits-noreply at bitbucket.org Tue Mar 15 19:57:58 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 15 Mar 2011 19:57:58 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, arigo, fijal) Failing test case reproducing twisted.something.leftarrow Message-ID: <20110315185758.C7845282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42681:f7b5e6b21197 Date: 2011-03-15 14:57 -0400 http://bitbucket.org/pypy/pypy/changeset/f7b5e6b21197/ Log: (alex, arigo, fijal) Failing test case reproducing twisted.something.leftarrow fail. 
Hooray diff --git a/pypy/jit/metainterp/test/test_virtual.py b/pypy/jit/metainterp/test/test_virtual.py --- a/pypy/jit/metainterp/test/test_virtual.py +++ b/pypy/jit/metainterp/test/test_virtual.py @@ -843,6 +843,29 @@ assert self.meta_interp(f, []) == 10 self.check_loops(new_array=0) + def test_virtual_streq_bug(self): + mydriver = JitDriver(reds = ['i', 's', 'a'], greens = []) + + class A(object): + def __init__(self, state): + self.state = state + + def f(): + i = 0 + s = 0 + a = A("data") + while i < 10: + mydriver.jit_merge_point(i=i, a=a, s=s) + if i > 1: + if a.state == 'data': + a.state = 'escaped' + else: + s += 1 + i += 1 + return s + + res = self.meta_interp(f, [], repeat=7) + assert res == f() # ____________________________________________________________ # Run 1: all the tests instantiate a real RPython class From commits-noreply at bitbucket.org Tue Mar 15 20:04:49 2011 From: commits-noreply at bitbucket.org (etrepum) Date: Tue, 15 Mar 2011 20:04:49 +0100 (CET) Subject: [pypy-svn] pypy default: implement get_total_memory for darwin, test that it returns > 1MB on all platforms Message-ID: <20110315190449.5181A282B9D@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42682:ceed7d933a6d Date: 2011-03-15 15:04 -0400 http://bitbucket.org/pypy/pypy/changeset/ceed7d933a6d/ Log: implement get_total_memory for darwin, test that it returns > 1MB on all platforms diff --git a/pypy/rpython/memory/gc/test/test_env.py b/pypy/rpython/memory/gc/test/test_env.py --- a/pypy/rpython/memory/gc/test/test_env.py +++ b/pypy/rpython/memory/gc/test/test_env.py @@ -15,6 +15,9 @@ assert x == y assert type(x) == type(y) +def test_get_total_memory(): + # total memory should be at least a megabyte + assert env.get_total_memory() > 1024*1024 def test_read_from_env(): saved = os.environ diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -99,8 +99,9 @@ def get_total_memory(): 
return get_total_memory_linux2('/proc/meminfo') -#elif sys.platform == 'darwin': -# ... +elif sys.platform == 'darwin': + def get_total_memory(): + return get_darwin_sysctl_signed('hw.memsize') else: def get_total_memory(): @@ -191,31 +192,31 @@ rffi.INT, sandboxsafe=True) -def get_darwin_cache_size(cache_key): - cache_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') +def get_darwin_sysctl_signed(sysctl_name): + sysctl_name_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') try: len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') try: size = rffi.sizeof(rffi.LONGLONG) - cache_p[0] = rffi.cast(rffi.LONGLONG, 0) + rval_p[0] = rffi.cast(rffi.LONGLONG, 0) len_p[0] = rffi.cast(rffi.SIZE_T, size) # XXX a hack for llhelper not being robust-enough - result = sysctlbyname(cache_key, - rffi.cast(rffi.VOIDP, cache_p), + result = sysctlbyname(sysctl_name, + rffi.cast(rffi.VOIDP, rval_p), len_p, lltype.nullptr(rffi.VOIDP.TO), rffi.cast(rffi.SIZE_T, 0)) - cache = 0 + rval = 0 if (rffi.cast(lltype.Signed, result) == 0 and rffi.cast(lltype.Signed, len_p[0]) == size): - cache = rffi.cast(lltype.Signed, cache_p[0]) - if rffi.cast(rffi.LONGLONG, cache) != cache_p[0]: - cache = 0 # overflow! - return cache + rval = rffi.cast(lltype.Signed, rval_p[0]) + if rffi.cast(rffi.LONGLONG, rval) != rval_p[0]: + rval = 0 # overflow! + return rval finally: lltype.free(len_p, flavor='raw') finally: - lltype.free(cache_p, flavor='raw') + lltype.free(sysctl_name_p, flavor='raw') def get_L2cache_darwin(): @@ -223,8 +224,8 @@ on the machine we are running on. 
""" debug_start("gc-hardware") - L2cache = get_darwin_cache_size("hw.l2cachesize") - L3cache = get_darwin_cache_size("hw.l3cachesize") + L2cache = get_darwin_sysctl_signed("hw.l2cachesize") + L3cache = get_darwin_sysctl_signed("hw.l3cachesize") debug_print("L2cache =", L2cache) debug_print("L3cache =", L3cache) debug_stop("gc-hardware") From commits-noreply at bitbucket.org Tue Mar 15 20:05:56 2011 From: commits-noreply at bitbucket.org (etrepum) Date: Tue, 15 Mar 2011 20:05:56 +0100 (CET) Subject: [pypy-svn] pypy default: fix mistake in get_total_memory for darwin Message-ID: <20110315190556.949D8282B9D@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42683:a688b3d51be2 Date: 2011-03-15 15:05 -0400 http://bitbucket.org/pypy/pypy/changeset/a688b3d51be2/ Log: fix mistake in get_total_memory for darwin diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -193,7 +193,7 @@ sandboxsafe=True) def get_darwin_sysctl_signed(sysctl_name): - sysctl_name_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') + rval_p = lltype.malloc(rffi.LONGLONGP.TO, 1, flavor='raw') try: len_p = lltype.malloc(rffi.SIZE_TP.TO, 1, flavor='raw') try: @@ -216,7 +216,7 @@ finally: lltype.free(len_p, flavor='raw') finally: - lltype.free(sysctl_name_p, flavor='raw') + lltype.free(rval_p, flavor='raw') def get_L2cache_darwin(): From commits-noreply at bitbucket.org Tue Mar 15 20:44:33 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 20:44:33 +0100 (CET) Subject: [pypy-svn] pypy default: Writing an extra test which passes. It seems the issue is Message-ID: <20110315194433.4F905282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42684:0d3d17c69cca Date: 2011-03-15 15:44 -0400 http://bitbucket.org/pypy/pypy/changeset/0d3d17c69cca/ Log: Writing an extra test which passes. It seems the issue is only about call_pure. 
diff --git a/pypy/jit/metainterp/test/test_virtual.py b/pypy/jit/metainterp/test/test_virtual.py --- a/pypy/jit/metainterp/test/test_virtual.py +++ b/pypy/jit/metainterp/test/test_virtual.py @@ -852,15 +852,46 @@ def f(): i = 0 - s = 0 + s = 10000 a = A("data") while i < 10: mydriver.jit_merge_point(i=i, a=a, s=s) if i > 1: if a.state == 'data': a.state = 'escaped' + s += 1000 else: - s += 1 + s += 100 + else: + s += 10 + i += 1 + return s + + res = self.meta_interp(f, [], repeat=7) + assert res == f() + + def test_getfield_gc_pure_nobug(self): + mydriver = JitDriver(reds = ['i', 's', 'a'], greens = []) + + class A(object): + _immutable_fields_ = ['foo'] + def __init__(self, foo): + self.foo = foo + + prebuilt42 = A(42) + prebuilt43 = A(43) + + def f(): + i = 0 + s = 10000 + a = prebuilt42 + while i < 10: + mydriver.jit_merge_point(i=i, s=s, a=a) + if i > 1: + s += a.foo + a = prebuilt43 + else: + s += 10 i += 1 return s From commits-noreply at bitbucket.org Tue Mar 15 21:54:14 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 21:54:14 +0100 (CET) Subject: [pypy-svn] pypy default: Add _annenforceargs_. Message-ID: <20110315205414.9F0B9282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42685:e5852ab46bbb Date: 2011-03-15 16:53 -0400 http://bitbucket.org/pypy/pypy/changeset/e5852ab46bbb/ Log: Add _annenforceargs_. 
diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -648,6 +648,7 @@ data_start = cast_ptr_to_adr(llstrtype(data)) + \ offsetof(STRTYPE, 'chars') + itemoffsetof(STRTYPE.chars, 0) return cast(TYPEP, data_start) + get_nonmovingbuffer._annenforceargs_ = [strtype] # (str, char*) -> None def free_nonmovingbuffer(data, buf): @@ -666,6 +667,7 @@ keepalive_until_here(data) if not followed_2nd_path: lltype.free(buf, flavor='raw') + free_nonmovingbuffer._annenforceargs_ = [strtype, None] # int -> (char*, str) def alloc_buffer(count): From commits-noreply at bitbucket.org Tue Mar 15 22:38:31 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 22:38:31 +0100 (CET) Subject: [pypy-svn] pypy default: Add another _annenforceargs_. Message-ID: <20110315213831.591E1282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42686:719a25d4e74f Date: 2011-03-15 17:38 -0400 http://bitbucket.org/pypy/pypy/changeset/719a25d4e74f/ Log: Add another _annenforceargs_. diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -682,6 +682,7 @@ raw_buf = lltype.malloc(TYPEP.TO, count, flavor='raw') return raw_buf, lltype.nullptr(STRTYPE) alloc_buffer._always_inline_ = True # to get rid of the returned tuple + alloc_buffer._annenforceargs_ = [int] # (char*, str, int, int) -> None def str_from_buffer(raw_buf, gc_buf, allocated_size, needed_size): From commits-noreply at bitbucket.org Tue Mar 15 23:04:49 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 15 Mar 2011 23:04:49 +0100 (CET) Subject: [pypy-svn] pypy str-cmp-opt: merged upstream. 
Message-ID: <20110315220449.34E4B36C055@codespeak.net> Author: Alex Gaynor Branch: str-cmp-opt Changeset: r42687:34bec4f1d826 Date: 2011-03-14 23:15 -0400 http://bitbucket.org/pypy/pypy/changeset/34bec4f1d826/ Log: merged upstream. diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5130,6 +5130,23 @@ """ self.optimize_loop(ops, expected) + def test_strlen_positive(self): + ops = """ + [p0] + i0 = strlen(p0) + i1 = int_ge(i0, 0) + guard_true(i1) [] + i2 = int_gt(i0, -1) + guard_true(i2) [] + jump(p0) + """ + expected = """ + [p0] + i0 = strlen(p0) + jump(p0) + """ + self.optimize_loop(ops, expected) + # ---------- def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): from pypy.jit.metainterp.optimizeopt import string From commits-noreply at bitbucket.org Tue Mar 15 23:04:50 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 15 Mar 2011 23:04:50 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, armin, mitsuhiko): Provide warnings and exceptions when you try to pass arguments to object.__init__ in various ways. Message-ID: <20110315220450.2B22936C055@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42688:25584faa6ace Date: 2011-03-15 18:04 -0400 http://bitbucket.org/pypy/pypy/changeset/25584faa6ace/ Log: (alex, armin, mitsuhiko): Provide warnings and exceptions when you try to pass arguments to object.__init__ in various ways. 
diff --git a/pypy/objspace/std/objecttype.py b/pypy/objspace/std/objecttype.py --- a/pypy/objspace/std/objecttype.py +++ b/pypy/objspace/std/objecttype.py @@ -78,7 +78,20 @@ return w_obj def descr__init__(space, w_obj, __args__): - pass + w_type = space.type(w_obj) + w_parent_new, _ = w_type.lookup_where('__new__') + w_parent_init, _ = w_type.lookup_where('__init__') + try: + __args__.fixedunpack(0) + except ValueError: + if w_parent_new is not space.w_object and w_parent_init is not space.w_object: + space.warn("object.__init__() takes no parameters", space.w_DeprecationWarning) + elif w_parent_new is space.w_object or w_parent_init is not space.w_object: + raise OperationError(space.w_TypeError, + space.wrap("object.__init__() takes no parameters") + ) + + @gateway.unwrap_spec(proto=int) def descr__reduce__(space, w_obj, proto=0): diff --git a/pypy/objspace/std/test/test_obj.py b/pypy/objspace/std/test/test_obj.py --- a/pypy/objspace/std/test/test_obj.py +++ b/pypy/objspace/std/test/test_obj.py @@ -6,10 +6,10 @@ import sys cpython_behavior = (not option.runappdirect or not hasattr(sys, 'pypy_translation_info')) - + cls.w_cpython_behavior = cls.space.wrap(cpython_behavior) cls.w_cpython_version = cls.space.wrap(tuple(sys.version_info)) - + def test_hash_builtin(self): if not self.cpython_behavior: skip("on pypy-c id == hash is not guaranteed") @@ -21,7 +21,7 @@ def test_hash_method(self): o = object() - assert hash(o) == o.__hash__() + assert hash(o) == o.__hash__() def test_hash_list(self): l = range(5) @@ -69,3 +69,26 @@ pass assert x().__subclasshook__(object()) is NotImplemented assert x.__subclasshook__(object()) is NotImplemented + + def test_object_init(self): + import warnings + + class A(object): + pass + + raises(TypeError, A().__init__, 3) + raises(TypeError, A().__init__, a=3) + + class B(object): + def __new__(cls): + return super(B, cls).__new__(cls) + + def __init__(self): + super(B, self).__init__(a=3) + + with warnings.catch_warnings(record=True) 
as log: + warnings.simplefilter("always", DeprecationWarning) + B() + assert len(log) == 1 + assert log[0].message.args == ("object.__init__() takes no parameters",) + assert type(log[0].message) is DeprecationWarning \ No newline at end of file From commits-noreply at bitbucket.org Tue Mar 15 23:04:50 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 15 Mar 2011 23:04:50 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, mitsuhiko) merged upstream. Message-ID: <20110315220450.7BB57282B9D@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42689:bb7612c31ddc Date: 2011-03-15 18:04 -0400 http://bitbucket.org/pypy/pypy/changeset/bb7612c31ddc/ Log: (alex, mitsuhiko) merged upstream. From commits-noreply at bitbucket.org Tue Mar 15 23:34:20 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 15 Mar 2011 23:34:20 +0100 (CET) Subject: [pypy-svn] pypy default: from __future__ import with_statement Message-ID: <20110315223420.1DB1F282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42690:1a0b6cbe2063 Date: 2011-03-15 18:34 -0400 http://bitbucket.org/pypy/pypy/changeset/1a0b6cbe2063/ Log: from __future__ import with_statement diff --git a/pypy/objspace/std/test/test_obj.py b/pypy/objspace/std/test/test_obj.py --- a/pypy/objspace/std/test/test_obj.py +++ b/pypy/objspace/std/test/test_obj.py @@ -1,3 +1,4 @@ +from __future__ import with_statement from pypy.conftest import option class AppTestObject: @@ -91,4 +92,4 @@ B() assert len(log) == 1 assert log[0].message.args == ("object.__init__() takes no parameters",) - assert type(log[0].message) is DeprecationWarning \ No newline at end of file + assert type(log[0].message) is DeprecationWarning From commits-noreply at bitbucket.org Tue Mar 15 23:43:23 2011 From: commits-noreply at bitbucket.org (etrepum) Date: Tue, 15 Mar 2011 23:43:23 +0100 (CET) Subject: [pypy-svn] pypy default: fix linker options on Darwin_x86_64, used in the tests when compiling an executable and a dylib 
Message-ID: <20110315224323.62D1F282B9D@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42691:32246d221d37 Date: 2011-03-15 18:43 -0400 http://bitbucket.org/pypy/pypy/changeset/32246d221d37/ Log: fix linker options on Darwin_x86_64, used in the tests when compiling an executable and a dylib diff --git a/pypy/translator/platform/posix.py b/pypy/translator/platform/posix.py --- a/pypy/translator/platform/posix.py +++ b/pypy/translator/platform/posix.py @@ -139,6 +139,7 @@ ('CFLAGS', cflags), ('CFLAGSEXTRA', list(eci.compile_extra)), ('LDFLAGS', linkflags), + ('LDFLAGS_LINK', list(self.link_flags)), ('LDFLAGSEXTRA', list(eci.link_extra)), ('CC', self.cc), ('CC_LINK', eci.use_cpp_linker and 'g++' or '$(CC)'), @@ -165,7 +166,7 @@ 'int main(int argc, char* argv[]) ' '{ return $(PYPY_MAIN_FUNCTION)(argc, argv); }" > $@') m.rule('$(DEFAULT_TARGET)', ['$(TARGET)', 'main.o'], - '$(CC_LINK) main.o -L. -l$(SHARED_IMPORT_LIB) -o $@') + '$(CC_LINK) $(LDFLAGS_LINK) main.o -L. -l$(SHARED_IMPORT_LIB) -o $@') return m From commits-noreply at bitbucket.org Wed Mar 16 08:27:01 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 08:27:01 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: First commit of optimization and test. Message-ID: <20110316072701.E0C7036C201@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42692:a1e30d4f9658 Date: 2011-03-16 00:26 -0700 http://bitbucket.org/pypy/pypy/changeset/a1e30d4f9658/ Log: First commit of optimization and test. 
diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -2,67 +2,43 @@ from pypy.jit.metainterp.optimizeopt.rewrite import OptRewrite from pypy.jit.metainterp.optimizeopt.intbounds import OptIntBounds from pypy.jit.metainterp.optimizeopt.virtualize import OptVirtualize +from pypy.jit.metainterp.optimizeopt.fold_intadd import OptAddition from pypy.jit.metainterp.optimizeopt.heap import OptHeap from pypy.jit.metainterp.optimizeopt.string import OptString from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble -from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall -from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify -from pypy.rlib.jit import PARAMETERS -from pypy.rlib.unroll import unrolling_iterable -ALL_OPTS = [('intbounds', OptIntBounds), - ('rewrite', OptRewrite), - ('virtualize', OptVirtualize), - ('string', OptString), - ('heap', OptHeap), - ('ffi', OptFfiCall), - ('unroll', None)] -# no direct instantiation of unroll -unroll_all_opts = unrolling_iterable(ALL_OPTS) - -ALL_OPTS_DICT = dict.fromkeys([name for name, _ in ALL_OPTS]) - -ALL_OPTS_NAMES = ':'.join([name for name, _ in ALL_OPTS]) -PARAMETERS['enable_opts'] = ALL_OPTS_NAMES - -def optimize_loop_1(metainterp_sd, loop, enable_opts, +def optimize_loop_1(metainterp_sd, loop, unroll=True, inline_short_preamble=True, retraced=False): """Optimize loop.operations to remove internal overheadish operations. 
""" - optimizations = [] - unroll = 'unroll' in enable_opts - for name, opt in unroll_all_opts: - if name in enable_opts: - if opt is not None: - o = opt() - if unroll and name == 'string': - o.enabled = False - # FIXME: Workaround to disable string optimisation - # during preamble but to keep it during the loop - optimizations.append(o) - - if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: - optimizations.append(OptSimplify()) - + opt_str = OptString() + optimizations = [ + OptAddition(), + OptIntBounds(), + OptRewrite(), + OptVirtualize(), + opt_str, + OptHeap(), + ] if inline_short_preamble: - optimizations = [OptInlineShortPreamble(retraced)] + optimizations + optimizations = [OptInlineShortPreamble(retraced)] + optimizations + + if metainterp_sd.jit_ffi: + from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall + optimizations = optimizations + [ + OptFfiCall(), + ] if unroll: + opt_str.enabled = False # FIXME: Workaround to disable string optimisation + # during preamble but to keep it during the loop optimize_unroll(metainterp_sd, loop, optimizations) else: optimizer = Optimizer(metainterp_sd, loop, optimizations) optimizer.propagate_all_forward() -def optimize_bridge_1(metainterp_sd, bridge, enable_opts, - inline_short_preamble=True, retraced=False): +def optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble=True, + retraced=False): """The same, but for a bridge. 
""" - enable_opts = enable_opts.copy() - try: - del enable_opts['unroll'] - except KeyError: - pass - optimize_loop_1(metainterp_sd, bridge, enable_opts, - inline_short_preamble, retraced) - -if __name__ == '__main__': - print ALL_OPTS_NAMES + optimize_loop_1(metainterp_sd, bridge, False, inline_short_preamble, + retraced) diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5,7 +5,7 @@ BaseTest) import pypy.jit.metainterp.optimizeopt.optimizer as optimizeopt import pypy.jit.metainterp.optimizeopt.virtualize as virtualize -from pypy.jit.metainterp.optimizeopt import optimize_loop_1, ALL_OPTS_DICT +from pypy.jit.metainterp.optimizeopt import optimize_loop_1 from pypy.jit.metainterp.optimizeutil import InvalidLoop from pypy.jit.metainterp.history import AbstractDescr, ConstInt, BoxInt from pypy.jit.metainterp.history import TreeLoop, LoopToken @@ -163,10 +163,7 @@ def optimize_loop(self, ops, optops, expected_preamble=None): loop = self.parse(ops) - if optops != "crash!": - expected = self.parse(optops) - else: - expected = "crash!" 
+ expected = self.parse(optops) if expected_preamble: expected_preamble = self.parse(expected_preamble) # @@ -188,18 +185,17 @@ def clone_if_mutable(self): return self loop.preamble.start_resumedescr = FakeDescr() - optimize_loop_1(metainterp_sd, loop, ALL_OPTS_DICT) + optimize_loop_1(metainterp_sd, loop) # print print loop.preamble.inputargs print '\n'.join([str(o) for o in loop.preamble.operations]) - print + print print loop.inputargs print '\n'.join([str(o) for o in loop.operations]) print - - assert expected != "crash!", "should have raised an exception" + self.assert_equal(loop, expected) if expected_preamble: self.assert_equal(loop.preamble, expected_preamble, @@ -833,7 +829,7 @@ i3 = getfield_gc(p2, descr=valuedescr) escape(i3) p3 = new_with_vtable(ConstClass(node_vtable)) - setfield_gc(p3, i1, descr=valuedescr) + setfield_gc(p3, i1, descr=valuedescr) jump(i1, p3) """ # We cannot track virtuals that survive for more than two iterations. @@ -893,7 +889,7 @@ escape(i3) p2sub = new_with_vtable(ConstClass(node_vtable2)) setfield_gc(p2sub, i1, descr=valuedescr) - setfield_gc(p2, p2sub, descr=nextdescr) + setfield_gc(p2, p2sub, descr=nextdescr) jump(i1, p2, p2sub) """ expected = """ @@ -1018,7 +1014,7 @@ """ preamble = """ [i, p0] - i0 = getfield_gc(p0, descr=valuedescr) + i0 = getfield_gc(p0, descr=valuedescr) i1 = int_add(i0, i) jump(i, i1) """ @@ -1350,26 +1346,6 @@ self.node.value = 5 self.optimize_loop(ops, expected) - def test_getfield_gc_pure_3(self): - ops = """ - [] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - escape(p2) - p3 = getfield_gc_pure(p1, descr=nextdescr) - escape(p3) - jump() - """ - expected = """ - [] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - escape(p2) - escape(p2) - jump() - """ - self.optimize_loop(ops, expected) - def test_getfield_gc_nonpure_2(self): ops = """ [i] @@ -3464,7 +3440,7 @@ guard_true(i1) [] i2 = int_sub(i0, 10) i3 = int_lt(i2, -5) - guard_true(i3) [] + guard_true(i3) [] jump(i0) """ 
expected = """ @@ -3490,7 +3466,7 @@ i1 = int_lt(i0, 4) guard_true(i1) [] i1p = int_gt(i0, -4) - guard_true(i1p) [] + guard_true(i1p) [] i2 = int_sub(i0, 10) jump(i0) """ @@ -3773,7 +3749,7 @@ ops = """ [p4, p7, i30] p16 = getfield_gc(p4, descr=valuedescr) - p17 = getarrayitem_gc(p4, 1, descr=arraydescr) + p17 = getarrayitem_gc(p4, 1, descr=arraydescr) guard_value(p16, ConstPtr(myptr), descr=) [] i1 = getfield_raw(p7, descr=nextdescr) i2 = int_add(i1, i30) @@ -3830,47 +3806,6 @@ self.node.value = 5 self.optimize_loop(ops, expected) - def test_complains_getfieldpure_setfield(self): - from pypy.jit.metainterp.optimizeopt.heap import BogusPureField - ops = """ - [p3] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - setfield_gc(p1, p3, descr=nextdescr) - jump(p3) - """ - py.test.raises(BogusPureField, self.optimize_loop, ops, "crash!") - - def test_dont_complains_different_field(self): - ops = """ - [p3] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - setfield_gc(p1, p3, descr=otherdescr) - escape(p2) - jump(p3) - """ - expected = """ - [p3] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - setfield_gc(p1, p3, descr=otherdescr) - escape(p2) - jump(p3) - """ - self.optimize_loop(ops, expected) - - def test_dont_complains_different_object(self): - ops = """ - [] - p1 = escape() - p2 = getfield_gc_pure(p1, descr=nextdescr) - p3 = escape() - setfield_gc(p3, p1, descr=nextdescr) - jump() - """ - self.optimize_loop(ops, ops) - def test_getfield_guard_const(self): ops = """ [p0] @@ -3940,7 +3875,7 @@ jump(p0) """ self.optimize_loop(ops, expected, expected) - + def test_addsub_ovf(self): ops = """ [i0] @@ -4060,7 +3995,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4099,7 +4034,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4117,7 +4052,7 @@ guard_false(i7) [] i8 = int_gt(i2c, 
-7) guard_true(i8) [] - i9 = int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ preamble = """ @@ -4129,12 +4064,12 @@ guard_true(i6) [] i8 = int_gt(i2c, -7) guard_true(i8) [] - i9 = int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ expected = """ [i0, i1, i2, i3] - jump(i0, i1, i2, i3) + jump(i0, i1, i2, i3) """ self.optimize_loop(ops, expected, preamble) @@ -4192,7 +4127,7 @@ def test_division_to_rshift(self): ops = """ [i1, i2] - it = int_gt(i1, 0) + it = int_gt(i1, 0) guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) @@ -4210,15 +4145,15 @@ """ expected = """ [i1, i2] - it = int_gt(i1, 0) - guard_true(it)[] + it = int_gt(i1, 0) + guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) i5 = int_rshift(i1, 1) i6 = int_floordiv(3, i2) i7 = int_floordiv(i1, 3) i8 = int_floordiv(4, i2) - i9 = int_rshift(i1, 2) + i9 = int_rshift(i1, 2) i10 = int_floordiv(i1, 0) i11 = int_rshift(i1, 0) i12 = int_floordiv(i2, 2) @@ -4259,7 +4194,7 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) @@ -4283,19 +4218,16 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) - i16 = int_rshift(i15, 2) i17 = int_lshift(i1b, 100) i18 = int_rshift(i17, 100) - i19 = int_eq(i1b, i16) - guard_true(i19) [] jump(i2, i3, i1b, i2b) """ self.optimize_loop(ops, expected) - + def test_subsub_ovf(self): ops = """ [i0] @@ -4479,7 +4411,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4507,9 +4439,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + jump(i0, i1, i1b, i2, i3) + """ + 
self.optimize_loop(ops, expected, preamble) def test_bound_rshift(self): ops = """ @@ -4544,7 +4476,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4572,9 +4504,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + jump(i0, i1, i1b, i2, i3) + """ + self.optimize_loop(ops, expected, preamble) def test_bound_dont_backpropagate_rshift(self): ops = """ @@ -4587,7 +4519,7 @@ """ self.optimize_loop(ops, ops, ops) - + def test_mul_ovf(self): ops = """ [i0, i1] @@ -4726,7 +4658,7 @@ def sort_key(self): return id(self) - + for n in ('inst_w_seq', 'inst_index', 'inst_w_list', 'inst_length', 'inst_start', 'inst_step'): self.namespace[n] = FakeDescr(n) @@ -4768,7 +4700,7 @@ i87 = int_add(i84, i86) i91 = int_add(i80, 1) setfield_gc(p75, i91, descr=inst_index) - + p110 = same_as(ConstPtr(myptr)) i112 = same_as(3) i114 = same_as(39) @@ -4788,13 +4720,13 @@ p1 = getfield_gc(p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ preamble = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ expected = """ @@ -4807,7 +4739,7 @@ ops = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) jump(p0) @@ -4831,7 +4763,7 @@ p2 = new_with_vtable(ConstClass(node_vtable)) setfield_gc(p2, i1, descr=nextdescr) """ - + # ---------- def optimize_strunicode_loop(self, ops, optops, preamble=None): if not preamble: @@ -5113,40 +5045,6 @@ """ self.optimize_strunicode_loop(ops, expected) - def test_strgetitem_small(self): - ops = """ - [p0, i0] - i1 = 
strgetitem(p0, i0) - i2 = int_lt(i1, 256) - guard_true(i2) [] - i3 = int_ge(i1, 0) - guard_true(i3) [] - jump(p0, i0) - """ - expected = """ - [p0, i0] - i1 = strgetitem(p0, i0) - jump(p0, i0) - """ - self.optimize_loop(ops, expected) - - def test_strlen_positive(self): - ops = """ - [p0] - i0 = strlen(p0) - i1 = int_ge(i0, 0) - guard_true(i1) [] - i2 = int_gt(i0, -1) - guard_true(i2) [] - jump(p0) - """ - expected = """ - [p0] - i0 = strlen(p0) - jump(p0) - """ - self.optimize_loop(ops, expected) - # ---------- def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): from pypy.jit.metainterp.optimizeopt import string @@ -5506,9 +5404,24 @@ # more generally, supporting non-constant but virtual cases is # not obvious, because of the exception UnicodeDecodeError that # can be raised by ll_str2unicode() - - - + + +class TestFoldIntAdds(OptimizeOptTest, LLtypeMixin): + def test_fold(self): + ops = """ + [i0] + i1 = int_add(i0, 3) + i2 = int_add(i1, 16) + i3 = int_add(i2, 9) + jump(i3) + """ + + expected = """ + [i0] + i3 = int_add(i0, 28) + jump(i3) + """ + self.optimize_loop(ops, expected) ##class TestOOtype(OptimizeOptTest, OOtypeMixin): diff --git a/pypy/jit/metainterp/optimizeopt/fold_intadd.py b/pypy/jit/metainterp/optimizeopt/fold_intadd.py new file mode 100644 --- /dev/null +++ b/pypy/jit/metainterp/optimizeopt/fold_intadd.py @@ -0,0 +1,77 @@ +from pypy.jit.metainterp.optimizeopt.optimizer import * +from pypy.jit.metainterp.resoperation import opboolinvers, opboolreflex +from pypy.jit.metainterp.history import ConstInt +from pypy.jit.metainterp.optimizeutil import _findall +from pypy.jit.metainterp.resoperation import rop, ResOperation +from pypy.jit.codewriter.effectinfo import EffectInfo +from pypy.jit.metainterp.optimizeopt.intutils import IntBound +from pypy.rlib.rarithmetic import highest_bit + +class OptAddition(Optimization): + def __init__(self): + self.args = {} + + def reconstruct_for_next_iteration(self, optimizer, valuemap): + 
return OptAddition() + + def propagate_forward(self, op): + opnum = op.getopnum() + for value, func in optimize_ops: + if opnum == value: + func(self, op) + break + else: + self.optimize_default(op) + + def _int_add(self, variable, constant, result): + return ResOperation(rop.INT_ADD, [variable, constant], result) + + def _store_add(self, variable, constant, result): + try: + root, stored_constant = self.args[variable] + constant = constant + stored_constant + except KeyError: + root = variable + + self.args[result] = root, constant + + def optimize_INT_ADD(self, op): + lv = self.getvalue(op.getarg(0)) + rv = self.getvalue(op.getarg(1)) + print "lv = %s rv = %s" % (lv.box, rv.box) + result = op.result + if lv.is_constant() and rv.is_constant(): + self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? + elif lv.is_constant(): + constant = lv.box.getint() + self._store_add(op.getarg(1), constant, result) + elif rv.is_constant(): + constant = rv.box.getint() + self._store_add(op.getarg(0), constant, result) + else: + self.emit_operation(op) + + def optimize_default(self, op): + for i in range(op.numargs()): + arg = self.getvalue(op.getarg(i)) + print 'type(%s) = %s' % (arg.box, type(arg)) + if arg.is_constant(): + continue + + try: + variable = op.getarg(i) + root, constant = self.args[variable] + del self.args[variable] # TODO: mark as used instead of deleting + + constant = ConstInt(constant) + new_op = self._int_add(root, constant, variable) + print new_op + self.emit_operation(new_op) + except KeyError: + pass + print op + self.emit_operation(op) + + #def optimize_INT_SUB(self, op): pass + +optimize_ops = _findall(OptAddition, 'optimize_') From commits-noreply at bitbucket.org Wed Mar 16 08:37:21 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 08:37:21 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Reverted and fixed files I clobbered. 
Message-ID: <20110316073721.AD234282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42693:ec125b51f088 Date: 2011-03-16 00:37 -0700 http://bitbucket.org/pypy/pypy/changeset/ec125b51f088/ Log: Reverted and fixed files I clobbered. diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5,7 +5,7 @@ BaseTest) import pypy.jit.metainterp.optimizeopt.optimizer as optimizeopt import pypy.jit.metainterp.optimizeopt.virtualize as virtualize -from pypy.jit.metainterp.optimizeopt import optimize_loop_1 +from pypy.jit.metainterp.optimizeopt import optimize_loop_1, ALL_OPTS_DICT from pypy.jit.metainterp.optimizeutil import InvalidLoop from pypy.jit.metainterp.history import AbstractDescr, ConstInt, BoxInt from pypy.jit.metainterp.history import TreeLoop, LoopToken @@ -163,7 +163,10 @@ def optimize_loop(self, ops, optops, expected_preamble=None): loop = self.parse(ops) - expected = self.parse(optops) + if optops != "crash!": + expected = self.parse(optops) + else: + expected = "crash!" 
if expected_preamble: expected_preamble = self.parse(expected_preamble) # @@ -185,17 +188,18 @@ def clone_if_mutable(self): return self loop.preamble.start_resumedescr = FakeDescr() - optimize_loop_1(metainterp_sd, loop) + optimize_loop_1(metainterp_sd, loop, ALL_OPTS_DICT) # print print loop.preamble.inputargs print '\n'.join([str(o) for o in loop.preamble.operations]) - print + print print loop.inputargs print '\n'.join([str(o) for o in loop.operations]) print - + + assert expected != "crash!", "should have raised an exception" self.assert_equal(loop, expected) if expected_preamble: self.assert_equal(loop.preamble, expected_preamble, @@ -829,7 +833,7 @@ i3 = getfield_gc(p2, descr=valuedescr) escape(i3) p3 = new_with_vtable(ConstClass(node_vtable)) - setfield_gc(p3, i1, descr=valuedescr) + setfield_gc(p3, i1, descr=valuedescr) jump(i1, p3) """ # We cannot track virtuals that survive for more than two iterations. @@ -889,7 +893,7 @@ escape(i3) p2sub = new_with_vtable(ConstClass(node_vtable2)) setfield_gc(p2sub, i1, descr=valuedescr) - setfield_gc(p2, p2sub, descr=nextdescr) + setfield_gc(p2, p2sub, descr=nextdescr) jump(i1, p2, p2sub) """ expected = """ @@ -1014,7 +1018,7 @@ """ preamble = """ [i, p0] - i0 = getfield_gc(p0, descr=valuedescr) + i0 = getfield_gc(p0, descr=valuedescr) i1 = int_add(i0, i) jump(i, i1) """ @@ -1346,6 +1350,26 @@ self.node.value = 5 self.optimize_loop(ops, expected) + def test_getfield_gc_pure_3(self): + ops = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + escape(p2) + p3 = getfield_gc_pure(p1, descr=nextdescr) + escape(p3) + jump() + """ + expected = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + escape(p2) + escape(p2) + jump() + """ + self.optimize_loop(ops, expected) + def test_getfield_gc_nonpure_2(self): ops = """ [i] @@ -3440,7 +3464,7 @@ guard_true(i1) [] i2 = int_sub(i0, 10) i3 = int_lt(i2, -5) - guard_true(i3) [] + guard_true(i3) [] jump(i0) """ expected = """ @@ -3466,7 +3490,7 
@@ i1 = int_lt(i0, 4) guard_true(i1) [] i1p = int_gt(i0, -4) - guard_true(i1p) [] + guard_true(i1p) [] i2 = int_sub(i0, 10) jump(i0) """ @@ -3749,7 +3773,7 @@ ops = """ [p4, p7, i30] p16 = getfield_gc(p4, descr=valuedescr) - p17 = getarrayitem_gc(p4, 1, descr=arraydescr) + p17 = getarrayitem_gc(p4, 1, descr=arraydescr) guard_value(p16, ConstPtr(myptr), descr=) [] i1 = getfield_raw(p7, descr=nextdescr) i2 = int_add(i1, i30) @@ -3806,6 +3830,47 @@ self.node.value = 5 self.optimize_loop(ops, expected) + def test_complains_getfieldpure_setfield(self): + from pypy.jit.metainterp.optimizeopt.heap import BogusPureField + ops = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=nextdescr) + jump(p3) + """ + py.test.raises(BogusPureField, self.optimize_loop, ops, "crash!") + + def test_dont_complains_different_field(self): + ops = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=otherdescr) + escape(p2) + jump(p3) + """ + expected = """ + [p3] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + setfield_gc(p1, p3, descr=otherdescr) + escape(p2) + jump(p3) + """ + self.optimize_loop(ops, expected) + + def test_dont_complains_different_object(self): + ops = """ + [] + p1 = escape() + p2 = getfield_gc_pure(p1, descr=nextdescr) + p3 = escape() + setfield_gc(p3, p1, descr=nextdescr) + jump() + """ + self.optimize_loop(ops, ops) + def test_getfield_guard_const(self): ops = """ [p0] @@ -3875,7 +3940,7 @@ jump(p0) """ self.optimize_loop(ops, expected, expected) - + def test_addsub_ovf(self): ops = """ [i0] @@ -3995,7 +4060,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4034,7 +4099,7 @@ """ expected = """ [i0, i1, i2] - jump(i0, i1, i2) + jump(i0, i1, i2) """ self.optimize_loop(ops, expected, preamble) @@ -4052,7 +4117,7 @@ guard_false(i7) [] i8 = int_gt(i2c, -7) guard_true(i8) [] - i9 = 
int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ preamble = """ @@ -4064,12 +4129,12 @@ guard_true(i6) [] i8 = int_gt(i2c, -7) guard_true(i8) [] - i9 = int_is_zero(i2c) + i9 = int_is_zero(i2c) jump(i1, i2a, i2b, i2c) """ expected = """ [i0, i1, i2, i3] - jump(i0, i1, i2, i3) + jump(i0, i1, i2, i3) """ self.optimize_loop(ops, expected, preamble) @@ -4127,7 +4192,7 @@ def test_division_to_rshift(self): ops = """ [i1, i2] - it = int_gt(i1, 0) + it = int_gt(i1, 0) guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) @@ -4145,15 +4210,15 @@ """ expected = """ [i1, i2] - it = int_gt(i1, 0) - guard_true(it)[] + it = int_gt(i1, 0) + guard_true(it)[] i3 = int_floordiv(i1, i2) i4 = int_floordiv(2, i2) i5 = int_rshift(i1, 1) i6 = int_floordiv(3, i2) i7 = int_floordiv(i1, 3) i8 = int_floordiv(4, i2) - i9 = int_rshift(i1, 2) + i9 = int_rshift(i1, 2) i10 = int_floordiv(i1, 0) i11 = int_rshift(i1, 0) i12 = int_floordiv(i2, 2) @@ -4194,7 +4259,7 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) @@ -4218,16 +4283,19 @@ i9 = int_lt(i1b, 100) guard_true(i9) [] i10 = int_gt(i1b, -100) - guard_true(i10) [] + guard_true(i10) [] i13 = int_lshift(i1b, i2) i14 = int_rshift(i13, i2) i15 = int_lshift(i1b, 2) + i16 = int_rshift(i15, 2) i17 = int_lshift(i1b, 100) i18 = int_rshift(i17, 100) + i19 = int_eq(i1b, i16) + guard_true(i19) [] jump(i2, i3, i1b, i2b) """ self.optimize_loop(ops, expected) - + def test_subsub_ovf(self): ops = """ [i0] @@ -4411,7 +4479,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4439,9 +4507,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + jump(i0, i1, i1b, i2, i3) + """ + self.optimize_loop(ops, expected, 
preamble) def test_bound_rshift(self): ops = """ @@ -4476,7 +4544,7 @@ jump(i0, i1, i1b, i2, i3) """ preamble = """ - [i0, i1, i1b, i2, i3] + [i0, i1, i1b, i2, i3] i4 = int_lt(i1, 7) guard_true(i4) [] i4b = int_lt(i1b, 7) @@ -4504,9 +4572,9 @@ """ expected = """ [i0, i1, i1b, i2, i3] - jump(i0, i1, i1b, i2, i3) - """ - self.optimize_loop(ops, expected, preamble) + jump(i0, i1, i1b, i2, i3) + """ + self.optimize_loop(ops, expected, preamble) def test_bound_dont_backpropagate_rshift(self): ops = """ @@ -4519,7 +4587,7 @@ """ self.optimize_loop(ops, ops, ops) - + def test_mul_ovf(self): ops = """ [i0, i1] @@ -4658,7 +4726,7 @@ def sort_key(self): return id(self) - + for n in ('inst_w_seq', 'inst_index', 'inst_w_list', 'inst_length', 'inst_start', 'inst_step'): self.namespace[n] = FakeDescr(n) @@ -4700,7 +4768,7 @@ i87 = int_add(i84, i86) i91 = int_add(i80, 1) setfield_gc(p75, i91, descr=inst_index) - + p110 = same_as(ConstPtr(myptr)) i112 = same_as(3) i114 = same_as(39) @@ -4720,13 +4788,13 @@ p1 = getfield_gc(p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ preamble = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) jump(p0) """ expected = """ @@ -4739,7 +4807,7 @@ ops = """ [p0] p1 = getfield_gc(p0, descr=valuedescr) - setfield_gc(p0, p0, descr=valuedescr) + setfield_gc(p0, p0, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) setfield_gc(p0, p1, descr=valuedescr) jump(p0) @@ -4763,7 +4831,7 @@ p2 = new_with_vtable(ConstClass(node_vtable)) setfield_gc(p2, i1, descr=nextdescr) """ - + # ---------- def optimize_strunicode_loop(self, ops, optops, preamble=None): if not preamble: @@ -5045,6 +5113,40 @@ """ self.optimize_strunicode_loop(ops, expected) + def test_strgetitem_small(self): + ops = """ + [p0, i0] + i1 = strgetitem(p0, i0) + i2 = 
int_lt(i1, 256) + guard_true(i2) [] + i3 = int_ge(i1, 0) + guard_true(i3) [] + jump(p0, i0) + """ + expected = """ + [p0, i0] + i1 = strgetitem(p0, i0) + jump(p0, i0) + """ + self.optimize_loop(ops, expected) + + def test_strlen_positive(self): + ops = """ + [p0] + i0 = strlen(p0) + i1 = int_ge(i0, 0) + guard_true(i1) [] + i2 = int_gt(i0, -1) + guard_true(i2) [] + jump(p0) + """ + expected = """ + [p0] + i0 = strlen(p0) + jump(p0) + """ + self.optimize_loop(ops, expected) + # ---------- def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): from pypy.jit.metainterp.optimizeopt import string @@ -5404,7 +5506,6 @@ # more generally, supporting non-constant but virtual cases is # not obvious, because of the exception UnicodeDecodeError that # can be raised by ll_str2unicode() - class TestFoldIntAdds(OptimizeOptTest, LLtypeMixin): def test_fold(self): From commits-noreply at bitbucket.org Wed Mar 16 08:41:33 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 08:41:33 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Forgot to fix this one. Message-ID: <20110316074133.4A149282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42694:831f61259229 Date: 2011-03-16 00:39 -0700 http://bitbucket.org/pypy/pypy/changeset/831f61259229/ Log: Forgot to fix this one. 
diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -2,43 +2,67 @@ from pypy.jit.metainterp.optimizeopt.rewrite import OptRewrite from pypy.jit.metainterp.optimizeopt.intbounds import OptIntBounds from pypy.jit.metainterp.optimizeopt.virtualize import OptVirtualize -from pypy.jit.metainterp.optimizeopt.fold_intadd import OptAddition from pypy.jit.metainterp.optimizeopt.heap import OptHeap from pypy.jit.metainterp.optimizeopt.string import OptString from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble +from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall +from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify +from pypy.rlib.jit import PARAMETERS +from pypy.rlib.unroll import unrolling_iterable -def optimize_loop_1(metainterp_sd, loop, unroll=True, +ALL_OPTS = [('intbounds', OptIntBounds), + ('rewrite', OptRewrite), + ('virtualize', OptVirtualize), + ('string', OptString), + ('heap', OptHeap), + ('ffi', OptFfiCall), + ('unroll', None)] +# no direct instantiation of unroll +unroll_all_opts = unrolling_iterable(ALL_OPTS) + +ALL_OPTS_DICT = dict.fromkeys([name for name, _ in ALL_OPTS]) + +ALL_OPTS_NAMES = ':'.join([name for name, _ in ALL_OPTS]) +PARAMETERS['enable_opts'] = ALL_OPTS_NAMES + +def optimize_loop_1(metainterp_sd, loop, enable_opts, inline_short_preamble=True, retraced=False): """Optimize loop.operations to remove internal overheadish operations. 
""" - opt_str = OptString() - optimizations = [ - OptAddition(), - OptIntBounds(), - OptRewrite(), - OptVirtualize(), - opt_str, - OptHeap(), - ] + optimizations = [] + unroll = 'unroll' in enable_opts + for name, opt in unroll_all_opts: + if name in enable_opts: + if opt is not None: + o = opt() + if unroll and name == 'string': + o.enabled = False + # FIXME: Workaround to disable string optimisation + # during preamble but to keep it during the loop + optimizations.append(o) + + if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: + optimizations.append(OptSimplify()) + if inline_short_preamble: - optimizations = [OptInlineShortPreamble(retraced)] + optimizations - - if metainterp_sd.jit_ffi: - from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall - optimizations = optimizations + [ - OptFfiCall(), - ] + optimizations = [OptInlineShortPreamble(retraced)] + optimizations if unroll: - opt_str.enabled = False # FIXME: Workaround to disable string optimisation - # during preamble but to keep it during the loop optimize_unroll(metainterp_sd, loop, optimizations) else: optimizer = Optimizer(metainterp_sd, loop, optimizations) optimizer.propagate_all_forward() -def optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble=True, - retraced=False): +def optimize_bridge_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble=True, retraced=False): """The same, but for a bridge. """ - optimize_loop_1(metainterp_sd, bridge, False, inline_short_preamble, - retraced) + enable_opts = enable_opts.copy() + try: + del enable_opts['unroll'] + except KeyError: + pass + optimize_loop_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble, retraced) + +if __name__ == '__main__': + print ALL_OPTS_NAMES From commits-noreply at bitbucket.org Wed Mar 16 08:41:33 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 08:41:33 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Re-added my optimization. 
Message-ID: <20110316074133.C65E5282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42695:6dabfe362323 Date: 2011-03-16 00:41 -0700 http://bitbucket.org/pypy/pypy/changeset/6dabfe362323/ Log: Re-added my optimization. diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -7,10 +7,13 @@ from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify +from pypy.jit.metainterp.optimizeopt.fold_intadd import OptAddition from pypy.rlib.jit import PARAMETERS from pypy.rlib.unroll import unrolling_iterable -ALL_OPTS = [('intbounds', OptIntBounds), +ALL_OPTS = [ + ('fold_intadd', OptAddition), + ('intbounds', OptIntBounds), ('rewrite', OptRewrite), ('virtualize', OptVirtualize), ('string', OptString), From commits-noreply at bitbucket.org Wed Mar 16 10:02:20 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 10:02:20 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: (hakanardo) Changed code to emit all intermediate operations, leave elimination to backend. Magically cured the errors I was getting. Message-ID: <20110316090220.493EA282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42696:62d080096e19 Date: 2011-03-16 02:01 -0700 http://bitbucket.org/pypy/pypy/changeset/62d080096e19/ Log: (hakanardo) Changed code to emit all intermediate operations, leave elimination to backend. Magically cured the errors I was getting. 
diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -12,8 +12,8 @@ from pypy.rlib.unroll import unrolling_iterable ALL_OPTS = [ + ('intbounds', OptIntBounds), ('fold_intadd', OptAddition), - ('intbounds', OptIntBounds), ('rewrite', OptRewrite), ('virtualize', OptVirtualize), ('string', OptString), diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5519,6 +5519,8 @@ expected = """ [i0] + i1 = int_add(i0, 3) + i2 = int_add(i0, 19) i3 = int_add(i0, 28) jump(i3) """ diff --git a/pypy/jit/metainterp/optimizeopt/fold_intadd.py b/pypy/jit/metainterp/optimizeopt/fold_intadd.py --- a/pypy/jit/metainterp/optimizeopt/fold_intadd.py +++ b/pypy/jit/metainterp/optimizeopt/fold_intadd.py @@ -35,6 +35,11 @@ self.args[result] = root, constant + constant = ConstInt(constant) + new_op = self._int_add(root, constant, result) + print new_op + self.emit_operation(new_op) + def optimize_INT_ADD(self, op): lv = self.getvalue(op.getarg(0)) rv = self.getvalue(op.getarg(1)) From commits-noreply at bitbucket.org Wed Mar 16 10:54:44 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 10:54:44 +0100 (CET) Subject: [pypy-svn] pypy default: port this test Message-ID: <20110316095444.A71FD282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42697:bd9e3c37654f Date: 2011-03-16 10:44 +0100 http://bitbucket.org/pypy/pypy/changeset/bd9e3c37654f/ Log: port this test diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -541,6 +541,27 @@ jump(p0, p1, p2, i12, p4, descr=) """) + def 
test_exception_inside_loop_2(self): + def main(n): + def g(n): + raise ValueError(n) # ID: raise + def f(n): + g(n) + # + while n: + try: + f(n) + except ValueError: + pass + n -= 1 + return n + # + log = self.run(main, [1000], threshold=400) + assert log.result == 0 + loop, = log.loops_by_filename(self.filepath) + ops = log.opnames(loop.ops_by_id('raise')) + assert 'new' not in ops + def test_reraise(self): def f(n): i = 0 From commits-noreply at bitbucket.org Wed Mar 16 10:54:45 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 10:54:45 +0100 (CET) Subject: [pypy-svn] pypy default: improve the test Message-ID: <20110316095445.3DFA9282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42698:f6fabf47a027 Date: 2011-03-16 10:46 +0100 http://bitbucket.org/pypy/pypy/changeset/f6fabf47a027/ Log: improve the test diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -578,3 +578,11 @@ log = self.run(f, [100000]) assert log.result == 100000 loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i7 = int_lt(i4, i5) + guard_true(i7, descr=) + --EXC-TICK-- + i14 = int_add(i4, 1) + --TICK-- + jump(p0, p1, p2, p3, i14, i5, p6, descr=) + """) From commits-noreply at bitbucket.org Wed Mar 16 10:54:45 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 10:54:45 +0100 (CET) Subject: [pypy-svn] pypy default: fix tests that were broken by 90095e723230; not sure if it is the right fix, though Message-ID: <20110316095445.D9A26282B9D@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42699:f5f1f3a3116f Date: 2011-03-16 10:54 +0100 http://bitbucket.org/pypy/pypy/changeset/f5f1f3a3116f/ Log: fix tests that were broken by 90095e723230; not sure if it is the right fix, though diff --git a/pypy/tool/jitlogparser/parser.py 
b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ b/pypy/tool/jitlogparser/parser.py @@ -177,7 +177,7 @@ if bc.inline_level is not None and bc.inline_level + 1 != len(stack): if bc.inline_level < len(stack): last = stack.pop() - stack[-1].append(cls(last, getpath(stack), storage)) + stack[-1].append(cls(last, getpath(stack), storage, inputargs)) else: stack.append([]) stack[-1].append(bc) From commits-noreply at bitbucket.org Wed Mar 16 10:54:46 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 10:54:46 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110316095446.22542282B9E@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42700:a0600a473e00 Date: 2011-03-16 10:54 +0100 http://bitbucket.org/pypy/pypy/changeset/a0600a473e00/ Log: merge heads From commits-noreply at bitbucket.org Wed Mar 16 10:57:45 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 10:57:45 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Added support for subtraction and corresponding test. Message-ID: <20110316095745.29FB036C20E@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42701:76b1630dd06b Date: 2011-03-16 02:56 -0700 http://bitbucket.org/pypy/pypy/changeset/76b1630dd06b/ Log: Added support for subtraction and corresponding test. 
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5508,7 +5508,7 @@ # can be raised by ll_str2unicode() class TestFoldIntAdds(OptimizeOptTest, LLtypeMixin): - def test_fold(self): + def test_fold_add(self): ops = """ [i0] i1 = int_add(i0, 3) @@ -5526,6 +5526,27 @@ """ self.optimize_loop(ops, expected) + def test_fold_sub(self): + ops = """ + [i0] + i1 = int_add(i0, 3) + i2 = int_sub(i1, 16) + i3 = int_add(i2, 9) + i4 = int_sub(i3, 29) + jump(i3) + """ + + expected = """ + [i0] + i1 = int_add(i0, 3) + i2 = int_sub(i0, 13) + i3 = int_sub(i0, 4) + i4 = int_sub(i0, 33) + jump(i3) + """ + self.optimize_loop(ops, expected) + + ##class TestOOtype(OptimizeOptTest, OOtypeMixin): ## def test_instanceof(self): diff --git a/pypy/jit/metainterp/optimizeopt/fold_intadd.py b/pypy/jit/metainterp/optimizeopt/fold_intadd.py --- a/pypy/jit/metainterp/optimizeopt/fold_intadd.py +++ b/pypy/jit/metainterp/optimizeopt/fold_intadd.py @@ -21,12 +21,18 @@ func(self, op) break else: - self.optimize_default(op) + #self.optimize_default(op) + self.emit_operation(op) - def _int_add(self, variable, constant, result): - return ResOperation(rop.INT_ADD, [variable, constant], result) + def _int_operation(self, variable, constant, result): + if constant < 0: + constant = ConstInt(-constant) + return ResOperation(rop.INT_SUB, [variable, constant], result) + else: + constant = ConstInt(constant) + return ResOperation(rop.INT_ADD, [variable, constant], result) - def _store_add(self, variable, constant, result): + def _process_add(self, variable, constant, result): try: root, stored_constant = self.args[variable] constant = constant + stored_constant @@ -35,48 +41,56 @@ self.args[result] = root, constant - constant = ConstInt(constant) - new_op = self._int_add(root, constant, result) - print new_op + new_op = self._int_operation(root, constant, 
result) self.emit_operation(new_op) def optimize_INT_ADD(self, op): lv = self.getvalue(op.getarg(0)) rv = self.getvalue(op.getarg(1)) - print "lv = %s rv = %s" % (lv.box, rv.box) result = op.result if lv.is_constant() and rv.is_constant(): self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? elif lv.is_constant(): constant = lv.box.getint() - self._store_add(op.getarg(1), constant, result) + self._process_add(op.getarg(1), constant, result) elif rv.is_constant(): constant = rv.box.getint() - self._store_add(op.getarg(0), constant, result) + self._process_add(op.getarg(0), constant, result) + else: + self.emit_operation(op) + + def optimize_INT_SUB(self, op): + lv = self.getvalue(op.getarg(0)) + rv = self.getvalue(op.getarg(1)) + result = op.result + if lv.is_constant() and rv.is_constant(): + self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? + elif lv.is_constant(): + #constant = lv.box.getint() + #self._process_add(op.getarg(1), constant, result) + # TODO: implement + self.emit_operation(op) + elif rv.is_constant(): + constant = rv.box.getint() + self._process_add(op.getarg(0), -constant, result) else: self.emit_operation(op) def optimize_default(self, op): for i in range(op.numargs()): arg = self.getvalue(op.getarg(i)) - print 'type(%s) = %s' % (arg.box, type(arg)) if arg.is_constant(): continue try: variable = op.getarg(i) root, constant = self.args[variable] - del self.args[variable] # TODO: mark as used instead of deleting - constant = ConstInt(constant) - new_op = self._int_add(root, constant, variable) - print new_op + new_op = self._int_operation(root, constant, variable) self.emit_operation(new_op) except KeyError: pass - print op self.emit_operation(op) - #def optimize_INT_SUB(self, op): pass optimize_ops = _findall(OptAddition, 'optimize_') From commits-noreply at bitbucket.org Wed Mar 16 10:58:28 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 10:58:28 +0100 
(CET) Subject: [pypy-svn] pypy default: Fix test. Message-ID: <20110316095828.CD99936C20E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42702:a60b01e0e9ee Date: 2011-03-16 05:56 -0400 http://bitbucket.org/pypy/pypy/changeset/a60b01e0e9ee/ Log: Fix test. diff --git a/pypy/tool/jitlogparser/parser.py b/pypy/tool/jitlogparser/parser.py --- a/pypy/tool/jitlogparser/parser.py +++ b/pypy/tool/jitlogparser/parser.py @@ -147,7 +147,7 @@ # factory method TraceForOpcode = TraceForOpcode - def __init__(self, chunks, path, storage, inputargs): + def __init__(self, chunks, path, storage, inputargs=''): self.path = path self.inputargs = inputargs self.chunks = chunks From commits-noreply at bitbucket.org Wed Mar 16 10:58:29 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 10:58:29 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110316095829.5521936C20E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42703:94dc9f22389a Date: 2011-03-16 05:58 -0400 http://bitbucket.org/pypy/pypy/changeset/94dc9f22389a/ Log: merge heads From commits-noreply at bitbucket.org Wed Mar 16 11:14:38 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 16 Mar 2011 11:14:38 +0100 (CET) Subject: [pypy-svn] pypy jit-usable_retrace: hg merge default Message-ID: <20110316101438.21143282B9D@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42704:d532da6bf910 Date: 2011-03-13 13:07 +0100 http://bitbucket.org/pypy/pypy/changeset/d532da6bf910/ Log: hg merge default diff --git a/pypy/module/conftest.py b/pypy/module/conftest.py deleted file mode 100644 --- a/pypy/module/conftest.py +++ /dev/null @@ -1,18 +0,0 @@ -import py -from pypy.tool.lib_pypy import LIB_PYPY - -class MultipleDirCollector(py.test.collect.Collector): - def __init__(self, name, mainfspath, fspaths, parent=None, config=None): - super(MultipleDirCollector, self).__init__(name, parent, config) - self.main_collector = 
py.test.collect.Directory(mainfspath, self) - self.collectors = [py.test.collect.Directory(fspath, self) - for fspath in fspaths] - - def collect(self): - return self.main_collector.collect() + self.collectors - - -def pytest_collect_directory(path, parent): - if path.basename == 'test_lib_pypy': - # collect all the test in BOTH test_lib_pypy and ../../lib_pypy - return MultipleDirCollector(path.basename, path, [LIB_PYPY], parent) diff --git a/py/_test/parseopt.py b/py/_test/parseopt.py deleted file mode 100644 --- a/py/_test/parseopt.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -thin wrapper around Python's optparse.py -adding some extra checks and ways to systematically -have Environment variables provide default values -for options. basic usage: - - >>> parser = Parser() - >>> parser.addoption("--hello", action="store_true", dest="hello") - >>> option, args = parser.parse(['--hello']) - >>> option.hello - True - >>> args - [] - -""" -import py -import optparse - -class Parser: - """ Parser for command line arguments. """ - - def __init__(self, usage=None, processopt=None): - self._anonymous = OptionGroup("custom options", parser=self) - self._groups = [] - self._processopt = processopt - self._usage = usage - self.hints = [] - - def processoption(self, option): - if self._processopt: - if option.dest: - self._processopt(option) - - def addnote(self, note): - self._notes.append(note) - - def getgroup(self, name, description="", after=None): - for group in self._groups: - if group.name == name: - return group - group = OptionGroup(name, description, parser=self) - i = 0 - for i, grp in enumerate(self._groups): - if grp.name == after: - break - self._groups.insert(i+1, group) - return group - - addgroup = getgroup - def addgroup(self, name, description=""): - py.log._apiwarn("1.1", "use getgroup() which gets-or-creates") - return self.getgroup(name, description) - - def addoption(self, *opts, **attrs): - """ add an optparse-style option. 
""" - self._anonymous.addoption(*opts, **attrs) - - def parse(self, args): - optparser = MyOptionParser(self) - groups = self._groups + [self._anonymous] - for group in groups: - if group.options: - desc = group.description or group.name - optgroup = optparse.OptionGroup(optparser, desc) - optgroup.add_options(group.options) - optparser.add_option_group(optgroup) - return optparser.parse_args([str(x) for x in args]) - - def parse_setoption(self, args, option): - parsedoption, args = self.parse(args) - for name, value in parsedoption.__dict__.items(): - setattr(option, name, value) - return args - - -class OptionGroup: - def __init__(self, name, description="", parser=None): - self.name = name - self.description = description - self.options = [] - self.parser = parser - - def addoption(self, *optnames, **attrs): - """ add an option to this group. """ - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=False) - - def _addoption(self, *optnames, **attrs): - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=True) - - def _addoption_instance(self, option, shortupper=False): - if not shortupper: - for opt in option._short_opts: - if opt[0] == '-' and opt[1].islower(): - raise ValueError("lowercase shortoptions reserved") - if self.parser: - self.parser.processoption(option) - self.options.append(option) - - -class MyOptionParser(optparse.OptionParser): - def __init__(self, parser): - self._parser = parser - optparse.OptionParser.__init__(self, usage=parser._usage) - def format_epilog(self, formatter): - hints = self._parser.hints - if hints: - s = "\n".join(["hint: " + x for x in hints]) + "\n" - s = "\n" + s + "\n" - return s - return "" diff --git a/py/_plugin/pytest_pdb.py b/py/_plugin/pytest_pdb.py deleted file mode 100644 --- a/py/_plugin/pytest_pdb.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -interactive debugging with the Python Debugger. 
-""" -import py -import pdb, sys, linecache - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--pdb', - action="store_true", dest="usepdb", default=False, - help="start the interactive Python debugger on errors.") - -def pytest_configure(config): - if config.getvalue("usepdb"): - config.pluginmanager.register(PdbInvoke(), 'pdb') - -class PdbInvoke: - def pytest_runtest_makereport(self, item, call): - if call.excinfo and not \ - call.excinfo.errisinstance(py.test.skip.Exception): - # play well with capturing, slightly hackish - capman = item.config.pluginmanager.getplugin('capturemanager') - capman.suspendcapture() - - tw = py.io.TerminalWriter() - repr = call.excinfo.getrepr() - repr.toterminal(tw) - post_mortem(call.excinfo._excinfo[2]) - - capman.resumecapture_item(item) - -class Pdb(py.std.pdb.Pdb): - def do_list(self, arg): - self.lastcmd = 'list' - last = None - if arg: - try: - x = eval(arg, {}, {}) - if type(x) == type(()): - first, last = x - first = int(first) - last = int(last) - if last < first: - # Assume it's a count - last = first + last - else: - first = max(1, int(x) - 5) - except: - print ('*** Error in argument: %s' % repr(arg)) - return - elif self.lineno is None: - first = max(1, self.curframe.f_lineno - 5) - else: - first = self.lineno + 1 - if last is None: - last = first + 10 - filename = self.curframe.f_code.co_filename - breaklist = self.get_file_breaks(filename) - try: - for lineno in range(first, last+1): - # start difference from normal do_line - line = self._getline(filename, lineno) - # end difference from normal do_line - if not line: - print ('[EOF]') - break - else: - s = repr(lineno).rjust(3) - if len(s) < 4: s = s + ' ' - if lineno in breaklist: s = s + 'B' - else: s = s + ' ' - if lineno == self.curframe.f_lineno: - s = s + '->' - sys.stdout.write(s + '\t' + line) - self.lineno = lineno - except KeyboardInterrupt: - pass - do_l = do_list - - def _getline(self, filename, lineno): - if 
hasattr(filename, "__source__"): - try: - return filename.__source__.lines[lineno - 1] + "\n" - except IndexError: - return None - return linecache.getline(filename, lineno) - - def get_stack(self, f, t): - # Modified from bdb.py to be able to walk the stack beyond generators, - # which does not work in the normal pdb :-( - stack, i = pdb.Pdb.get_stack(self, f, t) - if f is None: - i = max(0, len(stack) - 1) - while i and stack[i][0].f_locals.get("__tracebackhide__", False): - i-=1 - return stack, i - -def post_mortem(t): - p = Pdb() - p.reset() - p.interaction(None, t) - -def set_trace(): - # again, a copy of the version in pdb.py - Pdb().set_trace(sys._getframe().f_back) diff --git a/py/_plugin/pytest_runner.py b/py/_plugin/pytest_runner.py deleted file mode 100644 --- a/py/_plugin/pytest_runner.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -collect and run test items and create reports. -""" - -import py, sys - -def pytest_namespace(): - return { - 'raises' : raises, - 'skip' : skip, - 'importorskip' : importorskip, - 'fail' : fail, - 'xfail' : xfail, - 'exit' : exit, - } - -# -# pytest plugin hooks - -# XXX move to pytest_sessionstart and fix py.test owns tests -def pytest_configure(config): - config._setupstate = SetupState() - -def pytest_sessionfinish(session, exitstatus): - if hasattr(session.config, '_setupstate'): - hook = session.config.hook - rep = hook.pytest__teardown_final(session=session) - if rep: - hook.pytest__teardown_final_logerror(report=rep) - -def pytest_make_collect_report(collector): - result = excinfo = None - try: - result = collector._memocollect() - except KeyboardInterrupt: - raise - except: - excinfo = py.code.ExceptionInfo() - return CollectReport(collector, result, excinfo) - -def pytest_runtest_protocol(item): - runtestprotocol(item) - return True - -def runtestprotocol(item, log=True): - rep = call_and_report(item, "setup", log) - reports = [rep] - if rep.passed: - reports.append(call_and_report(item, "call", log)) - 
reports.append(call_and_report(item, "teardown", log)) - return reports - -def pytest_runtest_setup(item): - item.config._setupstate.prepare(item) - -def pytest_runtest_call(item): - if not item._deprecated_testexecution(): - item.runtest() - -def pytest_runtest_makereport(item, call): - return ItemTestReport(item, call.excinfo, call.when) - -def pytest_runtest_teardown(item): - item.config._setupstate.teardown_exact(item) - -def pytest__teardown_final(session): - call = CallInfo(session.config._setupstate.teardown_all, when="teardown") - if call.excinfo: - ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir) - call.excinfo.traceback = ntraceback.filter() - rep = TeardownErrorReport(call.excinfo) - return rep - -def pytest_report_teststatus(report): - if report.when in ("setup", "teardown"): - if report.failed: - # category, shortletter, verbose-word - return "error", "E", "ERROR" - elif report.skipped: - return "skipped", "s", "SKIPPED" - else: - return "", "", "" -# -# Implementation - -def call_and_report(item, when, log=True): - call = call_runtest_hook(item, when) - hook = item.ihook - report = hook.pytest_runtest_makereport(item=item, call=call) - if log and (when == "call" or not report.passed): - hook.pytest_runtest_logreport(report=report) - return report - -def call_runtest_hook(item, when): - hookname = "pytest_runtest_" + when - ihook = getattr(item.ihook, hookname) - return CallInfo(lambda: ihook(item=item), when=when) - -class CallInfo: - excinfo = None - def __init__(self, func, when): - self.when = when - try: - self.result = func() - except KeyboardInterrupt: - raise - except: - self.excinfo = py.code.ExceptionInfo() - - def __repr__(self): - if self.excinfo: - status = "exception: %s" % str(self.excinfo.value) - else: - status = "result: %r" % (self.result,) - return "" % (self.when, status) - -class BaseReport(object): - def __repr__(self): - l = ["%s=%s" %(key, value) - for key, value in self.__dict__.items()] - return "<%s %s>" 
%(self.__class__.__name__, " ".join(l),) - - def toterminal(self, out): - longrepr = self.longrepr - if hasattr(longrepr, 'toterminal'): - longrepr.toterminal(out) - else: - out.line(str(longrepr)) - -class ItemTestReport(BaseReport): - failed = passed = skipped = False - - def __init__(self, item, excinfo=None, when=None): - self.item = item - self.when = when - if item and when != "setup": - self.keywords = item.readkeywords() - else: - # if we fail during setup it might mean - # we are not able to access the underlying object - # this might e.g. happen if we are unpickled - # and our parent collector did not collect us - # (because it e.g. skipped for platform reasons) - self.keywords = {} - if not excinfo: - self.passed = True - self.shortrepr = "." - else: - if not isinstance(excinfo, py.code.ExceptionInfo): - self.failed = True - shortrepr = "?" - longrepr = excinfo - elif excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - shortrepr = "s" - longrepr = self.item._repr_failure_py(excinfo) - else: - self.failed = True - shortrepr = self.item.shortfailurerepr - if self.when == "call": - longrepr = self.item.repr_failure(excinfo) - else: # exception in setup or teardown - longrepr = self.item._repr_failure_py(excinfo) - shortrepr = shortrepr.lower() - self.shortrepr = shortrepr - self.longrepr = longrepr - - def __repr__(self): - status = (self.passed and "passed" or - self.skipped and "skipped" or - self.failed and "failed" or - "CORRUPT") - l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,] - if hasattr(self, 'node'): - l.append("txnode=%s" % self.node.gateway.id) - info = " " .join(map(str, l)) - return "" % info - - def getnode(self): - return self.item - -class CollectReport(BaseReport): - skipped = failed = passed = False - - def __init__(self, collector, result, excinfo=None): - self.collector = collector - if not excinfo: - self.passed = True - self.result = result - else: - style = "short" - if 
collector.config.getvalue("fulltrace"): - style = "long" - self.longrepr = self.collector._repr_failure_py(excinfo, - style=style) - if excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - self.reason = str(excinfo.value) - else: - self.failed = True - - def getnode(self): - return self.collector - -class TeardownErrorReport(BaseReport): - skipped = passed = False - failed = True - when = "teardown" - def __init__(self, excinfo): - self.longrepr = excinfo.getrepr(funcargs=True) - -class SetupState(object): - """ shared state for setting up/tearing down test items or collectors. """ - def __init__(self): - self.stack = [] - self._finalizers = {} - - def addfinalizer(self, finalizer, colitem): - """ attach a finalizer to the given colitem. - if colitem is None, this will add a finalizer that - is called at the end of teardown_all(). - """ - assert hasattr(finalizer, '__call__') - #assert colitem in self.stack - self._finalizers.setdefault(colitem, []).append(finalizer) - - def _pop_and_teardown(self): - colitem = self.stack.pop() - self._teardown_with_finalization(colitem) - - def _callfinalizers(self, colitem): - finalizers = self._finalizers.pop(colitem, None) - while finalizers: - fin = finalizers.pop() - fin() - - def _teardown_with_finalization(self, colitem): - self._callfinalizers(colitem) - if colitem: - colitem.teardown() - for colitem in self._finalizers: - assert colitem is None or colitem in self.stack - - def teardown_all(self): - while self.stack: - self._pop_and_teardown() - self._teardown_with_finalization(None) - assert not self._finalizers - - def teardown_exact(self, item): - if self.stack and item == self.stack[-1]: - self._pop_and_teardown() - else: - self._callfinalizers(item) - - def prepare(self, colitem): - """ setup objects along the collector chain to the test-method - and teardown previously setup objects.""" - needed_collectors = colitem.listchain() - while self.stack: - if self.stack == 
needed_collectors[:len(self.stack)]: - break - self._pop_and_teardown() - # check if the last collection node has raised an error - for col in self.stack: - if hasattr(col, '_prepare_exc'): - py.builtin._reraise(*col._prepare_exc) - for col in needed_collectors[len(self.stack):]: - self.stack.append(col) - try: - col.setup() - except Exception: - col._prepare_exc = sys.exc_info() - raise - -# ============================================================= -# Test OutcomeExceptions and helpers for creating them. - - -class OutcomeException(Exception): - """ OutcomeException and its subclass instances indicate and - contain info about test and collection outcomes. - """ - def __init__(self, msg=None, excinfo=None): - self.msg = msg - self.excinfo = excinfo - - def __repr__(self): - if self.msg: - return repr(self.msg) - return "<%s instance>" %(self.__class__.__name__,) - __str__ = __repr__ - -class Skipped(OutcomeException): - # XXX hackish: on 3k we fake to live in the builtins - # in order to have Skipped exception printing shorter/nicer - __module__ = 'builtins' - -class Failed(OutcomeException): - """ raised from an explicit call to py.test.fail() """ - __module__ = 'builtins' - -class XFailed(OutcomeException): - """ raised from an explicit call to py.test.xfail() """ - __module__ = 'builtins' - -class ExceptionFailure(Failed): - """ raised by py.test.raises on an exception-assertion mismatch. """ - def __init__(self, expr, expected, msg=None, excinfo=None): - Failed.__init__(self, msg=msg, excinfo=excinfo) - self.expr = expr - self.expected = expected - -class Exit(KeyboardInterrupt): - """ raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """ - def __init__(self, msg="unknown reason"): - self.msg = msg - KeyboardInterrupt.__init__(self, msg) - -# exposed helper methods - -def exit(msg): - """ exit testing process as if KeyboardInterrupt was triggered. 
""" - __tracebackhide__ = True - raise Exit(msg) - -exit.Exception = Exit - -def skip(msg=""): - """ skip an executing test with the given message. Note: it's usually - better use the py.test.mark.skipif marker to declare a test to be - skipped under certain conditions like mismatching platforms or - dependencies. See the pytest_skipping plugin for details. - """ - __tracebackhide__ = True - raise Skipped(msg=msg) - -skip.Exception = Skipped - -def fail(msg=""): - """ explicitely fail an currently-executing test with the given Message. """ - __tracebackhide__ = True - raise Failed(msg=msg) - -fail.Exception = Failed - -def xfail(reason=""): - """ xfail an executing test or setup functions, taking an optional - reason string. - """ - __tracebackhide__ = True - raise XFailed(reason) -xfail.Exception = XFailed - -def raises(ExpectedException, *args, **kwargs): - """ if args[0] is callable: raise AssertionError if calling it with - the remaining arguments does not raise the expected exception. - if args[0] is a string: raise AssertionError if executing the - the string in the calling scope does not raise expected exception. - for examples: - x = 5 - raises(TypeError, lambda x: x + 'hello', x=x) - raises(TypeError, "x + 'hello'") - """ - __tracebackhide__ = True - assert args - if isinstance(args[0], str): - code, = args - assert isinstance(code, str) - frame = sys._getframe(1) - loc = frame.f_locals.copy() - loc.update(kwargs) - #print "raises frame scope: %r" % frame.f_locals - try: - code = py.code.Source(code).compile() - py.builtin.exec_(code, frame.f_globals, loc) - # XXX didn'T mean f_globals == f_locals something special? - # this is destroyed here ... 
- except ExpectedException: - return py.code.ExceptionInfo() - else: - func = args[0] - try: - func(*args[1:], **kwargs) - except ExpectedException: - return py.code.ExceptionInfo() - k = ", ".join(["%s=%r" % x for x in kwargs.items()]) - if k: - k = ', ' + k - expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k) - raise ExceptionFailure(msg="DID NOT RAISE", - expr=args, expected=ExpectedException) - -raises.Exception = ExceptionFailure - -def importorskip(modname, minversion=None): - """ return imported module if it has a higher __version__ than the - optionally specified 'minversion' - otherwise call py.test.skip() - with a message detailing the mismatch. - """ - compile(modname, '', 'eval') # to catch syntaxerrors - try: - mod = __import__(modname, None, None, ['__doc__']) - except ImportError: - py.test.skip("could not import %r" %(modname,)) - if minversion is None: - return mod - verattr = getattr(mod, '__version__', None) - if isinstance(minversion, str): - minver = minversion.split(".") - else: - minver = list(minversion) - if verattr is None or verattr.split(".") < minver: - py.test.skip("module %r has __version__ %r, required is: %r" %( - modname, verattr, minversion)) - return mod - diff --git a/py/_test/funcargs.py b/py/_test/funcargs.py deleted file mode 100644 --- a/py/_test/funcargs.py +++ /dev/null @@ -1,176 +0,0 @@ -import py - -def getfuncargnames(function): - argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0] - startindex = py.std.inspect.ismethod(function) and 1 or 0 - defaults = getattr(function, 'func_defaults', - getattr(function, '__defaults__', None)) or () - numdefaults = len(defaults) - if numdefaults: - return argnames[startindex:-numdefaults] - return argnames[startindex:] - -def fillfuncargs(function): - """ fill missing funcargs. 
""" - request = FuncargRequest(pyfuncitem=function) - request._fillfuncargs() - -def getplugins(node, withpy=False): # might by any node - plugins = node.config._getmatchingplugins(node.fspath) - if withpy: - mod = node.getparent(py.test.collect.Module) - if mod is not None: - plugins.append(mod.obj) - inst = node.getparent(py.test.collect.Instance) - if inst is not None: - plugins.append(inst.obj) - return plugins - -_notexists = object() -class CallSpec: - def __init__(self, funcargs, id, param): - self.funcargs = funcargs - self.id = id - if param is not _notexists: - self.param = param - def __repr__(self): - return "" %( - self.id, getattr(self, 'param', '?'), self.funcargs) - -class Metafunc: - def __init__(self, function, config=None, cls=None, module=None): - self.config = config - self.module = module - self.function = function - self.funcargnames = getfuncargnames(function) - self.cls = cls - self.module = module - self._calls = [] - self._ids = py.builtin.set() - - def addcall(self, funcargs=None, id=_notexists, param=_notexists): - assert funcargs is None or isinstance(funcargs, dict) - if id is None: - raise ValueError("id=None not allowed") - if id is _notexists: - id = len(self._calls) - id = str(id) - if id in self._ids: - raise ValueError("duplicate id %r" % id) - self._ids.add(id) - self._calls.append(CallSpec(funcargs, id, param)) - -class FuncargRequest: - _argprefix = "pytest_funcarg__" - _argname = None - - class LookupError(LookupError): - """ error on performing funcarg request. 
""" - - def __init__(self, pyfuncitem): - self._pyfuncitem = pyfuncitem - self.function = pyfuncitem.obj - self.module = pyfuncitem.getparent(py.test.collect.Module).obj - clscol = pyfuncitem.getparent(py.test.collect.Class) - self.cls = clscol and clscol.obj or None - self.instance = py.builtin._getimself(self.function) - self.config = pyfuncitem.config - self.fspath = pyfuncitem.fspath - if hasattr(pyfuncitem, '_requestparam'): - self.param = pyfuncitem._requestparam - self._plugins = getplugins(pyfuncitem, withpy=True) - self._funcargs = self._pyfuncitem.funcargs.copy() - self._name2factory = {} - self._currentarg = None - - def _fillfuncargs(self): - argnames = getfuncargnames(self.function) - if argnames: - assert not getattr(self._pyfuncitem, '_args', None), ( - "yielded functions cannot have funcargs") - for argname in argnames: - if argname not in self._pyfuncitem.funcargs: - self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname) - - def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): - """ cache and return result of calling setup(). - - The requested argument name, the scope and the ``extrakey`` - determine the cache key. The scope also determines when - teardown(result) will be called. valid scopes are: - scope == 'function': when the single test function run finishes. - scope == 'module': when tests in a different module are run - scope == 'session': when tests of the session have run. - """ - if not hasattr(self.config, '_setupcache'): - self.config._setupcache = {} # XXX weakref? 
- cachekey = (self._currentarg, self._getscopeitem(scope), extrakey) - cache = self.config._setupcache - try: - val = cache[cachekey] - except KeyError: - val = setup() - cache[cachekey] = val - if teardown is not None: - def finalizer(): - del cache[cachekey] - teardown(val) - self._addfinalizer(finalizer, scope=scope) - return val - - def getfuncargvalue(self, argname): - try: - return self._funcargs[argname] - except KeyError: - pass - if argname not in self._name2factory: - self._name2factory[argname] = self.config.pluginmanager.listattr( - plugins=self._plugins, - attrname=self._argprefix + str(argname) - ) - #else: we are called recursively - if not self._name2factory[argname]: - self._raiselookupfailed(argname) - funcargfactory = self._name2factory[argname].pop() - oldarg = self._currentarg - self._currentarg = argname - try: - self._funcargs[argname] = res = funcargfactory(request=self) - finally: - self._currentarg = oldarg - return res - - def _getscopeitem(self, scope): - if scope == "function": - return self._pyfuncitem - elif scope == "module": - return self._pyfuncitem.getparent(py.test.collect.Module) - elif scope == "session": - return None - raise ValueError("unknown finalization scope %r" %(scope,)) - - def _addfinalizer(self, finalizer, scope): - colitem = self._getscopeitem(scope) - self.config._setupstate.addfinalizer( - finalizer=finalizer, colitem=colitem) - - def addfinalizer(self, finalizer): - """ call the given finalizer after test function finished execution. 
""" - self._addfinalizer(finalizer, scope="function") - - def __repr__(self): - return "" %(self._pyfuncitem) - - def _raiselookupfailed(self, argname): - available = [] - for plugin in self._plugins: - for name in vars(plugin): - if name.startswith(self._argprefix): - name = name[len(self._argprefix):] - if name not in available: - available.append(name) - fspath, lineno, msg = self._pyfuncitem.reportinfo() - msg = "LookupError: no factory found for function argument %r" % (argname,) - msg += "\n available funcargs: %s" %(", ".join(available),) - msg += "\n use 'py.test --funcargs [testpath]' for help on them." - raise self.LookupError(msg) diff --git a/py/_cmdline/pycountloc.py b/py/_cmdline/pycountloc.py deleted file mode 100755 --- a/py/_cmdline/pycountloc.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# hands on script to compute the non-empty Lines of Code -# for tests and non-test code - -"""\ -py.countloc [PATHS] - -Count (non-empty) lines of python code and number of python files recursively -starting from a list of paths given on the command line (starting from the -current working directory). Distinguish between test files and normal ones and -report them separately. 
-""" -import py - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - (options, args) = parser.parse_args() - countloc(args) - -def nodot(p): - return p.check(dotfile=0) - -class FileCounter(object): - def __init__(self): - self.file2numlines = {} - self.numlines = 0 - self.numfiles = 0 - - def addrecursive(self, directory, fil="*.py", rec=nodot): - for x in directory.visit(fil, rec): - self.addfile(x) - - def addfile(self, fn, emptylines=False): - if emptylines: - s = len(p.readlines()) - else: - s = 0 - for i in fn.readlines(): - if i.strip(): - s += 1 - self.file2numlines[fn] = s - self.numfiles += 1 - self.numlines += s - - def getnumlines(self, fil): - numlines = 0 - for path, value in self.file2numlines.items(): - if fil(path): - numlines += value - return numlines - - def getnumfiles(self, fil): - numfiles = 0 - for path in self.file2numlines: - if fil(path): - numfiles += 1 - return numfiles - -def get_loccount(locations=None): - if locations is None: - localtions = [py.path.local()] - counter = FileCounter() - for loc in locations: - counter.addrecursive(loc, '*.py', rec=nodot) - - def istestfile(p): - return p.check(fnmatch='test_*.py') - isnottestfile = lambda x: not istestfile(x) - - numfiles = counter.getnumfiles(isnottestfile) - numlines = counter.getnumlines(isnottestfile) - numtestfiles = counter.getnumfiles(istestfile) - numtestlines = counter.getnumlines(istestfile) - - return counter, numfiles, numlines, numtestfiles, numtestlines - -def countloc(paths=None): - if not paths: - paths = ['.'] - locations = [py.path.local(x) for x in paths] - (counter, numfiles, numlines, numtestfiles, - numtestlines) = get_loccount(locations) - - items = counter.file2numlines.items() - items.sort(lambda x,y: cmp(x[1], y[1])) - for x, y in items: - print("%3d %30s" % (y,x)) - - print("%30s %3d" %("number of testfiles", numtestfiles)) - print("%30s %3d" %("number of non-empty testlines", numtestlines)) - print("%30s %3d" %("number of files", 
numfiles)) - print("%30s %3d" %("number of non-empty lines", numlines)) - diff --git a/py/_cmdline/pyconvert_unittest.py b/py/_cmdline/pyconvert_unittest.py deleted file mode 100644 --- a/py/_cmdline/pyconvert_unittest.py +++ /dev/null @@ -1,253 +0,0 @@ -import re -import sys - -try: - import parser -except ImportError: - parser = None - -d={} -# d is the dictionary of unittest changes, keyed to the old name -# used by unittest. -# d[old][0] is the new replacement function. -# d[old][1] is the operator you will substitute, or '' if there is none. -# d[old][2] is the possible number of arguments to the unittest -# function. - -# Old Unittest Name new name operator # of args -d['assertRaises'] = ('raises', '', ['Any']) -d['fail'] = ('raise AssertionError', '', [0,1]) -d['assert_'] = ('assert', '', [1,2]) -d['failIf'] = ('assert not', '', [1,2]) -d['assertEqual'] = ('assert', ' ==', [2,3]) -d['failIfEqual'] = ('assert not', ' ==', [2,3]) -d['assertIn'] = ('assert', ' in', [2,3]) -d['assertNotIn'] = ('assert', ' not in', [2,3]) -d['assertNotEqual'] = ('assert', ' !=', [2,3]) -d['failUnlessEqual'] = ('assert', ' ==', [2,3]) -d['assertAlmostEqual'] = ('assert round', ' ==', [2,3,4]) -d['failIfAlmostEqual'] = ('assert not round', ' ==', [2,3,4]) -d['assertNotAlmostEqual'] = ('assert round', ' !=', [2,3,4]) -d['failUnlessAlmostEquals'] = ('assert round', ' ==', [2,3,4]) - -# the list of synonyms -d['failUnlessRaises'] = d['assertRaises'] -d['failUnless'] = d['assert_'] -d['assertEquals'] = d['assertEqual'] -d['assertNotEquals'] = d['assertNotEqual'] -d['assertAlmostEquals'] = d['assertAlmostEqual'] -d['assertNotAlmostEquals'] = d['assertNotAlmostEqual'] - -# set up the regular expressions we will need -leading_spaces = re.compile(r'^(\s*)') # this never fails - -pat = '' -for k in d.keys(): # this complicated pattern to match all unittests - pat += '|' + r'^(\s*)' + 'self.' 
+ k + r'\(' # \tself.whatever( - -old_names = re.compile(pat[1:]) -linesep='\n' # nobody will really try to convert files not read - # in text mode, will they? - - -def blocksplitter(fp): - '''split a file into blocks that are headed by functions to rename''' - - blocklist = [] - blockstring = '' - - for line in fp: - interesting = old_names.match(line) - if interesting : - if blockstring: - blocklist.append(blockstring) - blockstring = line # reset the block - else: - blockstring += line - - blocklist.append(blockstring) - return blocklist - -def rewrite_utest(block): - '''rewrite every block to use the new utest functions''' - - '''returns the rewritten unittest, unless it ran into problems, - in which case it just returns the block unchanged. - ''' - utest = old_names.match(block) - - if not utest: - return block - - old = utest.group(0).lstrip()[5:-1] # the name we want to replace - new = d[old][0] # the name of the replacement function - op = d[old][1] # the operator you will use , or '' if there is none. - possible_args = d[old][2] # a list of the number of arguments the - # unittest function could possibly take. - - if possible_args == ['Any']: # just rename assertRaises & friends - return re.sub('self.'+old, new, block) - - message_pos = possible_args[-1] - # the remaining unittests can have an optional message to print - # when they fail. It is always the last argument to the function. - - try: - indent, argl, trailer = decompose_unittest(old, block) - - except SyntaxError: # but we couldn't parse it! 
- return block - - argnum = len(argl) - if argnum not in possible_args: - # sanity check - this one isn't real either - return block - - elif argnum == message_pos: - message = argl[-1] - argl = argl[:-1] - else: - message = None - - if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail() - string = '' - if message: - message = ' ' + message - - elif message_pos is 4: # assertAlmostEqual & friends - try: - pos = argl[2].lstrip() - except IndexError: - pos = '7' # default if none is specified - string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op ) - - else: # assert_, assertEquals and all the rest - string = ' ' + op.join(argl) - - if message: - string = string + ',' + message - - return indent + new + string + trailer - -def decompose_unittest(old, block): - '''decompose the block into its component parts''' - - ''' returns indent, arglist, trailer - indent -- the indentation - arglist -- the arguments to the unittest function - trailer -- any extra junk after the closing paren, such as #commment - ''' - - indent = re.match(r'(\s*)', block).group() - pat = re.search('self.' 
+ old + r'\(', block) - - args, trailer = get_expr(block[pat.end():], ')') - arglist = break_args(args, []) - - if arglist == ['']: # there weren't any - return indent, [], trailer - - for i in range(len(arglist)): - try: - parser.expr(arglist[i].lstrip('\t ')) - except SyntaxError: - if i == 0: - arglist[i] = '(' + arglist[i] + ')' - else: - arglist[i] = ' (' + arglist[i] + ')' - - return indent, arglist, trailer - -def break_args(args, arglist): - '''recursively break a string into a list of arguments''' - try: - first, rest = get_expr(args, ',') - if not rest: - return arglist + [first] - else: - return [first] + break_args(rest, arglist) - except SyntaxError: - return arglist + [args] - -def get_expr(s, char): - '''split a string into an expression, and the rest of the string''' - - pos=[] - for i in range(len(s)): - if s[i] == char: - pos.append(i) - if pos == []: - raise SyntaxError # we didn't find the expected char. Ick. - - for p in pos: - # make the python parser do the hard work of deciding which comma - # splits the string into two expressions - try: - parser.expr('(' + s[:p] + ')') - return s[:p], s[p+1:] - except SyntaxError: # It's not an expression yet - pass - raise SyntaxError # We never found anything that worked. - - -def main(): - import sys - import py - - usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]" - optparser = py.std.optparse.OptionParser(usage) - - def select_output (option, opt, value, optparser, **kw): - if hasattr(optparser, 'output'): - optparser.error( - 'Cannot combine -s -i and -c options. 
Use one only.') - else: - optparser.output = kw['output'] - - optparser.add_option("-s", "--stdout", action="callback", - callback=select_output, - callback_kwargs={'output':'stdout'}, - help="send your output to stdout") - - optparser.add_option("-i", "--inplace", action="callback", - callback=select_output, - callback_kwargs={'output':'inplace'}, - help="overwrite files in place") - - optparser.add_option("-c", "--copy", action="callback", - callback=select_output, - callback_kwargs={'output':'copy'}, - help="copy files ... fn.py --> fn_cp.py") - - options, args = optparser.parse_args() - - output = getattr(optparser, 'output', 'stdout') - - if output in ['inplace', 'copy'] and not args: - optparser.error( - '-i and -c option require at least one filename') - - if not args: - s = '' - for block in blocksplitter(sys.stdin): - s += rewrite_utest(block) - sys.stdout.write(s) - - else: - for infilename in args: # no error checking to see if we can open, etc. - infile = file(infilename) - s = '' - for block in blocksplitter(infile): - s += rewrite_utest(block) - if output == 'inplace': - outfile = file(infilename, 'w+') - elif output == 'copy': # yes, just go clobber any existing .cp - outfile = file (infilename[:-3]+ '_cp.py', 'w+') - else: - outfile = sys.stdout - - outfile.write(s) - - -if __name__ == '__main__': - main() diff --git a/py/_compat/dep_doctest.py b/py/_compat/dep_doctest.py deleted file mode 100644 --- a/py/_compat/dep_doctest.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", -stacklevel="apipkg") -doctest = py.std.doctest diff --git a/py/_test/__init__.py b/py/_test/__init__.py deleted file mode 100644 --- a/py/_test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -""" assertion and py.test helper API.""" diff --git a/py/_cmdline/__init__.py b/py/_cmdline/__init__.py deleted file mode 100644 --- a/py/_cmdline/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git 
a/py/_plugin/pytest_unittest.py b/py/_plugin/pytest_unittest.py deleted file mode 100644 --- a/py/_plugin/pytest_unittest.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -automatically discover and run traditional "unittest.py" style tests. - -Usage ----------------- - -This plugin collects and runs Python `unittest.py style`_ tests. -It will automatically collect ``unittest.TestCase`` subclasses -and their ``test`` methods from the test modules of a project -(usually following the ``test_*.py`` pattern). - -This plugin is enabled by default. - -.. _`unittest.py style`: http://docs.python.org/library/unittest.html -""" -import py -import sys - -def pytest_pycollect_makeitem(collector, name, obj): - if 'unittest' not in sys.modules: - return # nobody derived unittest.TestCase - try: - isunit = issubclass(obj, py.std.unittest.TestCase) - except KeyboardInterrupt: - raise - except Exception: - pass - else: - if isunit: - return UnitTestCase(name, parent=collector) - -class UnitTestCase(py.test.collect.Class): - def collect(self): - return [UnitTestCaseInstance("()", self)] - - def setup(self): - pass - - def teardown(self): - pass - -_dummy = object() -class UnitTestCaseInstance(py.test.collect.Instance): - def collect(self): - loader = py.std.unittest.TestLoader() - names = loader.getTestCaseNames(self.obj.__class__) - l = [] - for name in names: - callobj = getattr(self.obj, name) - if py.builtin.callable(callobj): - l.append(UnitTestFunction(name, parent=self)) - return l - - def _getobj(self): - x = self.parent.obj - return self.parent.obj(methodName='run') - -class UnitTestFunction(py.test.collect.Function): - def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None): - super(UnitTestFunction, self).__init__(name, parent) - self._args = args - if obj is not _dummy: - self._obj = obj - self._sort_value = sort_value - if hasattr(self.parent, 'newinstance'): - self.parent.newinstance() - self.obj = self._getobj() - - def runtest(self): - target = self.obj - args 
= self._args - target(*args) - - def setup(self): - instance = py.builtin._getimself(self.obj) - instance.setUp() - - def teardown(self): - instance = py.builtin._getimself(self.obj) - instance.tearDown() - diff --git a/py/_path/gateway/channeltest.py b/py/_path/gateway/channeltest.py deleted file mode 100644 --- a/py/_path/gateway/channeltest.py +++ /dev/null @@ -1,65 +0,0 @@ -import threading - - -class PathServer: - - def __init__(self, channel): - self.channel = channel - self.C2P = {} - self.next_id = 0 - threading.Thread(target=self.serve).start() - - def p2c(self, path): - id = self.next_id - self.next_id += 1 - self.C2P[id] = path - return id - - def command_LIST(self, id, *args): - path = self.C2P[id] - answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)] - self.channel.send(answer) - - def command_DEL(self, id): - del self.C2P[id] - - def command_GET(self, id, spec): - path = self.C2P[id] - self.channel.send(path._getbyspec(spec)) - - def command_READ(self, id): - path = self.C2P[id] - self.channel.send(path.read()) - - def command_JOIN(self, id, resultid, *args): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.join(*args) - - def command_DIRPATH(self, id, resultid): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.dirpath() - - def serve(self): - try: - while 1: - msg = self.channel.receive() - meth = getattr(self, 'command_' + msg[0]) - meth(*msg[1:]) - except EOFError: - pass - -if __name__ == '__main__': - import py - gw = execnet.PopenGateway() - channel = gw._channelfactory.new() - srv = PathServer(channel) - c = gw.remote_exec(""" - import remotepath - p = remotepath.RemotePath(channel.receive(), channel.receive()) - channel.send(len(p.listdir())) - """) - c.send(channel) - c.send(srv.p2c(py.path.local('/tmp'))) - print(c.receive()) diff --git a/pypy/tool/test/conftest1_innertest.py b/pypy/tool/test/conftest1_innertest.py deleted file mode 100644 --- 
a/pypy/tool/test/conftest1_innertest.py +++ /dev/null @@ -1,15 +0,0 @@ - -def test_something(space): - assert space.w_None is space.w_None - -def app_test_something(): - assert 42 == 42 - -class AppTestSomething: - def test_method_app(self): - assert 23 == 23 - -class TestSomething: - def test_method(self): - assert self.space - diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -229,6 +229,14 @@ op = ResOperation(opnum, args, result) self.optimizer.pure_operations[self.optimizer.make_args_key(op)] = op + def has_pure_result(self, opnum, args, descr): + op = ResOperation(opnum, args, None) + key = self.optimizer.make_args_key(op) + op = self.optimizer.pure_operations.get(key, None) + if op is None: + return False + return op.getdescr() is descr + def setup(self): pass diff --git a/py/_plugin/pytest_pastebin.py b/py/_plugin/pytest_pastebin.py deleted file mode 100644 --- a/py/_plugin/pytest_pastebin.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -submit failure or test session information to a pastebin service. - -Usage ----------- - -**Creating a URL for each test failure**:: - - py.test --pastebin=failed - -This will submit test run information to a remote Paste service and -provide a URL for each failure. You may select tests as usual or add -for example ``-x`` if you only want to send one particular failure. - -**Creating a URL for a whole test session log**:: - - py.test --pastebin=all - -Currently only pasting to the http://paste.pocoo.org service is implemented. 
- -""" -import py, sys - -class url: - base = "http://paste.pocoo.org" - xmlrpc = base + "/xmlrpc/" - show = base + "/show/" - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group._addoption('--pastebin', metavar="mode", - action='store', dest="pastebin", default=None, - type="choice", choices=['failed', 'all'], - help="send failed|all info to Pocoo pastebin service.") - -def pytest_configure(__multicall__, config): - import tempfile - __multicall__.execute() - if config.option.pastebin == "all": - config._pastebinfile = tempfile.TemporaryFile('w+') - tr = config.pluginmanager.getplugin('terminalreporter') - oldwrite = tr._tw.write - def tee_write(s, **kwargs): - oldwrite(s, **kwargs) - config._pastebinfile.write(str(s)) - tr._tw.write = tee_write - -def pytest_unconfigure(config): - if hasattr(config, '_pastebinfile'): - config._pastebinfile.seek(0) - sessionlog = config._pastebinfile.read() - config._pastebinfile.close() - del config._pastebinfile - proxyid = getproxy().newPaste("python", sessionlog) - pastebinurl = "%s%s" % (url.show, proxyid) - sys.stderr.write("pastebin session-log: %s\n" % pastebinurl) - tr = config.pluginmanager.getplugin('terminalreporter') - del tr._tw.__dict__['write'] - -def getproxy(): - return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes - -def pytest_terminal_summary(terminalreporter): - if terminalreporter.config.option.pastebin != "failed": - return - tr = terminalreporter - if 'failed' in tr.stats: - terminalreporter.write_sep("=", "Sending information to Paste Service") - if tr.config.option.debug: - terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,)) - serverproxy = getproxy() - for rep in terminalreporter.stats.get('failed'): - try: - msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc - except AttributeError: - msg = tr._getfailureheadline(rep) - tw = py.io.TerminalWriter(stringio=True) - rep.toterminal(tw) - s = tw.stringio.getvalue() - assert len(s) - proxyid = 
serverproxy.newPaste("python", s) - pastebinurl = "%s%s" % (url.show, proxyid) - tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/py/_test/collect.py b/py/_test/collect.py deleted file mode 100644 --- a/py/_test/collect.py +++ /dev/null @@ -1,418 +0,0 @@ -""" -test collection nodes, forming a tree, Items are leafs. -""" -import py - -def configproperty(name): - def fget(self): - #print "retrieving %r property from %s" %(name, self.fspath) - return self.config._getcollectclass(name, self.fspath) - return property(fget) - -class HookProxy: - def __init__(self, node): - self.node = node - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - hookmethod = getattr(self.node.config.hook, name) - def call_matching_hooks(**kwargs): - plugins = self.node.config._getmatchingplugins(self.node.fspath) - return hookmethod.pcall(plugins, **kwargs) - return call_matching_hooks - -class Node(object): - """ base class for all Nodes in the collection tree. - Collector subclasses have children, Items are terminal nodes. - """ - def __init__(self, name, parent=None, config=None): - self.name = name - self.parent = parent - self.config = config or parent.config - self.fspath = getattr(parent, 'fspath', None) - self.ihook = HookProxy(self) - - def _reraiseunpicklingproblem(self): - if hasattr(self, '_unpickle_exc'): - py.builtin._reraise(*self._unpickle_exc) - - # - # note to myself: Pickling is uh. 
- # - def __getstate__(self): - return (self.name, self.parent) - def __setstate__(self, nameparent): - name, parent = nameparent - try: - colitems = parent._memocollect() - for colitem in colitems: - if colitem.name == name: - # we are a copy that will not be returned - # by our parent - self.__dict__ = colitem.__dict__ - break - else: - raise ValueError("item %r not found in parent collection %r" %( - name, [x.name for x in colitems])) - except KeyboardInterrupt: - raise - except Exception: - # our parent can't collect us but we want unpickling to - # otherwise continue - self._reraiseunpicklingproblem() will - # reraise the problem - self._unpickle_exc = py.std.sys.exc_info() - self.name = name - self.parent = parent - self.config = parent.config - - def __repr__(self): - if getattr(self.config.option, 'debug', False): - return "<%s %r %0x>" %(self.__class__.__name__, - getattr(self, 'name', None), id(self)) - else: - return "<%s %r>" %(self.__class__.__name__, - getattr(self, 'name', None)) - - # methods for ordering nodes - - def __eq__(self, other): - if not isinstance(other, Node): - return False - return self.name == other.name and self.parent == other.parent - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.name, self.parent)) - - def setup(self): - pass - - def teardown(self): - pass - - def _memoizedcall(self, attrname, function): - exattrname = "_ex_" + attrname - failure = getattr(self, exattrname, None) - if failure is not None: - py.builtin._reraise(failure[0], failure[1], failure[2]) - if hasattr(self, attrname): - return getattr(self, attrname) - try: - res = function() - except (KeyboardInterrupt, SystemExit): - raise - except: - failure = py.std.sys.exc_info() - setattr(self, exattrname, failure) - raise - setattr(self, attrname, res) - return res - - def listchain(self): - """ return list of all parent collectors up to self, - starting from root of collection tree. 
""" - l = [self] - while 1: - x = l[0] - if x.parent is not None and x.parent.parent is not None: - l.insert(0, x.parent) - else: - return l - - def listnames(self): - return [x.name for x in self.listchain()] - - def getparent(self, cls): - current = self - while current and not isinstance(current, cls): - current = current.parent - return current - - def readkeywords(self): - return dict([(x, True) for x in self._keywords()]) - - def _keywords(self): - return [self.name] - - def _skipbykeyword(self, keywordexpr): - """ return True if they given keyword expression means to - skip this collector/item. - """ - if not keywordexpr: - return - chain = self.listchain() - for key in filter(None, keywordexpr.split()): - eor = key[:1] == '-' - if eor: - key = key[1:] - if not (eor ^ self._matchonekeyword(key, chain)): - return True - - def _matchonekeyword(self, key, chain): - elems = key.split(".") - # XXX O(n^2), anyone cares? - chain = [item.readkeywords() for item in chain if item._keywords()] - for start, _ in enumerate(chain): - if start + len(elems) > len(chain): - return False - for num, elem in enumerate(elems): - for keyword in chain[num + start]: - ok = False - if elem in keyword: - ok = True - break - if not ok: - break - if num == len(elems) - 1 and ok: - return True - return False - - def _prunetraceback(self, traceback): - return traceback - - def _repr_failure_py(self, excinfo, style=None): - excinfo.traceback = self._prunetraceback(excinfo.traceback) - # XXX should excinfo.getrepr record all data and toterminal() - # process it? - if style is None: - if self.config.option.tbstyle == "short": - style = "short" - else: - style = "long" - return excinfo.getrepr(funcargs=True, - showlocals=self.config.option.showlocals, - style=style) - - repr_failure = _repr_failure_py - shortfailurerepr = "F" - -class Collector(Node): - """ - Collector instances create children through collect() - and thus iteratively build a tree. 
attributes:: - - parent: attribute pointing to the parent collector - (or None if this is the root collector) - name: basename of this collector object - """ - Directory = configproperty('Directory') - Module = configproperty('Module') - - def collect(self): - """ returns a list of children (items and collectors) - for this collection node. - """ - raise NotImplementedError("abstract") - - def collect_by_name(self, name): - """ return a child matching the given name, else None. """ - for colitem in self._memocollect(): - if colitem.name == name: - return colitem - - def repr_failure(self, excinfo, outerr=None): - """ represent a failure. """ - assert outerr is None, "XXX deprecated" - return self._repr_failure_py(excinfo) - - def _memocollect(self): - """ internal helper method to cache results of calling collect(). """ - return self._memoizedcall('_collected', self.collect) - - # ********************************************************************** - # DEPRECATED METHODS - # ********************************************************************** - - def _deprecated_collect(self): - # avoid recursion: - # collect -> _deprecated_collect -> custom run() -> - # super().run() -> collect - attrname = '_depcollectentered' - if hasattr(self, attrname): - return - setattr(self, attrname, True) - method = getattr(self.__class__, 'run', None) - if method is not None and method != Collector.run: - warnoldcollect(function=method) - names = self.run() - return [x for x in [self.join(name) for name in names] if x] - - def run(self): - """ DEPRECATED: returns a list of names available from this collector. - You can return an empty list. Callers of this method - must take care to catch exceptions properly. - """ - return [colitem.name for colitem in self._memocollect()] - - def join(self, name): - """ DEPRECATED: return a child collector or item for the given name. - If the return value is None there is no such child. 
- """ - return self.collect_by_name(name) - - def _prunetraceback(self, traceback): - if hasattr(self, 'fspath'): - path = self.fspath - ntraceback = traceback.cut(path=self.fspath) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - -class FSCollector(Collector): - def __init__(self, fspath, parent=None, config=None): - fspath = py.path.local(fspath) - super(FSCollector, self).__init__(fspath.basename, parent, config=config) - self.fspath = fspath - - def __getstate__(self): - # RootCollector.getbynames() inserts a directory which we need - # to throw out here for proper re-instantiation - if isinstance(self.parent.parent, RootCollector): - assert self.parent.fspath == self.parent.parent.fspath, self.parent - return (self.name, self.parent.parent) # shortcut - return super(Collector, self).__getstate__() - -class File(FSCollector): - """ base class for collecting tests from a file. """ - -class Directory(FSCollector): - def recfilter(self, path): - if path.check(dir=1, dotfile=0): - return path.basename not in ('CVS', '_darcs', '{arch}') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - l = [] - for path in self.fspath.listdir(sort=True): - res = self.consider(path) - if res is not None: - if isinstance(res, (list, tuple)): - l.extend(res) - else: - l.append(res) - return l - - def consider(self, path): - if self.ihook.pytest_ignore_collect(path=path, config=self.config): - return - if path.check(file=1): - res = self.consider_file(path) - elif path.check(dir=1): - res = self.consider_dir(path) - else: - res = None - if isinstance(res, list): - # throw out identical results - l = [] - for x in res: - if x not in l: - assert x.parent == self, (x.parent, self) - assert x.fspath == path, (x.fspath, path) - l.append(x) - res = l - return res - - def consider_file(self, path): - return self.ihook.pytest_collect_file(path=path, parent=self) - - 
def consider_dir(self, path, usefilters=None): - if usefilters is not None: - py.log._apiwarn("0.99", "usefilters argument not needed") - return self.ihook.pytest_collect_directory(path=path, parent=self) - -class Item(Node): - """ a basic test item. """ - def _deprecated_testexecution(self): - if self.__class__.run != Item.run: - warnoldtestrun(function=self.run) - elif self.__class__.execute != Item.execute: - warnoldtestrun(function=self.execute) - else: - return False - self.run() - return True - - def run(self): - """ deprecated, here because subclasses might call it. """ - return self.execute(self.obj) - - def execute(self, obj): - """ deprecated, here because subclasses might call it. """ - return obj() - - def reportinfo(self): - return self.fspath, None, "" - -def warnoldcollect(function=None): - py.log._apiwarn("1.0", - "implement collector.collect() instead of " - "collector.run() and collector.join()", - stacklevel=2, function=function) - -def warnoldtestrun(function=None): - py.log._apiwarn("1.0", - "implement item.runtest() instead of " - "item.run() and item.execute()", - stacklevel=2, function=function) - - - -class RootCollector(Directory): - def __init__(self, config): - Directory.__init__(self, config.topdir, parent=None, config=config) - self.name = None - - def __repr__(self): - return "" %(self.fspath,) - - def getbynames(self, names): - current = self.consider(self.config.topdir) - while names: - name = names.pop(0) - if name == ".": # special "identity" name - continue - l = [] - for x in current._memocollect(): - if x.name == name: - l.append(x) - elif x.fspath == current.fspath.join(name): - l.append(x) - elif x.name == "()": - names.insert(0, name) - l.append(x) - break - if not l: - raise ValueError("no node named %r below %r" %(name, current)) - current = l[0] - return current - - def totrail(self, node): - chain = node.listchain() - names = [self._getrelpath(chain[0].fspath)] - names += [x.name for x in chain[1:]] - return names - - 
def fromtrail(self, trail): - return self.config._rootcol.getbynames(trail) - - def _getrelpath(self, fspath): - topdir = self.config.topdir - relpath = fspath.relto(topdir) - if not relpath: - if fspath == topdir: - relpath = "." - else: - raise ValueError("%r not relative to topdir %s" - %(self.fspath, topdir)) - return relpath - - def __getstate__(self): - return self.config - - def __setstate__(self, config): - self.__init__(config) diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -1,7 +1,7 @@ from pypy.jit.metainterp.optimizeopt.optimizer import Optimization, CONST_1, CONST_0 from pypy.jit.metainterp.optimizeutil import _findall from pypy.jit.metainterp.optimizeopt.intutils import IntBound, IntUnbounded, \ - IntLowerBound + IntLowerBound, IntUpperBound from pypy.jit.metainterp.history import Const, ConstInt from pypy.jit.metainterp.resoperation import rop, ResOperation @@ -31,7 +31,7 @@ self.nextop = op op = self.posponedop self.posponedop = None - + opnum = op.getopnum() for value, func in optimize_ops: if opnum == value: @@ -40,7 +40,7 @@ else: assert not op.is_ovf() self.emit_operation(op) - + def propagate_bounds_backward(self, box): # FIXME: This takes care of the instruction where box is the reuslt @@ -136,10 +136,12 @@ r = self.getvalue(op.result) b = v1.intbound.lshift_bound(v2.intbound) r.intbound.intersect(b) - if b.has_lower and b.has_upper: - # Synthesize the reverse op for optimize_default to reuse - self.pure(rop.INT_RSHIFT, [op.result, op.getarg(1)], op.getarg(0)) - + # --- The following is actually wrong if the INT_LSHIFT overflowed. 
+ # --- It is precisely the pattern we use to detect overflows of the + # --- app-level '<<' operator: INT_LSHIFT/INT_RSHIFT/INT_EQ + #if b.has_lower and b.has_upper: + # # Synthesize the reverse op for optimize_default to reuse + # self.pure(rop.INT_RSHIFT, [op.result, op.getarg(1)], op.getarg(0)) def optimize_INT_RSHIFT(self, op): v1 = self.getvalue(op.getarg(0)) @@ -166,7 +168,7 @@ # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_ADD, op.getarglist()[:], op.result) self.optimizer.overflow_guarded[op] = True - + def optimize_INT_SUB_OVF(self, op): v1 = self.getvalue(op.getarg(0)) @@ -186,7 +188,7 @@ # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_SUB, op.getarglist()[:], op.result) self.optimizer.overflow_guarded[op] = True - + def optimize_INT_MUL_OVF(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) @@ -205,7 +207,7 @@ # Synthesize the non overflowing op for optimize_default to reuse self.pure(rop.INT_MUL, op.getarglist()[:], op.result) self.optimizer.overflow_guarded[op] = True - + def optimize_INT_LT(self, op): v1 = self.getvalue(op.getarg(0)) @@ -276,6 +278,12 @@ v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) + def optimize_STRGETITEM(self, op): + self.emit_operation(op) + v1 = self.getvalue(op.result) + v1.intbound.make_ge(IntLowerBound(0)) + v1.intbound.make_lt(IntUpperBound(256)) + optimize_STRLEN = optimize_ARRAYLEN_GC optimize_UNICODELEN = optimize_ARRAYLEN_GC @@ -315,7 +323,7 @@ if r.box.same_constant(CONST_1): self.make_int_gt(op.getarg(0), op.getarg(1)) else: - self.make_int_le(op.getarg(0), op.getarg(1)) + self.make_int_le(op.getarg(0), op.getarg(1)) def propagate_bounds_INT_LE(self, op): r = self.getvalue(op.result) diff --git a/py/_plugin/pytest_capture.py b/py/_plugin/pytest_capture.py deleted file mode 100644 --- a/py/_plugin/pytest_capture.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -configurable per-test stdout/stderr capturing 
mechanisms. - -This plugin captures stdout/stderr output for each test separately. -In case of test failures this captured output is shown grouped -togtther with the test. - -The plugin also provides test function arguments that help to -assert stdout/stderr output from within your tests, see the -`funcarg example`_. - - -Capturing of input/output streams during tests ---------------------------------------------------- - -By default ``sys.stdout`` and ``sys.stderr`` are substituted with -temporary streams during the execution of tests and setup/teardown code. -During the whole testing process it will re-use the same temporary -streams allowing to play well with the logging module which easily -takes ownership on these streams. - -Also, 'sys.stdin' is substituted with a file-like "null" object that -does not return any values. This is to immediately error out -on tests that wait on reading something from stdin. - -You can influence output capturing mechanisms from the command line:: - - py.test -s # disable all capturing - py.test --capture=sys # replace sys.stdout/stderr with in-mem files - py.test --capture=fd # point filedescriptors 1 and 2 to temp file - -If you set capturing values in a conftest file like this:: - - # conftest.py - option_capture = 'fd' - -then all tests in that directory will execute with "fd" style capturing. - -sys-level capturing ------------------------------------------- - -Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` -will be replaced with in-memory files (``py.io.TextIO`` to be precise) -that capture writes and decode non-unicode strings to a unicode object -(using a default, usually, UTF-8, encoding). - -FD-level capturing and subprocesses ------------------------------------------- - -The ``fd`` based method means that writes going to system level files -based on the standard file descriptors will be captured, for example -writes such as ``os.write(1, 'hello')`` will be captured properly. 
-Capturing on fd-level will include output generated from -any subprocesses created during a test. - -.. _`funcarg example`: - -Example Usage of the capturing Function arguments ---------------------------------------------------- - -You can use the `capsys funcarg`_ and `capfd funcarg`_ to -capture writes to stdout and stderr streams. Using the -funcargs frees your test from having to care about setting/resetting -the old streams and also interacts well with py.test's own -per-test capturing. Here is an example test function: - -.. sourcecode:: python - - def test_myoutput(capsys): - print ("hello") - sys.stderr.write("world\\n") - out, err = capsys.readouterr() - assert out == "hello\\n" - assert err == "world\\n" - print "next" - out, err = capsys.readouterr() - assert out == "next\\n" - -The ``readouterr()`` call snapshots the output so far - -and capturing will be continued. After the test -function finishes the original streams will -be restored. If you want to capture on -the filedescriptor level you can use the ``capfd`` function -argument which offers the same interface. 
-""" - -import py -import os - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--capture', action="store", default=None, - metavar="method", type="choice", choices=['fd', 'sys', 'no'], - help="per-test capturing method: one of fd (default)|sys|no.") - group._addoption('-s', action="store_const", const="no", dest="capture", - help="shortcut for --capture=no.") - -def addouterr(rep, outerr): - repr = getattr(rep, 'longrepr', None) - if not hasattr(repr, 'addsection'): - return - for secname, content in zip(["out", "err"], outerr): - if content: - repr.addsection("Captured std%s" % secname, content.rstrip()) - -def pytest_configure(config): - config.pluginmanager.register(CaptureManager(), 'capturemanager') - -class NoCapture: - def startall(self): - pass - def resume(self): - pass - def suspend(self): - return "", "" - -class CaptureManager: - def __init__(self): - self._method2capture = {} - - def _maketempfile(self): - f = py.std.tempfile.TemporaryFile() - newf = py.io.dupfile(f, encoding="UTF-8") - return newf - - def _makestringio(self): - return py.io.TextIO() - - def _getcapture(self, method): - if method == "fd": - return py.io.StdCaptureFD(now=False, - out=self._maketempfile(), err=self._maketempfile() - ) - elif method == "sys": - return py.io.StdCapture(now=False, - out=self._makestringio(), err=self._makestringio() - ) - elif method == "no": - return NoCapture() - else: - raise ValueError("unknown capturing method: %r" % method) - - def _getmethod(self, config, fspath): - if config.option.capture: - method = config.option.capture - else: - try: - method = config._conftest.rget("option_capture", path=fspath) - except KeyError: - method = "fd" - if method == "fd" and not hasattr(os, 'dup'): # e.g. 
jython - method = "sys" - return method - - def resumecapture_item(self, item): - method = self._getmethod(item.config, item.fspath) - if not hasattr(item, 'outerr'): - item.outerr = ('', '') # we accumulate outerr on the item - return self.resumecapture(method) - - def resumecapture(self, method): - if hasattr(self, '_capturing'): - raise ValueError("cannot resume, already capturing with %r" % - (self._capturing,)) - cap = self._method2capture.get(method) - self._capturing = method - if cap is None: - self._method2capture[method] = cap = self._getcapture(method) - cap.startall() - else: - cap.resume() - - def suspendcapture(self, item=None): - self.deactivate_funcargs() - if hasattr(self, '_capturing'): - method = self._capturing - cap = self._method2capture.get(method) - if cap is not None: - outerr = cap.suspend() - del self._capturing - if item: - outerr = (item.outerr[0] + outerr[0], - item.outerr[1] + outerr[1]) - return outerr - return "", "" - - def activate_funcargs(self, pyfuncitem): - if not hasattr(pyfuncitem, 'funcargs'): - return - assert not hasattr(self, '_capturing_funcargs') - self._capturing_funcargs = capturing_funcargs = [] - for name, capfuncarg in pyfuncitem.funcargs.items(): - if name in ('capsys', 'capfd'): - capturing_funcargs.append(capfuncarg) - capfuncarg._start() - - def deactivate_funcargs(self): - capturing_funcargs = getattr(self, '_capturing_funcargs', None) - if capturing_funcargs is not None: - while capturing_funcargs: - capfuncarg = capturing_funcargs.pop() - capfuncarg._finalize() - del self._capturing_funcargs - - def pytest_make_collect_report(self, __multicall__, collector): - method = self._getmethod(collector.config, collector.fspath) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - addouterr(rep, outerr) - return rep - - def pytest_runtest_setup(self, item): - self.resumecapture_item(item) - - def pytest_runtest_call(self, item): - 
self.resumecapture_item(item) - self.activate_funcargs(item) - - def pytest_runtest_teardown(self, item): - self.resumecapture_item(item) - - def pytest__teardown_final(self, __multicall__, session): - method = self._getmethod(session.config, None) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - if rep: - addouterr(rep, outerr) - return rep - - def pytest_keyboard_interrupt(self, excinfo): - if hasattr(self, '_capturing'): - self.suspendcapture() - - def pytest_runtest_makereport(self, __multicall__, item, call): - self.deactivate_funcargs() - rep = __multicall__.execute() - outerr = self.suspendcapture(item) - if not rep.passed: - addouterr(rep, outerr) - if not rep.passed or rep.when == "teardown": - outerr = ('', '') - item.outerr = outerr - return rep - -def pytest_funcarg__capsys(request): - """captures writes to sys.stdout/sys.stderr and makes - them available successively via a ``capsys.readouterr()`` method - which returns a ``(out, err)`` tuple of captured snapshot strings. - """ - return CaptureFuncarg(request, py.io.StdCapture) - -def pytest_funcarg__capfd(request): - """captures writes to file descriptors 1 and 2 and makes - snapshotted ``(out, err)`` string tuples available - via the ``capsys.readouterr()`` method. If the underlying - platform does not have ``os.dup`` (e.g. Jython) tests using - this funcarg will automatically skip. 
- """ - if not hasattr(os, 'dup'): - py.test.skip("capfd funcarg needs os.dup") - return CaptureFuncarg(request, py.io.StdCaptureFD) - - -class CaptureFuncarg: - def __init__(self, request, captureclass): - self._cclass = captureclass - self.capture = self._cclass(now=False) - #request.addfinalizer(self._finalize) - - def _start(self): - self.capture.startall() - - def _finalize(self): - if hasattr(self, 'capture'): - self.capture.reset() - del self.capture - - def readouterr(self): - return self.capture.readouterr() - - def close(self): - self._finalize() diff --git a/py/_compat/__init__.py b/py/_compat/__init__.py deleted file mode 100644 --- a/py/_compat/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" compatibility modules (taken from 2.4.4) """ - diff --git a/py/_compat/dep_subprocess.py b/py/_compat/dep_subprocess.py deleted file mode 100644 --- a/py/_compat/dep_subprocess.py +++ /dev/null @@ -1,5 +0,0 @@ - -import py -py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", -stacklevel="apipkg") -subprocess = py.std.subprocess diff --git a/py/_plugin/pytest_pylint.py b/py/_plugin/pytest_pylint.py deleted file mode 100644 --- a/py/_plugin/pytest_pylint.py +++ /dev/null @@ -1,36 +0,0 @@ -"""pylint plugin - -XXX: Currently in progress, NOT IN WORKING STATE. 
-""" -import py - -pylint = py.test.importorskip("pylint.lint") - -def pytest_addoption(parser): - group = parser.getgroup('pylint options') - group.addoption('--pylint', action='store_true', - default=False, dest='pylint', - help='run pylint on python files.') - -def pytest_collect_file(path, parent): - if path.ext == ".py": - if parent.config.getvalue('pylint'): - return PylintItem(path, parent) - -#def pytest_terminal_summary(terminalreporter): -# print 'placeholder for pylint output' - -class PylintItem(py.test.collect.Item): - def runtest(self): - capture = py.io.StdCaptureFD() - try: - linter = pylint.lint.PyLinter() - linter.check(str(self.fspath)) - finally: - out, err = capture.reset() - rating = out.strip().split('\n')[-1] - sys.stdout.write(">>>") - print(rating) - assert 0 - - diff --git a/py/_test/conftesthandle.py b/py/_test/conftesthandle.py deleted file mode 100644 --- a/py/_test/conftesthandle.py +++ /dev/null @@ -1,113 +0,0 @@ -import py - -class Conftest(object): - """ the single place for accessing values and interacting - towards conftest modules from py.test objects. - - (deprecated) - Note that triggering Conftest instances to import - conftest.py files may result in added cmdline options. - """ - def __init__(self, onimport=None, confcutdir=None): - self._path2confmods = {} - self._onimport = onimport - self._conftestpath2mod = {} - self._confcutdir = confcutdir - - def setinitial(self, args): - """ try to find a first anchor path for looking up global values - from conftests. This function is usually called _before_ - argument parsing. conftest files may add command line options - and we thus have no completely safe way of determining - which parts of the arguments are actually related to options - and which are file system paths. We just try here to get - bootstrapped ... 
- """ - current = py.path.local() - opt = '--confcutdir' - for i in range(len(args)): - opt1 = str(args[i]) - if opt1.startswith(opt): - if opt1 == opt: - if len(args) > i: - p = current.join(args[i+1], abs=True) - elif opt1.startswith(opt + "="): - p = current.join(opt1[len(opt)+1:], abs=1) - self._confcutdir = p - break - for arg in args + [current]: - anchor = current.join(arg, abs=1) - if anchor.check(): # we found some file object - self._path2confmods[None] = self.getconftestmodules(anchor) - # let's also consider test* dirs - if anchor.check(dir=1): - for x in anchor.listdir(lambda x: x.check(dir=1, dotfile=0)): - self.getconftestmodules(x) - break - else: - assert 0, "no root of filesystem?" - - def getconftestmodules(self, path): - """ return a list of imported conftest modules for the given path. """ - try: - clist = self._path2confmods[path] - except KeyError: - if path is None: - raise ValueError("missing default confest.") - dp = path.dirpath() - if dp == path: - clist = [] - else: - cutdir = self._confcutdir - clist = self.getconftestmodules(dp) - if cutdir and path != cutdir and not path.relto(cutdir): - pass - else: - conftestpath = path.join("conftest.py") - if conftestpath.check(file=1): - clist.append(self.importconftest(conftestpath)) - self._path2confmods[path] = clist - # be defensive: avoid changes from caller side to - # affect us by always returning a copy of the actual list - return clist[:] - - def rget(self, name, path=None): - mod, value = self.rget_with_confmod(name, path) - return value - - def rget_with_confmod(self, name, path=None): - modules = self.getconftestmodules(path) - modules.reverse() - for mod in modules: - try: - return mod, getattr(mod, name) - except AttributeError: - continue - raise KeyError(name) - - def importconftest(self, conftestpath): - assert conftestpath.check(), conftestpath - try: - return self._conftestpath2mod[conftestpath] - except KeyError: - if not conftestpath.dirpath('__init__.py').check(file=1): - # 
HACK: we don't want any "globally" imported conftest.py, - # prone to conflicts and subtle problems - modname = str(conftestpath).replace('.', conftestpath.sep) - mod = conftestpath.pyimport(modname=modname) - else: - mod = conftestpath.pyimport() - self._conftestpath2mod[conftestpath] = mod - dirpath = conftestpath.dirpath() - if dirpath in self._path2confmods: - for path, mods in self._path2confmods.items(): - if path and path.relto(dirpath) or path == dirpath: - assert mod not in mods - mods.append(mod) - self._postimport(mod) - return mod - - def _postimport(self, mod): - if self._onimport: - self._onimport(mod) - return mod diff --git a/py/_test/pycollect.py b/py/_test/pycollect.py deleted file mode 100644 --- a/py/_test/pycollect.py +++ /dev/null @@ -1,399 +0,0 @@ -""" -Python related collection nodes. -""" -import py -import inspect -from py._test.collect import configproperty, warnoldcollect -from py._test import funcargs -from py._code.code import TerminalRepr - -class PyobjMixin(object): - def obj(): - def fget(self): - try: - return self._obj - except AttributeError: - self._obj = obj = self._getobj() - return obj - def fset(self, value): - self._obj = value - return property(fget, fset, None, "underlying python object") - obj = obj() - - def _getobj(self): - return getattr(self.parent.obj, self.name) - - def getmodpath(self, stopatmodule=True, includemodule=False): - """ return python path relative to the containing module. 
""" - chain = self.listchain() - chain.reverse() - parts = [] - for node in chain: - if isinstance(node, Instance): - continue - name = node.name - if isinstance(node, Module): - assert name.endswith(".py") - name = name[:-3] - if stopatmodule: - if includemodule: - parts.append(name) - break - parts.append(name) - parts.reverse() - s = ".".join(parts) - return s.replace(".[", "[") - - def _getfslineno(self): - try: - return self._fslineno - except AttributeError: - pass - obj = self.obj - # xxx let decorators etc specify a sane ordering - if hasattr(obj, 'place_as'): - obj = obj.place_as - - self._fslineno = py.code.getfslineno(obj) - return self._fslineno - - def reportinfo(self): - fspath, lineno = self._getfslineno() - modpath = self.getmodpath() - return fspath, lineno, modpath - -class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): - Class = configproperty('Class') - Instance = configproperty('Instance') - Function = configproperty('Function') - Generator = configproperty('Generator') - - def funcnamefilter(self, name): - return name.startswith('test') - def classnamefilter(self, name): - return name.startswith('Test') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - # NB. 
we avoid random getattrs and peek in the __dict__ instead - dicts = [getattr(self.obj, '__dict__', {})] - for basecls in inspect.getmro(self.obj.__class__): - dicts.append(basecls.__dict__) - seen = {} - l = [] - for dic in dicts: - for name, obj in dic.items(): - if name in seen: - continue - seen[name] = True - if name[0] != "_": - res = self.makeitem(name, obj) - if res is None: - continue - if not isinstance(res, list): - res = [res] - l.extend(res) - l.sort(key=lambda item: item.reportinfo()[:2]) - return l - - def _deprecated_join(self, name): - if self.__class__.join != py.test.collect.Collector.join: - warnoldcollect() - return self.join(name) - - def makeitem(self, name, obj): - return self.ihook.pytest_pycollect_makeitem( - collector=self, name=name, obj=obj) - - def _istestclasscandidate(self, name, obj): - if self.classnamefilter(name) and \ - inspect.isclass(obj): - if hasinit(obj): - # XXX WARN - return False - return True - - def _genfunctions(self, name, funcobj): - module = self.getparent(Module).obj - clscol = self.getparent(Class) - cls = clscol and clscol.obj or None - metafunc = funcargs.Metafunc(funcobj, config=self.config, - cls=cls, module=module) - gentesthook = self.config.hook.pytest_generate_tests - plugins = funcargs.getplugins(self, withpy=True) - gentesthook.pcall(plugins, metafunc=metafunc) - if not metafunc._calls: - return self.Function(name, parent=self) - l = [] - for callspec in metafunc._calls: - subname = "%s[%s]" %(name, callspec.id) - function = self.Function(name=subname, parent=self, - callspec=callspec, callobj=funcobj) - l.append(function) - return l - -class Module(py.test.collect.File, PyCollectorMixin): - def _getobj(self): - return self._memoizedcall('_obj', self._importtestmodule) - - def _importtestmodule(self): - # we assume we are only called once per module - mod = self.fspath.pyimport() - #print "imported test module", mod - self.config.pluginmanager.consider_module(mod) - return mod - - def setup(self): - if 
getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - if hasattr(self.obj, 'setup_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.setup_module)[0]: - self.obj.setup_module(self.obj) - else: - self.obj.setup_module() - - def teardown(self): - if hasattr(self.obj, 'teardown_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.teardown_module)[0]: - self.obj.teardown_module(self.obj) - else: - self.obj.teardown_module() - -class Class(PyCollectorMixin, py.test.collect.Collector): - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - return [self.Instance(name="()", parent=self)] - - def setup(self): - if getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - setup_class = getattr(self.obj, 'setup_class', None) - if setup_class is not None: - setup_class = getattr(setup_class, 'im_func', setup_class) - setup_class(self.obj) - - def teardown(self): - teardown_class = getattr(self.obj, 'teardown_class', None) - if teardown_class is not None: - teardown_class = getattr(teardown_class, 'im_func', teardown_class) - teardown_class(self.obj) - -class Instance(PyCollectorMixin, py.test.collect.Collector): - def _getobj(self): - return self.parent.obj() - def Function(self): - return getattr(self.obj, 'Function', - PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2 - def _keywords(self): - return [] - Function = 
property(Function) - - #def __repr__(self): - # return "<%s of '%s'>" %(self.__class__.__name__, - # self.parent.obj.__name__) - - def newinstance(self): - self.obj = self._getobj() - return self.obj - -class FunctionMixin(PyobjMixin): - """ mixin for the code common to Function and Generator. - """ - - def setup(self): - """ perform setup for this test function. """ - if inspect.ismethod(self.obj): - name = 'setup_method' - else: - name = 'setup_function' - if isinstance(self.parent, Instance): - obj = self.parent.newinstance() - self.obj = self._getobj() - else: - obj = self.parent.obj - setup_func_or_method = getattr(obj, name, None) - if setup_func_or_method is not None: - setup_func_or_method(self.obj) - - def teardown(self): - """ perform teardown for this test function. """ - if inspect.ismethod(self.obj): - name = 'teardown_method' - else: - name = 'teardown_function' - obj = self.parent.obj - teardown_func_or_meth = getattr(obj, name, None) - if teardown_func_or_meth is not None: - teardown_func_or_meth(self.obj) - - def _prunetraceback(self, traceback): - if hasattr(self, '_obj') and not self.config.option.fulltrace: - code = py.code.Code(self.obj) - path, firstlineno = code.path, code.firstlineno - ntraceback = traceback.cut(path=path, firstlineno=firstlineno) - if ntraceback == traceback: - ntraceback = ntraceback.cut(path=path) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - - def _repr_failure_py(self, excinfo, style="long"): - if excinfo.errisinstance(funcargs.FuncargRequest.LookupError): - fspath, lineno, msg = self.reportinfo() - lines, _ = inspect.getsourcelines(self.obj) - for i, line in enumerate(lines): - if line.strip().startswith('def'): - return FuncargLookupErrorRepr(fspath, lineno, - lines[:i+1], str(excinfo.value)) - return super(FunctionMixin, self)._repr_failure_py(excinfo, - style=style) - - def repr_failure(self, excinfo, outerr=None): - assert 
outerr is None, "XXX outerr usage is deprecated" - return self._repr_failure_py(excinfo, - style=self.config.getvalue("tbstyle")) - - shortfailurerepr = "F" - -class FuncargLookupErrorRepr(TerminalRepr): - def __init__(self, filename, firstlineno, deflines, errorstring): - self.deflines = deflines - self.errorstring = errorstring - self.filename = filename - self.firstlineno = firstlineno - - def toterminal(self, tw): - tw.line() - for line in self.deflines: - tw.line(" " + line.strip()) - for line in self.errorstring.split("\n"): - tw.line(" " + line.strip(), red=True) - tw.line() - tw.line("%s:%d" % (self.filename, self.firstlineno+1)) - -class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): - def collect(self): - # test generators are seen as collectors but they also - # invoke setup/teardown on popular request - # (induced by the common "test_*" naming shared with normal tests) - self.config._setupstate.prepare(self) - l = [] - seen = {} - for i, x in enumerate(self.obj()): - name, call, args = self.getcallargs(x) - if not py.builtin.callable(call): - raise TypeError("%r yielded non callable test %r" %(self.obj, call,)) - if name is None: - name = "[%d]" % i - else: - name = "['%s']" % name - if name in seen: - raise ValueError("%r generated tests with non-unique name %r" %(self, name)) - seen[name] = True - l.append(self.Function(name, self, args=args, callobj=call)) - return l - - def getcallargs(self, obj): - if not isinstance(obj, (tuple, list)): - obj = (obj,) - # explict naming - if isinstance(obj[0], py.builtin._basestring): - name = obj[0] - obj = obj[1:] - else: - name = None - call, args = obj[0], obj[1:] - return name, call, args - - -# -# Test Items -# -_dummy = object() -class Function(FunctionMixin, py.test.collect.Item): - """ a Function Item is responsible for setting up - and executing a Python callable test object. 
- """ - _genid = None - def __init__(self, name, parent=None, args=None, config=None, - callspec=None, callobj=_dummy): - super(Function, self).__init__(name, parent, config=config) - self._args = args - if self._isyieldedfunction(): - assert not callspec, "yielded functions (deprecated) cannot have funcargs" - else: - if callspec is not None: - self.funcargs = callspec.funcargs or {} - self._genid = callspec.id - if hasattr(callspec, "param"): - self._requestparam = callspec.param - else: - self.funcargs = {} - if callobj is not _dummy: - self._obj = callobj - self.function = getattr(self.obj, 'im_func', self.obj) - - def _getobj(self): - name = self.name - i = name.find("[") # parametrization - if i != -1: - name = name[:i] - return getattr(self.parent.obj, name) - - def _isyieldedfunction(self): - return self._args is not None - - def readkeywords(self): - d = super(Function, self).readkeywords() - d.update(py.builtin._getfuncdict(self.obj)) - return d - - def runtest(self): - """ execute the underlying test function. """ - self.ihook.pytest_pyfunc_call(pyfuncitem=self) - - def setup(self): - super(Function, self).setup() - if hasattr(self, 'funcargs'): - funcargs.fillfuncargs(self) - - def __eq__(self, other): - try: - return (self.name == other.name and - self._args == other._args and - self.parent == other.parent and - self.obj == other.obj and - getattr(self, '_genid', None) == - getattr(other, '_genid', None) - ) - except AttributeError: - pass - return False - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.parent, self.name)) - -def hasinit(obj): - init = getattr(obj, '__init__', None) - if init: - if init != object.__init__: - return True diff --git a/py/_plugin/pytest_skipping.py b/py/_plugin/pytest_skipping.py deleted file mode 100644 --- a/py/_plugin/pytest_skipping.py +++ /dev/null @@ -1,347 +0,0 @@ -""" -advanced skipping for python test functions, classes or modules. 
- -With this plugin you can mark test functions for conditional skipping -or as "xfail", expected-to-fail. Skipping a test will avoid running it -while xfail-marked tests will run and result in an inverted outcome: -a pass becomes a failure and a fail becomes a semi-passing one. - -The need for skipping a test is usually connected to a condition. -If a test fails under all conditions then it's probably better -to mark your test as 'xfail'. - -By passing ``-rxs`` to the terminal reporter you will see extra -summary information on skips and xfail-run tests at the end of a test run. - -.. _skipif: - -Skipping a single function -------------------------------------------- - -Here is an example for marking a test function to be skipped -when run on a Python3 interpreter:: - - @py.test.mark.skipif("sys.version_info >= (3,0)") - def test_function(): - ... - -During test function setup the skipif condition is -evaluated by calling ``eval(expr, namespace)``. The namespace -contains the ``sys`` and ``os`` modules and the test -``config`` object. The latter allows you to skip based -on a test configuration value e.g. like this:: - - @py.test.mark.skipif("not config.getvalue('db')") - def test_function(...): - ... - -Create a shortcut for your conditional skip decorator -at module level like this:: - - win32only = py.test.mark.skipif("sys.platform != 'win32'") - - @win32only - def test_function(): - ... - - -skip groups of test functions --------------------------------------- - -As with all metadata function marking you can do it at -`whole class- or module level`_. Here is an example -for skipping all methods of a test class based on platform:: - - class TestPosixCalls: - pytestmark = py.test.mark.skipif("sys.platform == 'win32'") - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -The ``pytestmark`` decorator will be applied to each test function. 
-If your code targets python2.6 or above you can equivalently use -the skipif decorator on classes:: - - @py.test.mark.skipif("sys.platform == 'win32'") - class TestPosixCalls: - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -It is fine in general to apply multiple "skipif" decorators -on a single function - this means that if any of the conditions -apply the function will be skipped. - -.. _`whole class- or module level`: mark.html#scoped-marking - - -mark a test function as **expected to fail** -------------------------------------------------------- - -You can use the ``xfail`` marker to indicate that you -expect the test to fail:: - - @py.test.mark.xfail - def test_function(): - ... - -This test will be run but no traceback will be reported -when it fails. Instead terminal reporting will list it in the -"expected to fail" or "unexpectedly passing" sections. - -Same as with skipif_ you can also selectively expect a failure -depending on platform:: - - @py.test.mark.xfail("sys.version_info >= (3,0)") - def test_function(): - ... - -To not run a test and still regard it as "xfailed":: - - @py.test.mark.xfail(..., run=False) - -To specify an explicit reason to be shown with xfailure detail:: - - @py.test.mark.xfail(..., reason="my reason") - -imperative xfail from within a test or setup function ------------------------------------------------------- - -If you cannot declare xfail-conditions at import time -you can also imperatively produce an XFail-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.xfail("unsuppored configuration") - - -skipping on a missing import dependency --------------------------------------------------- - -You can use the following import helper at module level -or within a test or test setup function:: - - docutils = py.test.importorskip("docutils") - -If ``docutils`` cannot be imported here, this will lead to a -skip outcome of the test. 
You can also skip dependeing if -if a library does not come with a high enough version:: - - docutils = py.test.importorskip("docutils", minversion="0.3") - -The version will be read from the specified module's ``__version__`` attribute. - -imperative skip from within a test or setup function ------------------------------------------------------- - -If for some reason you cannot declare skip-conditions -you can also imperatively produce a Skip-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.skip("unsuppored configuration") - -""" - -import py - -def pytest_addoption(parser): - group = parser.getgroup("general") - group.addoption('--runxfail', - action="store_true", dest="runxfail", default=False, - help="run tests even if they are marked xfail") - -class MarkEvaluator: - def __init__(self, item, name): - self.item = item - self.name = name - self.holder = getattr(item.obj, name, None) - - def __bool__(self): - return bool(self.holder) - __nonzero__ = __bool__ - - def istrue(self): - if self.holder: - d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} - if self.holder.args: - self.result = False - for expr in self.holder.args: - self.expr = expr - if isinstance(expr, str): - result = cached_eval(self.item.config, expr, d) - else: - result = expr - if result: - self.result = True - self.expr = expr - break - else: - self.result = True - return getattr(self, 'result', False) - - def get(self, attr, default=None): - return self.holder.kwargs.get(attr, default) - - def getexplanation(self): - expl = self.get('reason', None) - if not expl: - if not hasattr(self, 'expr'): - return "" - else: - return "condition: " + self.expr - return expl - - -def pytest_runtest_setup(item): - if not isinstance(item, py.test.collect.Function): - return - evalskip = MarkEvaluator(item, 'skipif') - if evalskip.istrue(): - py.test.skip(evalskip.getexplanation()) - item._evalxfail = MarkEvaluator(item, 'xfail') 
- if not item.config.getvalue("runxfail"): - if item._evalxfail.istrue(): - if not item._evalxfail.get('run', True): - py.test.skip("xfail") - -def pytest_runtest_makereport(__multicall__, item, call): - if not isinstance(item, py.test.collect.Function): - return - if not (call.excinfo and - call.excinfo.errisinstance(py.test.xfail.Exception)): - evalxfail = getattr(item, '_evalxfail', None) - if not evalxfail: - return - if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception): - if not item.config.getvalue("runxfail"): - rep = __multicall__.execute() - rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg - rep.skipped = True - rep.failed = False - return rep - if call.when == "setup": - rep = __multicall__.execute() - if rep.skipped and evalxfail.istrue(): - expl = evalxfail.getexplanation() - if not evalxfail.get("run", True): - expl = "[NOTRUN] " + expl - rep.keywords['xfail'] = expl - return rep - elif call.when == "call": - rep = __multicall__.execute() - if not item.config.getvalue("runxfail") and evalxfail.istrue(): - if call.excinfo: - rep.skipped = True - rep.failed = rep.passed = False - else: - rep.skipped = rep.passed = False - rep.failed = True - rep.keywords['xfail'] = evalxfail.getexplanation() - else: - if 'xfail' in rep.keywords: - del rep.keywords['xfail'] - return rep - -# called by terminalreporter progress reporting -def pytest_report_teststatus(report): - if 'xfail' in report.keywords: - if report.skipped: - return "xfailed", "x", "xfail" - elif report.failed: - return "xpassed", "X", "XPASS" - -# called by the terminalreporter instance/plugin -def pytest_terminal_summary(terminalreporter): - tr = terminalreporter - if not tr.reportchars: - #for name in "xfailed skipped failed xpassed": - # if not tr.stats.get(name, 0): - # tr.write_line("HINT: use '-r' option to see extra " - # "summary info about tests") - # break - return - - lines = [] - for char in tr.reportchars: - if char == "x": - show_xfailed(terminalreporter, 
lines) - elif char == "X": - show_xpassed(terminalreporter, lines) - elif char == "f": - show_failed(terminalreporter, lines) - elif char == "s": - show_skipped(terminalreporter, lines) - if lines: - tr._tw.sep("=", "short test summary info") - for line in lines: - tr._tw.line(line) - -def show_failed(terminalreporter, lines): - tw = terminalreporter._tw - failed = terminalreporter.stats.get("failed") - if failed: - for rep in failed: - pos = terminalreporter.gettestid(rep.item) - lines.append("FAIL %s" %(pos, )) - -def show_xfailed(terminalreporter, lines): - xfailed = terminalreporter.stats.get("xfailed") - if xfailed: - for rep in xfailed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XFAIL %s %s" %(pos, reason)) - -def show_xpassed(terminalreporter, lines): - xpassed = terminalreporter.stats.get("xpassed") - if xpassed: - for rep in xpassed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XPASS %s %s" %(pos, reason)) - -def cached_eval(config, expr, d): - if not hasattr(config, '_evalcache'): - config._evalcache = {} - try: - return config._evalcache[expr] - except KeyError: - #import sys - #print >>sys.stderr, ("cache-miss: %r" % expr) - config._evalcache[expr] = x = eval(expr, d) - return x - - -def folded_skips(skipped): - d = {} - for event in skipped: - entry = event.longrepr.reprcrash - key = entry.path, entry.lineno, entry.message - d.setdefault(key, []).append(event) - l = [] - for key, events in d.items(): - l.append((len(events),) + key) - return l - -def show_skipped(terminalreporter, lines): - tr = terminalreporter - skipped = tr.stats.get('skipped', []) - if skipped: - #if not tr.hasopt('skipped'): - # tr.write_line( - # "%d skipped tests, specify -rs for more info" % - # len(skipped)) - # return - fskips = folded_skips(skipped) - if fskips: - #tr.write_sep("_", "skipped test summary") - for num, fspath, lineno, reason in fskips: - if 
reason.startswith("Skipped: "): - reason = reason[9:] - lines.append("SKIP [%d] %s:%d: %s" % - (num, fspath, lineno, reason)) diff --git a/py/_plugin/pytest_recwarn.py b/py/_plugin/pytest_recwarn.py deleted file mode 100644 --- a/py/_plugin/pytest_recwarn.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -helpers for asserting deprecation and other warnings. - -Example usage ---------------------- - -You can use the ``recwarn`` funcarg to track -warnings within a test function: - -.. sourcecode:: python - - def test_hello(recwarn): - from warnings import warn - warn("hello", DeprecationWarning) - w = recwarn.pop(DeprecationWarning) - assert issubclass(w.category, DeprecationWarning) - assert 'hello' in str(w.message) - assert w.filename - assert w.lineno - -You can also call a global helper for checking -taht a certain function call yields a Deprecation -warning: - -.. sourcecode:: python - - import py - - def test_global(): - py.test.deprecated_call(myfunction, 17) - - -""" - -import py -import os - -def pytest_funcarg__recwarn(request): - """Return a WarningsRecorder instance that provides these methods: - - * ``pop(category=None)``: return last warning matching the category. - * ``clear()``: clear list of warnings - """ - warnings = WarningsRecorder() - request.addfinalizer(warnings.finalize) - return warnings - -def pytest_namespace(): - return {'deprecated_call': deprecated_call} - -def deprecated_call(func, *args, **kwargs): - """ assert that calling func(*args, **kwargs) - triggers a DeprecationWarning. 
- """ - warningmodule = py.std.warnings - l = [] - oldwarn_explicit = getattr(warningmodule, 'warn_explicit') - def warn_explicit(*args, **kwargs): - l.append(args) - oldwarn_explicit(*args, **kwargs) - oldwarn = getattr(warningmodule, 'warn') - def warn(*args, **kwargs): - l.append(args) - oldwarn(*args, **kwargs) - - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - try: - ret = func(*args, **kwargs) - finally: - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - if not l: - #print warningmodule - __tracebackhide__ = True - raise AssertionError("%r did not produce DeprecationWarning" %(func,)) - return ret - - -class RecordedWarning: - def __init__(self, message, category, filename, lineno, line): - self.message = message - self.category = category - self.filename = filename - self.lineno = lineno - self.line = line - -class WarningsRecorder: - def __init__(self): - warningmodule = py.std.warnings - self.list = [] - def showwarning(message, category, filename, lineno, line=0): - self.list.append(RecordedWarning( - message, category, filename, lineno, line)) - try: - self.old_showwarning(message, category, - filename, lineno, line=line) - except TypeError: - # < python2.6 - self.old_showwarning(message, category, filename, lineno) - self.old_showwarning = warningmodule.showwarning - warningmodule.showwarning = showwarning - - def pop(self, cls=Warning): - """ pop the first recorded warning, raise exception if not exists.""" - for i, w in enumerate(self.list): - if issubclass(w.category, cls): - return self.list.pop(i) - __tracebackhide__ = True - assert 0, "%r not found in %r" %(cls, self.list) - - #def resetregistry(self): - # import warnings - # warnings.onceregistry.clear() - # warnings.__warningregistry__.clear() - - def clear(self): - self.list[:] = [] - - def finalize(self): - py.std.warnings.showwarning = self.old_showwarning diff --git a/py/bin/env.py b/py/bin/env.py deleted file mode 100644 --- 
a/py/bin/env.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys, os, os.path - -progpath = sys.argv[0] -packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath))) -packagename = os.path.basename(packagedir) -bindir = os.path.join(packagedir, 'bin') -if sys.platform == 'win32': - bindir = os.path.join(bindir, 'win32') -rootdir = os.path.dirname(packagedir) - -def prepend_path(name, value): - sep = os.path.pathsep - curpath = os.environ.get(name, '') - newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ] - return setenv(name, sep.join(newpath)) - -def setenv(name, value): - shell = os.environ.get('SHELL', '') - comspec = os.environ.get('COMSPEC', '') - if shell.endswith('csh'): - cmd = 'setenv %s "%s"' % (name, value) - elif shell.endswith('sh'): - cmd = '%s="%s"; export %s' % (name, value, name) - elif comspec.endswith('cmd.exe'): - cmd = 'set %s=%s' % (name, value) - else: - assert False, 'Shell not supported.' - return cmd - -print(prepend_path('PATH', bindir)) -print(prepend_path('PYTHONPATH', rootdir)) diff --git a/py/_plugin/pytest_nose.py b/py/_plugin/pytest_nose.py deleted file mode 100644 --- a/py/_plugin/pytest_nose.py +++ /dev/null @@ -1,98 +0,0 @@ -"""nose-compatibility plugin: allow to run nose test suites natively. - -This is an experimental plugin for allowing to run tests written -in 'nosetests style with py.test. - -Usage -------------- - -type:: - - py.test # instead of 'nosetests' - -and you should be able to run nose style tests and at the same -time can make full use of py.test's capabilities. - -Supported nose Idioms ----------------------- - -* setup and teardown at module/class/method level -* SkipTest exceptions and markers -* setup/teardown decorators -* yield-based tests and their setup -* general usage of nose utilities - -Unsupported idioms / issues ----------------------------------- - -- nose-style doctests are not collected and executed correctly, - also fixtures don't work. 
- -- no nose-configuration is recognized - -If you find other issues or have suggestions please run:: - - py.test --pastebin=all - -and send the resulting URL to a py.test contact channel, -at best to the mailing list. -""" -import py -import inspect -import sys - -def pytest_runtest_makereport(__multicall__, item, call): - SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None) - if SkipTest: - if call.excinfo and call.excinfo.errisinstance(SkipTest): - # let's substitute the excinfo with a py.test.skip one - call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when) - call.excinfo = call2.excinfo - -def pytest_report_iteminfo(item): - # nose 0.11.1 uses decorators for "raises" and other helpers. - # for reporting progress by filename we fish for the filename - if isinstance(item, py.test.collect.Function): - obj = item.obj - if hasattr(obj, 'compat_co_firstlineno'): - fn = sys.modules[obj.__module__].__file__ - if fn.endswith(".pyc"): - fn = fn[:-1] - #assert 0 - #fn = inspect.getsourcefile(obj) or inspect.getfile(obj) - lineno = obj.compat_co_firstlineno - return py.path.local(fn), lineno, obj.__module__ - -def pytest_runtest_setup(item): - if isinstance(item, (py.test.collect.Function)): - if isinstance(item.parent, py.test.collect.Generator): - gen = item.parent - if not hasattr(gen, '_nosegensetup'): - call_optional(gen.obj, 'setup') - if isinstance(gen.parent, py.test.collect.Instance): - call_optional(gen.parent.obj, 'setup') - gen._nosegensetup = True - if not call_optional(item.obj, 'setup'): - # call module level setup if there is no object level one - call_optional(item.parent.obj, 'setup') - -def pytest_runtest_teardown(item): - if isinstance(item, py.test.collect.Function): - if not call_optional(item.obj, 'teardown'): - call_optional(item.parent.obj, 'teardown') - #if hasattr(item.parent, '_nosegensetup'): - # #call_optional(item._nosegensetup, 'teardown') - # del item.parent._nosegensetup - -def 
pytest_make_collect_report(collector): - if isinstance(collector, py.test.collect.Generator): - call_optional(collector.obj, 'setup') - -def call_optional(obj, name): - method = getattr(obj, name, None) - if method: - ismethod = inspect.ismethod(method) - rawcode = py.code.getrawcode(method) - if not rawcode.co_varnames[ismethod:]: - method() - return True diff --git a/py/_plugin/pytest_pytester.py b/py/_plugin/pytest_pytester.py deleted file mode 100644 --- a/py/_plugin/pytest_pytester.py +++ /dev/null @@ -1,500 +0,0 @@ -""" -funcargs and support code for testing py.test's own functionality. -""" - -import py -import sys, os -import re -import inspect -import time -from py._test.config import Config as pytestConfig -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("pylib") - group.addoption('--tools-on-path', - action="store_true", dest="toolsonpath", default=False, - help=("discover tools on PATH instead of going through py.cmdline.") - ) - -pytest_plugins = '_pytest' - -def pytest_funcarg__linecomp(request): - return LineComp() - -def pytest_funcarg__LineMatcher(request): - return LineMatcher - -def pytest_funcarg__testdir(request): - tmptestdir = TmpTestdir(request) - return tmptestdir - -rex_outcome = re.compile("(\d+) (\w+)") -class RunResult: - def __init__(self, ret, outlines, errlines, duration): - self.ret = ret - self.outlines = outlines - self.errlines = errlines - self.stdout = LineMatcher(outlines) - self.stderr = LineMatcher(errlines) - self.duration = duration - - def parseoutcomes(self): - for line in reversed(self.outlines): - if 'seconds' in line: - outcomes = rex_outcome.findall(line) - if outcomes: - d = {} - for num, cat in outcomes: - d[cat] = int(num) - return d - -class TmpTestdir: - def __init__(self, request): - self.request = request - self._pytest = request.getfuncargvalue("_pytest") - # XXX remove duplication with tmpdir plugin - basetmp = request.config.ensuretemp("testdir") - name = 
request.function.__name__ - for i in range(100): - try: - tmpdir = basetmp.mkdir(name + str(i)) - except py.error.EEXIST: - continue - break - # we need to create another subdir - # because Directory.collect() currently loads - # conftest.py from sibling directories - self.tmpdir = tmpdir.mkdir(name) - self.plugins = [] - self._syspathremove = [] - self.chdir() # always chdir - self.request.addfinalizer(self.finalize) - - def __repr__(self): - return "" % (self.tmpdir,) - - def Config(self, topdir=None): - if topdir is None: - topdir = self.tmpdir.dirpath() - return pytestConfig(topdir=topdir) - - def finalize(self): - for p in self._syspathremove: - py.std.sys.path.remove(p) - if hasattr(self, '_olddir'): - self._olddir.chdir() - # delete modules that have been loaded from tmpdir - for name, mod in list(sys.modules.items()): - if mod: - fn = getattr(mod, '__file__', None) - if fn and fn.startswith(str(self.tmpdir)): - del sys.modules[name] - - def getreportrecorder(self, obj): - if hasattr(obj, 'config'): - obj = obj.config - if hasattr(obj, 'hook'): - obj = obj.hook - assert hasattr(obj, '_hookspecs'), obj - reprec = ReportRecorder(obj) - reprec.hookrecorder = self._pytest.gethookrecorder(obj) - reprec.hook = reprec.hookrecorder.hook - return reprec - - def chdir(self): - old = self.tmpdir.chdir() - if not hasattr(self, '_olddir'): - self._olddir = old - - def _makefile(self, ext, args, kwargs): - items = list(kwargs.items()) - if args: - source = "\n".join(map(str, args)) + "\n" - basename = self.request.function.__name__ - items.insert(0, (basename, source)) - ret = None - for name, value in items: - p = self.tmpdir.join(name).new(ext=ext) - source = str(py.code.Source(value)).lstrip() - p.write(source.encode("utf-8"), "wb") - if ret is None: - ret = p - return ret - - - def makefile(self, ext, *args, **kwargs): - return self._makefile(ext, args, kwargs) - - def makeconftest(self, source): - return self.makepyfile(conftest=source) - - def makepyfile(self, 
*args, **kwargs): - return self._makefile('.py', args, kwargs) - - def maketxtfile(self, *args, **kwargs): - return self._makefile('.txt', args, kwargs) - - def syspathinsert(self, path=None): - if path is None: - path = self.tmpdir - py.std.sys.path.insert(0, str(path)) - self._syspathremove.append(str(path)) - - def mkdir(self, name): - return self.tmpdir.mkdir(name) - - def mkpydir(self, name): - p = self.mkdir(name) - p.ensure("__init__.py") - return p - - def genitems(self, colitems): - return list(self.session.genitems(colitems)) - - def inline_genitems(self, *args): - #config = self.parseconfig(*args) - config = self.parseconfig(*args) - session = config.initsession() - rec = self.getreportrecorder(config) - colitems = [config.getnode(arg) for arg in config.args] - items = list(session.genitems(colitems)) - return items, rec - - def runitem(self, source): - # used from runner functional tests - item = self.getitem(source) - # the test class where we are called from wants to provide the runner - testclassinstance = py.builtin._getimself(self.request.function) - runner = testclassinstance.getrunner() - return runner(item) - - def inline_runsource(self, source, *cmdlineargs): - p = self.makepyfile(source) - l = list(cmdlineargs) + [p] - return self.inline_run(*l) - - def inline_runsource1(self, *args): - args = list(args) - source = args.pop() - p = self.makepyfile(source) - l = list(args) + [p] - reprec = self.inline_run(*l) - reports = reprec.getreports("pytest_runtest_logreport") - assert len(reports) == 1, reports - return reports[0] - - def inline_run(self, *args): - args = ("-s", ) + args # otherwise FD leakage - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - session = config.initsession() - reprec = self.getreportrecorder(config) - colitems = config.getinitialnodes() - session.main(colitems) - config.pluginmanager.do_unconfigure(config) - return reprec - - def config_preparse(self): - config = self.Config() - for plugin in 
self.plugins: - if isinstance(plugin, str): - config.pluginmanager.import_plugin(plugin) - else: - if isinstance(plugin, dict): - plugin = PseudoPlugin(plugin) - if not config.pluginmanager.isregistered(plugin): - config.pluginmanager.register(plugin) - return config - - def parseconfig(self, *args): - if not args: - args = (self.tmpdir,) - config = self.config_preparse() - args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')] - config.parse(args) - return config - - def reparseconfig(self, args=None): - """ this is used from tests that want to re-invoke parse(). """ - if not args: - args = [self.tmpdir] - from py._test import config - oldconfig = config.config_per_process # py.test.config - try: - c = config.config_per_process = py.test.config = pytestConfig() - c.basetemp = oldconfig.mktemp("reparse", numbered=True) - c.parse(args) - return c - finally: - config.config_per_process = py.test.config = oldconfig - - def parseconfigure(self, *args): - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - return config - - def getitem(self, source, funcname="test_func"): - modcol = self.getmodulecol(source) - moditems = modcol.collect() - for item in modcol.collect(): - if item.name == funcname: - return item - else: - assert 0, "%r item not found in module:\n%s" %(funcname, source) - - def getitems(self, source): - modcol = self.getmodulecol(source) - return list(modcol.config.initsession().genitems([modcol])) - #assert item is not None, "%r item not found in module:\n%s" %(funcname, source) - #return item - - def getfscol(self, path, configargs=()): - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - return self.config.getnode(path) - - def getmodulecol(self, source, configargs=(), withinit=False): - kw = {self.request.function.__name__: py.code.Source(source).strip()} - path = self.makepyfile(**kw) - if withinit: - self.makepyfile(__init__ = "#") - self.config = 
self.parseconfig(path, *configargs) - self.session = self.config.initsession() - #self.config.pluginmanager.do_configure(config=self.config) - # XXX - self.config.pluginmanager.import_plugin("runner") - plugin = self.config.pluginmanager.getplugin("runner") - plugin.pytest_configure(config=self.config) - - return self.config.getnode(path) - - def popen(self, cmdargs, stdout, stderr, **kw): - if not hasattr(py.std, 'subprocess'): - py.test.skip("no subprocess module") - env = os.environ.copy() - env['PYTHONPATH'] = ":".join(filter(None, [ - str(os.getcwd()), env.get('PYTHONPATH', '')])) - kw['env'] = env - #print "env", env - return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) - - def run(self, *cmdargs): - return self._run(*cmdargs) - - def _run(self, *cmdargs): - cmdargs = [str(x) for x in cmdargs] - p1 = self.tmpdir.join("stdout") - p2 = self.tmpdir.join("stderr") - print_("running", cmdargs, "curdir=", py.path.local()) - f1 = p1.open("wb") - f2 = p2.open("wb") - now = time.time() - popen = self.popen(cmdargs, stdout=f1, stderr=f2, - close_fds=(sys.platform != "win32")) - ret = popen.wait() - f1.close() - f2.close() - out = p1.read("rb") - out = getdecoded(out).splitlines() - err = p2.read("rb") - err = getdecoded(err).splitlines() - def dump_lines(lines, fp): - try: - for line in lines: - py.builtin.print_(line, file=fp) - except UnicodeEncodeError: - print("couldn't print to %s because of encoding" % (fp,)) - dump_lines(out, sys.stdout) - dump_lines(err, sys.stderr) - return RunResult(ret, out, err, time.time()-now) - - def runpybin(self, scriptname, *args): - fullargs = self._getpybinargs(scriptname) + args - return self.run(*fullargs) - - def _getpybinargs(self, scriptname): - if self.request.config.getvalue("toolsonpath"): - script = py.path.local.sysfind(scriptname) - assert script, "script %r not found" % scriptname - return (script,) - else: - cmdlinename = scriptname.replace(".", "") - assert hasattr(py.cmdline, cmdlinename), 
cmdlinename - source = ("import sys;sys.path.insert(0,%r);" - "import py;py.cmdline.%s()" % - (str(py._pydir.dirpath()), cmdlinename)) - return (sys.executable, "-c", source,) - - def runpython(self, script): - s = self._getsysprepend() - if s: - script.write(s + "\n" + script.read()) - return self.run(sys.executable, script) - - def _getsysprepend(self): - if not self.request.config.getvalue("toolsonpath"): - s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath()) - else: - s = "" - return s - - def runpython_c(self, command): - command = self._getsysprepend() + command - return self.run(py.std.sys.executable, "-c", command) - - def runpytest(self, *args): - p = py.path.local.make_numbered_dir(prefix="runpytest-", - keep=None, rootdir=self.tmpdir) - args = ('--basetemp=%s' % p, ) + args - plugins = [x for x in self.plugins if isinstance(x, str)] - if plugins: - args = ('-p', plugins[0]) + args - return self.runpybin("py.test", *args) - - def spawn_pytest(self, string, expect_timeout=10.0): - pexpect = py.test.importorskip("pexpect", "2.4") - if not self.request.config.getvalue("toolsonpath"): - py.test.skip("need --tools-on-path to run py.test script") - basetemp = self.tmpdir.mkdir("pexpect") - invoke = self._getpybinargs("py.test")[0] - cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) - child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w")) - child.timeout = expect_timeout - return child - -def getdecoded(out): - try: - return out.decode("utf-8") - except UnicodeDecodeError: - return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( - py.io.saferepr(out),) - -class PseudoPlugin: - def __init__(self, vars): - self.__dict__.update(vars) - -class ReportRecorder(object): - def __init__(self, hook): - self.hook = hook - self.registry = hook._registry - self.registry.register(self) - - def getcall(self, name): - return self.hookrecorder.getcall(name) - - def popcall(self, name): - return self.hookrecorder.popcall(name) - - 
def getcalls(self, names): - """ return list of ParsedCall instances matching the given eventname. """ - return self.hookrecorder.getcalls(names) - - # functionality for test reports - - def getreports(self, names="pytest_runtest_logreport pytest_collectreport"): - return [x.report for x in self.getcalls(names)] - - def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"): - """ return a testreport whose dotted import path matches """ - l = [] - for rep in self.getreports(names=names): - colitem = rep.getnode() - if not inamepart or inamepart in colitem.listnames(): - l.append(rep) - if not l: - raise ValueError("could not find test report matching %r: no test reports at all!" % - (inamepart,)) - if len(l) > 1: - raise ValueError("found more than one testreport matching %r: %s" %( - inamepart, l)) - return l[0] - - def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'): - return [rep for rep in self.getreports(names) if rep.failed] - - def getfailedcollections(self): - return self.getfailures('pytest_collectreport') - - def listoutcomes(self): - passed = [] - skipped = [] - failed = [] - for rep in self.getreports("pytest_runtest_logreport"): - if rep.passed: - if rep.when == "call": - passed.append(rep) - elif rep.skipped: - skipped.append(rep) - elif rep.failed: - failed.append(rep) - return passed, skipped, failed - - def countoutcomes(self): - return [len(x) for x in self.listoutcomes()] - - def assertoutcome(self, passed=0, skipped=0, failed=0): - realpassed, realskipped, realfailed = self.listoutcomes() - assert passed == len(realpassed) - assert skipped == len(realskipped) - assert failed == len(realfailed) - - def clear(self): - self.hookrecorder.calls[:] = [] - - def unregister(self): - self.registry.unregister(self) - self.hookrecorder.finish_recording() - -class LineComp: - def __init__(self): - self.stringio = py.io.TextIO() - - def assert_contains_lines(self, lines2): - """ assert that lines2 are 
contained (linearly) in lines1. - return a list of extralines found. - """ - __tracebackhide__ = True - val = self.stringio.getvalue() - self.stringio.truncate(0) - self.stringio.seek(0) - lines1 = val.split("\n") - return LineMatcher(lines1).fnmatch_lines(lines2) - -class LineMatcher: - def __init__(self, lines): - self.lines = lines - - def str(self): - return "\n".join(self.lines) - - def fnmatch_lines(self, lines2): - if isinstance(lines2, str): - lines2 = py.code.Source(lines2) - if isinstance(lines2, py.code.Source): - lines2 = lines2.strip().lines - - from fnmatch import fnmatch - lines1 = self.lines[:] - nextline = None - extralines = [] - __tracebackhide__ = True - for line in lines2: - nomatchprinted = False - while lines1: - nextline = lines1.pop(0) - if line == nextline: - print_("exact match:", repr(line)) - break - elif fnmatch(nextline, line): - print_("fnmatch:", repr(line)) - print_(" with:", repr(nextline)) - break - else: - if not nomatchprinted: - print_("nomatch:", repr(line)) - nomatchprinted = True - print_(" and:", repr(nextline)) - extralines.append(nextline) - else: - assert line == nextline diff --git a/py/_plugin/pytest_default.py b/py/_plugin/pytest_default.py deleted file mode 100644 --- a/py/_plugin/pytest_default.py +++ /dev/null @@ -1,131 +0,0 @@ -""" default hooks and general py.test options. 
""" - -import sys -import py - -def pytest_pyfunc_call(__multicall__, pyfuncitem): - if not __multicall__.execute(): - testfunction = pyfuncitem.obj - if pyfuncitem._isyieldedfunction(): - testfunction(*pyfuncitem._args) - else: - funcargs = pyfuncitem.funcargs - testfunction(**funcargs) - -def pytest_collect_file(path, parent): - ext = path.ext - pb = path.purebasename - if pb.startswith("test_") or pb.endswith("_test") or \ - path in parent.config._argfspaths: - if ext == ".py": - return parent.ihook.pytest_pycollect_makemodule( - path=path, parent=parent) - -def pytest_pycollect_makemodule(path, parent): - return parent.Module(path, parent) - -def pytest_funcarg__pytestconfig(request): - """ the pytest config object with access to command line opts.""" - return request.config - -def pytest_ignore_collect(path, config): - ignore_paths = config.getconftest_pathlist("collect_ignore", path=path) - ignore_paths = ignore_paths or [] - excludeopt = config.getvalue("ignore") - if excludeopt: - ignore_paths.extend([py.path.local(x) for x in excludeopt]) - return path in ignore_paths - # XXX more refined would be: - if ignore_paths: - for p in ignore_paths: - if path == p or path.relto(p): - return True - - -def pytest_collect_directory(path, parent): - # XXX reconsider the following comment - # not use parent.Directory here as we generally - # want dir/conftest.py to be able to - # define Directory(dir) already - if not parent.recfilter(path): # by default special ".cvs", ... 
- # check if cmdline specified this dir or a subdir directly - for arg in parent.config._argfspaths: - if path == arg or arg.relto(path): - break - else: - return - Directory = parent.config._getcollectclass('Directory', path) - return Directory(path, parent=parent) - -def pytest_report_iteminfo(item): - return item.reportinfo() - -def pytest_addoption(parser): - group = parser.getgroup("general", "running and selection options") - group._addoption('-x', '--exitfirst', action="store_true", default=False, - dest="exitfirst", - help="exit instantly on first error or failed test."), - group._addoption('--maxfail', metavar="num", - action="store", type="int", dest="maxfail", default=0, - help="exit after first num failures or errors.") - group._addoption('-k', - action="store", dest="keyword", default='', - help="only run test items matching the given " - "space separated keywords. precede a keyword with '-' to negate. " - "Terminate the expression with ':' to treat a match as a signal " - "to run all subsequent tests. 
") - - group = parser.getgroup("collect", "collection") - group.addoption('--collectonly', - action="store_true", dest="collectonly", - help="only collect tests, don't execute them."), - group.addoption("--ignore", action="append", metavar="path", - help="ignore path during collection (multi-allowed).") - group.addoption('--confcutdir', dest="confcutdir", default=None, - metavar="dir", - help="only load conftest.py's relative to specified dir.") - - group = parser.getgroup("debugconfig", - "test process debugging and configuration") - group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", - help="base temporary directory for this test run.") - -def pytest_configure(config): - setsession(config) - # compat - if config.getvalue("exitfirst"): - config.option.maxfail = 1 - -def setsession(config): - val = config.getvalue - if val("collectonly"): - from py._test.session import Session - config.setsessionclass(Session) - -# pycollect related hooks and code, should move to pytest_pycollect.py - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - res = __multicall__.execute() - if res is not None: - return res - if collector._istestclasscandidate(name, obj): - res = collector._deprecated_join(name) - if res is not None: - return res - return collector.Class(name, parent=collector) - elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): - res = collector._deprecated_join(name) - if res is not None: - return res - if is_generator(obj): - # XXX deprecation warning - return collector.Generator(name, parent=collector) - else: - return collector._genfunctions(name, obj) - -def is_generator(func): - try: - return py.code.getrawcode(func).co_flags & 32 # generator function - except AttributeError: # builtin functions have no bytecode - # assume them to not be generators - return False diff --git a/py/_plugin/pytest_monkeypatch.py b/py/_plugin/pytest_monkeypatch.py deleted file mode 100644 --- a/py/_plugin/pytest_monkeypatch.py 
+++ /dev/null @@ -1,141 +0,0 @@ -""" -safely patch object attributes, dicts and environment variables. - -Usage ----------------- - -Use the `monkeypatch funcarg`_ to tweak your global test environment -for running a particular test. You can safely set/del an attribute, -dictionary item or environment variable by respective methods -on the monkeypatch funcarg. If you want e.g. to set an ENV1 variable -and have os.path.expanduser return a particular directory, you can -write it down like this: - -.. sourcecode:: python - - def test_mytest(monkeypatch): - monkeypatch.setenv('ENV1', 'myval') - monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz') - ... # your test code that uses those patched values implicitely - -After the test function finished all modifications will be undone, -because the ``monkeypatch.undo()`` method is registered as a finalizer. - -``monkeypatch.setattr/delattr/delitem/delenv()`` all -by default raise an Exception if the target does not exist. -Pass ``raising=False`` if you want to skip this check. - -prepending to PATH or other environment variables ---------------------------------------------------------- - -To prepend a value to an already existing environment parameter: - -.. sourcecode:: python - - def test_mypath_finding(monkeypatch): - monkeypatch.setenv('PATH', 'x/y', prepend=":") - # in bash language: export PATH=x/y:$PATH - -calling "undo" finalization explicitely ------------------------------------------ - -At the end of function execution py.test invokes -a teardown hook which undoes all monkeypatch changes. -If you do not want to wait that long you can call -finalization explicitely:: - - monkeypatch.undo() - -This will undo previous changes. This call consumes the -undo stack. Calling it a second time has no effect unless -you start monkeypatching after the undo call. - -.. 
_`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/ -""" - -import py, os, sys - -def pytest_funcarg__monkeypatch(request): - """The returned ``monkeypatch`` funcarg provides these - helper methods to modify objects, dictionaries or os.environ:: - - monkeypatch.setattr(obj, name, value, raising=True) - monkeypatch.delattr(obj, name, raising=True) - monkeypatch.setitem(mapping, name, value) - monkeypatch.delitem(obj, name, raising=True) - monkeypatch.setenv(name, value, prepend=False) - monkeypatch.delenv(name, value, raising=True) - monkeypatch.syspath_prepend(path) - - All modifications will be undone when the requesting - test function finished its execution. The ``raising`` - parameter determines if a KeyError or AttributeError - will be raised if the set/deletion operation has no target. - """ - monkeypatch = MonkeyPatch() - request.addfinalizer(monkeypatch.undo) - return monkeypatch - -notset = object() - -class MonkeyPatch: - def __init__(self): - self._setattr = [] - self._setitem = [] - - def setattr(self, obj, name, value, raising=True): - oldval = getattr(obj, name, notset) - if raising and oldval is notset: - raise AttributeError("%r has no attribute %r" %(obj, name)) - self._setattr.insert(0, (obj, name, oldval)) - setattr(obj, name, value) - - def delattr(self, obj, name, raising=True): - if not hasattr(obj, name): - if raising: - raise AttributeError(name) - else: - self._setattr.insert(0, (obj, name, getattr(obj, name, notset))) - delattr(obj, name) - - def setitem(self, dic, name, value): - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - dic[name] = value - - def delitem(self, dic, name, raising=True): - if name not in dic: - if raising: - raise KeyError(name) - else: - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - del dic[name] - - def setenv(self, name, value, prepend=None): - value = str(value) - if prepend and name in os.environ: - value = value + prepend + 
os.environ[name] - self.setitem(os.environ, name, value) - - def delenv(self, name, raising=True): - self.delitem(os.environ, name, raising=raising) - - def syspath_prepend(self, path): - if not hasattr(self, '_savesyspath'): - self._savesyspath = sys.path[:] - sys.path.insert(0, str(path)) - - def undo(self): - for obj, name, value in self._setattr: - if value is not notset: - setattr(obj, name, value) - else: - delattr(obj, name) - self._setattr[:] = [] - for dictionary, name, value in self._setitem: - if value is notset: - del dictionary[name] - else: - dictionary[name] = value - self._setitem[:] = [] - if hasattr(self, '_savesyspath'): - sys.path[:] = self._savesyspath diff --git a/py/_plugin/hookspec.py b/py/_plugin/hookspec.py deleted file mode 100644 --- a/py/_plugin/hookspec.py +++ /dev/null @@ -1,172 +0,0 @@ -""" -hook specifications for py.test plugins -""" - -# ------------------------------------------------------------------------- -# Command line and configuration -# ------------------------------------------------------------------------- - -def pytest_namespace(): - "return dict of name->object which will get stored at py.test. namespace" - -def pytest_addoption(parser): - "add optparse-style options via parser.addoption." - -def pytest_addhooks(pluginmanager): - "add hooks via pluginmanager.registerhooks(module)" - -def pytest_configure(config): - """ called after command line options have been parsed. - and all plugins and initial conftest files been loaded. - """ - -def pytest_unconfigure(config): - """ called before test process is exited. """ - -# ------------------------------------------------------------------------- -# collection hooks -# ------------------------------------------------------------------------- - -def pytest_ignore_collect(path, config): - """ return true value to prevent considering this path for collection. - This hook is consulted for all files and directories prior to considering - collection hooks. 
- """ -pytest_ignore_collect.firstresult = True - -def pytest_collect_directory(path, parent): - """ return Collection node or None for the given path. """ -pytest_collect_directory.firstresult = True - -def pytest_collect_file(path, parent): - """ return Collection node or None for the given path. """ - -def pytest_collectstart(collector): - """ collector starts collecting. """ - -def pytest_collectreport(report): - """ collector finished collecting. """ - -def pytest_deselected(items): - """ called for test items deselected by keyword. """ - -def pytest_make_collect_report(collector): - """ perform a collection and return a collection. """ -pytest_make_collect_report.firstresult = True - -# XXX rename to item_collected()? meaning in distribution context? -def pytest_itemstart(item, node=None): - """ test item gets collected. """ - -# ------------------------------------------------------------------------- -# Python test function related hooks -# ------------------------------------------------------------------------- - -def pytest_pycollect_makemodule(path, parent): - """ return a Module collector or None for the given path. - This hook will be called for each matching test module path. - The pytest_collect_file hook needs to be used if you want to - create test modules for files that do not match as a test module. - """ -pytest_pycollect_makemodule.firstresult = True - -def pytest_pycollect_makeitem(collector, name, obj): - """ return custom item/collector for a python object in a module, or None. """ -pytest_pycollect_makeitem.firstresult = True - -def pytest_pyfunc_call(pyfuncitem): - """ call underlying test function. 
""" -pytest_pyfunc_call.firstresult = True - -def pytest_generate_tests(metafunc): - """ generate (multiple) parametrized calls to a test function.""" - -# ------------------------------------------------------------------------- -# generic runtest related hooks -# ------------------------------------------------------------------------- - -def pytest_runtest_protocol(item): - """ implement fixture, run and report about the given test item. """ -pytest_runtest_protocol.firstresult = True - -def pytest_runtest_setup(item): - """ called before pytest_runtest_call(). """ - -def pytest_runtest_call(item): - """ execute test item. """ - -def pytest_runtest_teardown(item): - """ called after pytest_runtest_call(). """ - -def pytest_runtest_makereport(item, call): - """ make a test report for the given item and call outcome. """ -pytest_runtest_makereport.firstresult = True - -def pytest_runtest_logreport(report): - """ process item test report. """ - -# special handling for final teardown - somewhat internal for now -def pytest__teardown_final(session): - """ called before test session finishes. """ -pytest__teardown_final.firstresult = True - -def pytest__teardown_final_logerror(report): - """ called if runtest_teardown_final failed. """ - -# ------------------------------------------------------------------------- -# test session related hooks -# ------------------------------------------------------------------------- - -def pytest_sessionstart(session): - """ before session.main() is called. """ - -def pytest_sessionfinish(session, exitstatus): - """ whole test run finishes. 
""" - -# ------------------------------------------------------------------------- -# hooks for influencing reporting (invoked from pytest_terminal) -# ------------------------------------------------------------------------- - -def pytest_report_header(config): - """ return a string to be displayed as header info for terminal reporting.""" - -def pytest_report_teststatus(report): - """ return result-category, shortletter and verbose word for reporting.""" -pytest_report_teststatus.firstresult = True - -def pytest_terminal_summary(terminalreporter): - """ add additional section in terminal summary reporting. """ - -def pytest_report_iteminfo(item): - """ return (fspath, lineno, name) for the item. - the information is used for result display and to sort tests - """ -pytest_report_iteminfo.firstresult = True - -# ------------------------------------------------------------------------- -# doctest hooks -# ------------------------------------------------------------------------- - -def pytest_doctest_prepare_content(content): - """ return processed content for a given doctest""" -pytest_doctest_prepare_content.firstresult = True - - -# ------------------------------------------------------------------------- -# error handling and internal debugging hooks -# ------------------------------------------------------------------------- - -def pytest_plugin_registered(plugin, manager): - """ a new py lib plugin got registered. """ - -def pytest_plugin_unregistered(plugin): - """ a py lib plugin got unregistered. """ - -def pytest_internalerror(excrepr): - """ called for internal errors. """ - -def pytest_keyboard_interrupt(excinfo): - """ called for keyboard interrupt. """ - -def pytest_trace(category, msg): - """ called for debug info. 
""" diff --git a/py/bin/py.lookup b/py/bin/py.lookup deleted file mode 100755 --- a/py/bin/py.lookup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pylookup() \ No newline at end of file diff --git a/py/_cmdline/pysvnwcrevert.py b/py/_cmdline/pysvnwcrevert.py deleted file mode 100755 --- a/py/_cmdline/pysvnwcrevert.py +++ /dev/null @@ -1,55 +0,0 @@ -#! /usr/bin/env python -"""\ -py.svnwcrevert [options] WCPATH - -Running this script and then 'svn up' puts the working copy WCPATH in a state -as clean as a fresh check-out. - -WARNING: you'll loose all local changes, obviously! - -This script deletes all files that have been modified -or that svn doesn't explicitly know about, including svn:ignored files -(like .pyc files, hint hint). - -The goal of this script is to leave the working copy with some files and -directories possibly missing, but - most importantly - in a state where -the following 'svn up' won't just crash. -""" - -import sys, py - -def kill(p, root): - print('< %s' % (p.relto(root),)) - p.remove(rec=1) - -def svnwcrevert(path, root=None, precious=[]): - if root is None: - root = path - wcpath = py.path.svnwc(path) - try: - st = wcpath.status() - except ValueError: # typically, "bad char in wcpath" - kill(path, root) - return - for p in path.listdir(): - if p.basename == '.svn' or p.basename in precious: - continue - wcp = py.path.svnwc(p) - if wcp not in st.unchanged and wcp not in st.external: - kill(p, root) - elif p.check(dir=1): - svnwcrevert(p, root) - -# XXX add a functional test - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-p", "--precious", - action="append", dest="precious", default=[], - help="preserve files with this name") - -def main(): - opts, args = parser.parse_args() - if len(args) != 1: - parser.print_help() - sys.exit(2) - svnwcrevert(py.path.local(args[0]), precious=opts.precious) diff --git a/py/_plugin/__init__.py b/py/_plugin/__init__.py deleted file mode 
100644 --- a/py/_plugin/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/_plugin/pytest_mark.py b/py/_plugin/pytest_mark.py deleted file mode 100644 --- a/py/_plugin/pytest_mark.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -generic mechanism for marking python functions. - -By using the ``py.test.mark`` helper you can instantiate -decorators that will set named meta data on test functions. - -Marking a single function ----------------------------------------------------- - -You can "mark" a test function with meta data like this:: - - @py.test.mark.webtest - def test_send_http(): - ... - -This will set a "Marker" instance as a function attribute named "webtest". -You can also specify parametrized meta data like this:: - - @py.test.mark.webtest(firefox=30) - def test_receive(): - ... - -The named marker can be accessed like this later:: - - test_receive.webtest.kwargs['firefox'] == 30 - -In addition to set key-value pairs you can also use positional arguments:: - - @py.test.mark.webtest("triangular") - def test_receive(): - ... - -and later access it with ``test_receive.webtest.args[0] == 'triangular``. - -.. _`scoped-marking`: - -Marking whole classes or modules ----------------------------------------------------- - -If you are programming with Python2.6 you may use ``py.test.mark`` decorators -with classes to apply markers to all its test methods:: - - @py.test.mark.webtest - class TestClass: - def test_startup(self): - ... - def test_startup_and_more(self): - ... - -This is equivalent to directly applying the decorator to the -two test functions. 
- -To remain compatible with Python2.5 you can also set a -``pytestmark`` attribute on a TestClass like this:: - - import py - - class TestClass: - pytestmark = py.test.mark.webtest - -or if you need to use multiple markers you can use a list:: - - import py - - class TestClass: - pytestmark = [py.test.mark.webtest, pytest.mark.slowtest] - -You can also set a module level marker:: - - import py - pytestmark = py.test.mark.webtest - -in which case it will be applied to all functions and -methods defined in the module. - -Using "-k MARKNAME" to select tests ----------------------------------------------------- - -You can use the ``-k`` command line option to select -tests:: - - py.test -k webtest # will only run tests marked as webtest - -""" -import py - -def pytest_namespace(): - return {'mark': MarkGenerator()} - -class MarkGenerator: - """ non-underscore attributes of this object can be used as decorators for - marking test functions. Example: @py.test.mark.slowtest in front of a - function will set the 'slowtest' marker object on it. """ - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - return MarkDecorator(name) - -class MarkDecorator: - """ decorator for setting function attributes. """ - def __init__(self, name): - self.markname = name - self.kwargs = {} - self.args = [] - - def __repr__(self): - d = self.__dict__.copy() - name = d.pop('markname') - return "" %(name, d) - - def __call__(self, *args, **kwargs): - """ if passed a single callable argument: decorate it with mark info. - otherwise add *args/**kwargs in-place to mark information. 
""" - if args: - func = args[0] - if len(args) == 1 and hasattr(func, '__call__') or \ - hasattr(func, '__bases__'): - if hasattr(func, '__bases__'): - if hasattr(func, 'pytestmark'): - l = func.pytestmark - if not isinstance(l, list): - func.pytestmark = [l, self] - else: - l.append(self) - else: - func.pytestmark = [self] - else: - holder = getattr(func, self.markname, None) - if holder is None: - holder = MarkInfo(self.markname, self.args, self.kwargs) - setattr(func, self.markname, holder) - else: - holder.kwargs.update(self.kwargs) - holder.args.extend(self.args) - return func - else: - self.args.extend(args) - self.kwargs.update(kwargs) - return self - -class MarkInfo: - def __init__(self, name, args, kwargs): - self._name = name - self.args = args - self.kwargs = kwargs - - def __getattr__(self, name): - if name[0] != '_' and name in self.kwargs: - py.log._apiwarn("1.1", "use .kwargs attribute to access key-values") - return self.kwargs[name] - raise AttributeError(name) - - def __repr__(self): - return "" % ( - self._name, self.args, self.kwargs) - - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - item = __multicall__.execute() - if isinstance(item, py.test.collect.Function): - cls = collector.getparent(py.test.collect.Class) - mod = collector.getparent(py.test.collect.Module) - func = item.obj - func = getattr(func, '__func__', func) # py3 - func = getattr(func, 'im_func', func) # py2 - for parent in [x for x in (mod, cls) if x]: - marker = getattr(parent.obj, 'pytestmark', None) - if marker is not None: - if not isinstance(marker, list): - marker = [marker] - for mark in marker: - if isinstance(mark, MarkDecorator): - mark(func) - return item diff --git a/py/bin/py.convert_unittest b/py/bin/py.convert_unittest deleted file mode 100755 --- a/py/bin/py.convert_unittest +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pyconvert_unittest() \ No newline at end of file diff --git 
a/py/_plugin/pytest_tmpdir.py b/py/_plugin/pytest_tmpdir.py deleted file mode 100644 --- a/py/_plugin/pytest_tmpdir.py +++ /dev/null @@ -1,22 +0,0 @@ -"""provide temporary directories to test functions. - -usage example:: - - def test_plugin(tmpdir): - tmpdir.join("hello").write("hello") - -.. _`py.path.local`: ../../path.html - -""" -import py - -def pytest_funcarg__tmpdir(request): - """return a temporary directory path object - unique to each test function invocation, - created as a sub directory of the base temporary - directory. The returned object is a `py.path.local`_ - path object. - """ - name = request.function.__name__ - x = request.config.mktemp(name, numbered=True) - return x.realpath() diff --git a/py/bin/win32/py.convert_unittest.cmd b/py/bin/win32/py.convert_unittest.cmd deleted file mode 100644 --- a/py/bin/win32/py.convert_unittest.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.convert_unittest" %* \ No newline at end of file diff --git a/py/bin/win32/py.svnwcrevert.cmd b/py/bin/win32/py.svnwcrevert.cmd deleted file mode 100644 --- a/py/bin/win32/py.svnwcrevert.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.svnwcrevert" %* \ No newline at end of file diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py deleted file mode 100644 --- a/py/_plugin/pytest_restdoc.py +++ /dev/null @@ -1,429 +0,0 @@ -""" -perform ReST syntax, local and remote reference tests on .rst/.txt files. 
-""" -import py -import sys, os, re - -def pytest_addoption(parser): - group = parser.getgroup("ReST", "ReST documentation check options") - group.addoption('-R', '--urlcheck', - action="store_true", dest="urlcheck", default=False, - help="urlopen() remote links found in ReST text files.") - group.addoption('--urltimeout', action="store", metavar="secs", - type="int", dest="urlcheck_timeout", default=5, - help="timeout in seconds for remote urlchecks") - group.addoption('--forcegen', - action="store_true", dest="forcegen", default=False, - help="force generation of html files.") - -def pytest_collect_file(path, parent): - if path.ext in (".txt", ".rst"): - project = getproject(path) - if project is not None: - return ReSTFile(path, parent=parent, project=project) - -def getproject(path): - for parent in path.parts(reverse=True): - confrest = parent.join("confrest.py") - if confrest.check(): - Project = confrest.pyimport().Project - return Project(parent) - -class ReSTFile(py.test.collect.File): - def __init__(self, fspath, parent, project): - super(ReSTFile, self).__init__(fspath=fspath, parent=parent) - self.project = project - - def collect(self): - return [ - ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project), - LinkCheckerMaker("checklinks", parent=self), - DoctestText("doctest", parent=self), - ] - -def deindent(s, sep='\n'): - leastspaces = -1 - lines = s.split(sep) - for line in lines: - if not line.strip(): - continue - spaces = len(line) - len(line.lstrip()) - if leastspaces == -1 or spaces < leastspaces: - leastspaces = spaces - if leastspaces == -1: - return s - for i, line in enumerate(lines): - if not line.strip(): - lines[i] = '' - else: - lines[i] = line[leastspaces:] - return sep.join(lines) - -class ReSTSyntaxTest(py.test.collect.Item): - def __init__(self, name, parent, project): - super(ReSTSyntaxTest, self).__init__(name=name, parent=parent) - self.project = project - - def reportinfo(self): - return self.fspath, None, "syntax 
check" - - def runtest(self): - self.restcheck(py.path.svnwc(self.fspath)) - - def restcheck(self, path): - py.test.importorskip("docutils") - self.register_linkrole() - from docutils.utils import SystemMessage - try: - self._checkskip(path, self.project.get_htmloutputpath(path)) - self.project.process(path) - except KeyboardInterrupt: - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") - - def register_linkrole(self): - #directive.register_linkrole('api', self.resolve_linkrole) - #directive.register_linkrole('source', self.resolve_linkrole) -# -# # XXX fake sphinx' "toctree" and refs -# directive.register_linkrole('ref', self.resolve_linkrole) - - from docutils.parsers.rst import directives - def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - toctree_directive.content = 1 - toctree_directive.options = {'maxdepth': int, 'glob': directives.flag, - 'hidden': directives.flag} - directives.register_directive('toctree', toctree_directive) - self.register_pygments() - - def register_pygments(self): - # taken from pygments-main/external/rst-directive.py - from docutils.parsers.rst import directives - try: - from pygments.formatters import HtmlFormatter - except ImportError: - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - pygments_directive.options = {} - else: - # The default formatter - DEFAULT = HtmlFormatter(noclasses=True) - # Add name -> formatter pairs for every variant you want to use - VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), - } - - from docutils import nodes - - from pygments import highlight - from pygments.lexers import get_lexer_by_name, TextLexer - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, 
state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight('\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - - pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - directives.register_directive('sourcecode', pygments_directive) - - def resolve_linkrole(self, name, text, check=True): - apigen_relpath = self.project.apigen_relpath - - if name == 'api': - if text == 'py': - return ('py', apigen_relpath + 'api/index.html') - else: - assert text.startswith('py.'), ( - 'api link "%s" does not point to the py package') % (text,) - dotted_name = text - if dotted_name.find('(') > -1: - dotted_name = dotted_name[:text.find('(')] - # remove pkg root - path = dotted_name.split('.')[1:] - dotted_name = '.'.join(path) - obj = py - if check: - for chunk in path: - try: - obj = getattr(obj, chunk) - except AttributeError: - raise AssertionError( - 'problem with linkrole :api:`%s`: can not resolve ' - 'dotted name %s' % (text, dotted_name,)) - return (text, apigen_relpath + 'api/%s.html' % (dotted_name,)) - elif name == 'source': - assert text.startswith('py/'), ('source link "%s" does not point ' - 'to the py package') % (text,) - relpath = '/'.join(text.split('/')[1:]) - if check: - pkgroot = py._pydir - abspath = pkgroot.join(relpath) - assert pkgroot.join(relpath).check(), ( - 'problem with linkrole :source:`%s`: ' - 'path %s does not exist' % (text, relpath)) - if relpath.endswith('/') or not relpath: - relpath += 'index.html' - else: - relpath += '.html' - return (text, apigen_relpath + 'source/%s' % (relpath,)) - elif name == 'ref': - return ("", "") - - def _checkskip(self, lpath, 
htmlpath=None): - if not self.config.getvalue("forcegen"): - lpath = py.path.local(lpath) - if htmlpath is not None: - htmlpath = py.path.local(htmlpath) - if lpath.ext == '.txt': - htmlpath = htmlpath or lpath.new(ext='.html') - if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): - py.test.skip("html file is up to date, use --forcegen to regenerate") - #return [] # no need to rebuild - -class DoctestText(py.test.collect.Item): - def reportinfo(self): - return self.fspath, None, "doctest" - - def runtest(self): - content = self._normalize_linesep() - newcontent = self.config.hook.pytest_doctest_prepare_content(content=content) - if newcontent is not None: - content = newcontent - s = content - l = [] - prefix = '.. >>> ' - mod = py.std.types.ModuleType(self.fspath.purebasename) - skipchunk = False - for line in deindent(s).split('\n'): - stripped = line.strip() - if skipchunk and line.startswith(skipchunk): - py.builtin.print_("skipping", line) - continue - skipchunk = False - if stripped.startswith(prefix): - try: - py.builtin.exec_(py.code.Source( - stripped[len(prefix):]).compile(), mod.__dict__) - except ValueError: - e = sys.exc_info()[1] - if e.args and e.args[0] == "skipchunk": - skipchunk = " " * (len(line) - len(line.lstrip())) - else: - raise - else: - l.append(line) - docstring = "\n".join(l) - mod.__doc__ = docstring - failed, tot = py.std.doctest.testmod(mod, verbose=1) - if failed: - py.test.fail("doctest %s: %s failed out of %s" %( - self.fspath, failed, tot)) - - def _normalize_linesep(self): - # XXX quite nasty... 
but it works (fixes win32 issues) - s = self.fspath.read() - linesep = '\n' - if '\r' in s: - if '\n' not in s: - linesep = '\r' - else: - linesep = '\r\n' - s = s.replace(linesep, '\n') - return s - -class LinkCheckerMaker(py.test.collect.Collector): - def collect(self): - return list(self.genlinkchecks()) - - def genlinkchecks(self): - path = self.fspath - # generating functions + args as single tests - timeout = self.config.getvalue("urlcheck_timeout") - for lineno, line in enumerate(path.readlines()): - line = line.strip() - if line.startswith('.. _'): - if line.startswith('.. _`'): - delim = '`:' - else: - delim = ':' - l = line.split(delim, 1) - if len(l) != 2: - continue - tryfn = l[1].strip() - name = "%s:%d" %(tryfn, lineno) - if tryfn.startswith('http:') or tryfn.startswith('https'): - if self.config.getvalue("urlcheck"): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno, timeout), checkfunc=urlcheck) - elif tryfn.startswith('webcal:'): - continue - else: - i = tryfn.find('#') - if i != -1: - checkfn = tryfn[:i] - else: - checkfn = tryfn - if checkfn.strip() and (1 or checkfn.endswith('.html')): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno), checkfunc=localrefcheck) - -class CheckLink(py.test.collect.Item): - def __init__(self, name, parent, args, checkfunc): - super(CheckLink, self).__init__(name, parent) - self.args = args - self.checkfunc = checkfunc - - def runtest(self): - return self.checkfunc(*self.args) - - def reportinfo(self, basedir=None): - return (self.fspath, self.args[2], "checklink: %s" % self.args[0]) - -def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): - old = py.std.socket.getdefaulttimeout() - py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN) - try: - try: - py.builtin.print_("trying remote", tryfn) - py.std.urllib2.urlopen(tryfn) - finally: - py.std.socket.setdefaulttimeout(old) - except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): - e = sys.exc_info()[1] - if getattr(e, 'code', None) in 
(401, 403): # authorization required, forbidden - py.test.skip("%s: %s" %(tryfn, str(e))) - else: - py.test.fail("remote reference error %r in %s:%d\n%s" %( - tryfn, path.basename, lineno+1, e)) - -def localrefcheck(tryfn, path, lineno): - # assume it should be a file - i = tryfn.find('#') - if tryfn.startswith('javascript:'): - return # don't check JS refs - if i != -1: - anchor = tryfn[i+1:] - tryfn = tryfn[:i] - else: - anchor = '' - fn = path.dirpath(tryfn) - ishtml = fn.ext == '.html' - fn = ishtml and fn.new(ext='.txt') or fn - py.builtin.print_("filename is", fn) - if not fn.check(): # not ishtml or not fn.check(): - if not py.path.local(tryfn).check(): # the html could be there - py.test.fail("reference error %r in %s:%d" %( - tryfn, path.basename, lineno+1)) - if anchor: - source = unicode(fn.read(), 'latin1') - source = source.lower().replace('-', ' ') # aehem - - anchor = anchor.replace('-', ' ') - match2 = ".. _`%s`:" % anchor - match3 = ".. _%s:" % anchor - candidates = (anchor, match2, match3) - py.builtin.print_("candidates", repr(candidates)) - for line in source.split('\n'): - line = line.strip() - if line in candidates: - break - else: - py.test.fail("anchor reference error %s#%s in %s:%d" %( - tryfn, anchor, path.basename, lineno+1)) - -if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()): - def log(msg): - print(msg) -else: - def log(msg): - pass - -def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'): - """ return html latin1-encoded document for the given input. 
- source a ReST-string - sourcepath where to look for includes (basically) - stylesheet path (to be used if any) - """ - from docutils.core import publish_string - kwargs = { - 'stylesheet' : stylesheet, - 'stylesheet_path': None, - 'traceback' : 1, - 'embed_stylesheet': 0, - 'output_encoding' : encoding, - #'halt' : 0, # 'info', - 'halt_level' : 2, - } - # docutils uses os.getcwd() :-( - source_path = os.path.abspath(str(source_path)) - prevdir = os.getcwd() - try: - #os.chdir(os.path.dirname(source_path)) - return publish_string(source, source_path, writer_name='html', - settings_overrides=kwargs) - finally: - os.chdir(prevdir) - -def process(txtpath, encoding='latin1'): - """ process a textfile """ - log("processing %s" % txtpath) - assert txtpath.check(ext='.txt') - if isinstance(txtpath, py.path.svnwc): - txtpath = txtpath.localpath - htmlpath = txtpath.new(ext='.html') - #svninfopath = txtpath.localpath.new(ext='.svninfo') - - style = txtpath.dirpath('style.css') - if style.check(): - stylesheet = style.basename - else: - stylesheet = None - content = unicode(txtpath.read(), encoding) - doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding) - htmlpath.open('wb').write(doc) - #log("wrote %r" % htmlpath) - #if txtpath.check(svnwc=1, versioned=1): - # info = txtpath.info() - # svninfopath.dump(info) - -if sys.version_info > (3, 0): - def _uni(s): return s -else: - def _uni(s): - return unicode(s) - -rex1 = re.compile(r'.*(.*).*', re.MULTILINE | re.DOTALL) -rex2 = re.compile(r'.*
(.*)
.*', re.MULTILINE | re.DOTALL) - -def strip_html_header(string, encoding='utf8'): - """ return the content of the body-tag """ - uni = unicode(string, encoding) - for rex in rex1,rex2: - match = rex.search(uni) - if not match: - break - uni = match.group(1) - return uni - -class Project: # used for confrest.py files - def __init__(self, sourcepath): - self.sourcepath = sourcepath - def process(self, path): - return process(path) - def get_htmloutputpath(self, path): - return path.new(ext='html') diff --git a/py/_code/oldmagic.py b/py/_code/oldmagic.py deleted file mode 100644 --- a/py/_code/oldmagic.py +++ /dev/null @@ -1,62 +0,0 @@ -""" deprecated module for turning on/off some features. """ - -import py - -from py.builtin import builtins as cpy_builtin - -def invoke(assertion=False, compile=False): - """ (deprecated) invoke magic, currently you can specify: - - assertion patches the builtin AssertionError to try to give - more meaningful AssertionErrors, which by means - of deploying a mini-interpreter constructs - a useful error message. - """ - py.log._apiwarn("1.1", - "py.magic.invoke() is deprecated, use py.code.patch_builtins()", - stacklevel=2, - ) - py.code.patch_builtins(assertion=assertion, compile=compile) - -def revoke(assertion=False, compile=False): - """ (deprecated) revoke previously invoked magic (see invoke()).""" - py.log._apiwarn("1.1", - "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()", - stacklevel=2, - ) - py.code.unpatch_builtins(assertion=assertion, compile=compile) - -patched = {} - -def patch(namespace, name, value): - """ (deprecated) rebind the 'name' on the 'namespace' to the 'value', - possibly and remember the original value. Multiple - invocations to the same namespace/name pair will - remember a list of old values. 
- """ - py.log._apiwarn("1.1", - "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - orig = getattr(namespace, name) - patched.setdefault(nref, []).append(orig) - setattr(namespace, name, value) - return orig - -def revert(namespace, name): - """ (deprecated) revert to the orginal value the last patch modified. - Raise ValueError if no such original value exists. - """ - py.log._apiwarn("1.1", - "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - if nref not in patched or not patched[nref]: - raise ValueError("No original value stored for %s.%s" % nref) - current = getattr(namespace, name) - orig = patched[nref].pop() - setattr(namespace, name, orig) - return current - diff --git a/py/_plugin/pytest_hooklog.py b/py/_plugin/pytest_hooklog.py deleted file mode 100644 --- a/py/_plugin/pytest_hooklog.py +++ /dev/null @@ -1,33 +0,0 @@ -""" log invocations of extension hooks to a file. 
""" -import py - -def pytest_addoption(parser): - parser.addoption("--hooklog", dest="hooklog", default=None, - help="write hook calls to the given file.") - -def pytest_configure(config): - hooklog = config.getvalue("hooklog") - if hooklog: - config._hooklogfile = open(hooklog, 'w') - config._hooklog_oldperformcall = config.hook._performcall - config.hook._performcall = (lambda name, multicall: - logged_call(name=name, multicall=multicall, config=config)) - -def logged_call(name, multicall, config): - f = config._hooklogfile - f.write("%s(**%s)\n" % (name, multicall.kwargs)) - try: - res = config._hooklog_oldperformcall(name=name, multicall=multicall) - except: - f.write("-> exception") - raise - f.write("-> %r" % (res,)) - return res - -def pytest_unconfigure(config): - try: - del config.hook.__dict__['_performcall'] - except KeyError: - pass - else: - config._hooklogfile.close() diff --git a/py/_cmdline/pycleanup.py b/py/_cmdline/pycleanup.py deleted file mode 100755 --- a/py/_cmdline/pycleanup.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.cleanup [PATH] ... - -Delete typical python development related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot. Optionally remove setup.py related files and empty -directories. - -""" -import py -import sys, subprocess - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - parser.add_option("-e", metavar="ENDING", - dest="endings", default=[".pyc", "$py.class"], action="append", - help=("(multi) recursively remove files with the given ending." 
- " '.pyc' and '$py.class' are in the default list.")) - parser.add_option("-d", action="store_true", dest="removedir", - help="remove empty directories.") - parser.add_option("-s", action="store_true", dest="setup", - help="remove 'build' and 'dist' directories next to setup.py files") - parser.add_option("-a", action="store_true", dest="all", - help="synonym for '-S -d -e pip-log.txt'") - parser.add_option("-n", "--dryrun", dest="dryrun", default=False, - action="store_true", - help="don't actually delete but display would-be-removed filenames.") - (options, args) = parser.parse_args() - - Cleanup(options, args).main() - -class Cleanup: - def __init__(self, options, args): - if not args: - args = ["."] - self.options = options - self.args = [py.path.local(x) for x in args] - if options.all: - options.setup = True - options.removedir = True - options.endings.append("pip-log.txt") - - def main(self): - if self.options.setup: - for arg in self.args: - self.setupclean(arg) - - for path in self.args: - py.builtin.print_("cleaning path", path, - "of extensions", self.options.endings) - for x in path.visit(self.shouldremove, self.recursedir): - self.remove(x) - if self.options.removedir: - for x in path.visit(lambda x: x.check(dir=1), self.recursedir): - if not x.listdir(): - self.remove(x) - - def shouldremove(self, p): - for ending in self.options.endings: - if p.basename.endswith(ending): - return True - - def recursedir(self, path): - return path.check(dotfile=0, link=0) - - def remove(self, path): - if not path.check(): - return - if self.options.dryrun: - py.builtin.print_("would remove", path) - else: - py.builtin.print_("removing", path) - path.remove() - - def XXXcallsetup(self, setup, *args): - old = setup.dirpath().chdir() - try: - subprocess.call([sys.executable, str(setup)] + list(args)) - finally: - old.chdir() - - def setupclean(self, path): - for x in path.visit("setup.py", self.recursedir): - basepath = x.dirpath() - self.remove(basepath / "build") - 
self.remove(basepath / "dist") diff --git a/py/bin/py.which b/py/bin/py.which deleted file mode 100755 --- a/py/bin/py.which +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pywhich() \ No newline at end of file diff --git a/py/_plugin/pytest_junitxml.py b/py/_plugin/pytest_junitxml.py deleted file mode 100644 --- a/py/_plugin/pytest_junitxml.py +++ /dev/null @@ -1,171 +0,0 @@ -""" - logging of test results in JUnit-XML format, for use with Hudson - and build integration servers. Based on initial code from Ross Lawley. -""" - -import py -import time - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group.addoption('--junitxml', action="store", dest="xmlpath", - metavar="path", default=None, - help="create junit-xml style report file at given path.") - -def pytest_configure(config): - xmlpath = config.option.xmlpath - if xmlpath: - config._xml = LogXML(xmlpath) - config.pluginmanager.register(config._xml) - -def pytest_unconfigure(config): - xml = getattr(config, '_xml', None) - if xml: - del config._xml - config.pluginmanager.unregister(xml) - -class LogXML(object): - def __init__(self, logfile): - self.logfile = logfile - self.test_logs = [] - self.passed = self.skipped = 0 - self.failed = self.errors = 0 - self._durations = {} - - def _opentestcase(self, report): - node = report.item - d = {'time': self._durations.pop(report.item, "0")} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def _closetestcase(self): - self.test_logs.append("") - - def appendlog(self, fmt, *args): - args = tuple([py.xml.escape(arg) for arg in args]) - self.test_logs.append(fmt % args) - - def append_pass(self, report): - self.passed += 1 - self._opentestcase(report) - self._closetestcase() - - def append_failure(self, 
report): - self._opentestcase(report) - #msg = str(report.longrepr.reprtraceback.extraline) - if "xfail" in report.keywords: - self.appendlog( - '') - self.skipped += 1 - else: - self.appendlog('%s', - report.longrepr) - self.failed += 1 - self._closetestcase() - - def _opentestcase_collectfailure(self, report): - node = report.collector - d = {'time': '???'} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def append_collect_failure(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_collect_skipped(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.skipped += 1 - - def append_error(self, report): - self._opentestcase(report) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_skipped(self, report): - self._opentestcase(report) - if "xfail" in report.keywords: - self.appendlog( - '%s', - report.keywords['xfail']) - else: - self.appendlog("") - self._closetestcase() - self.skipped += 1 - - def pytest_runtest_logreport(self, report): - if report.passed: - self.append_pass(report) - elif report.failed: - if report.when != "call": - self.append_error(report) - else: - self.append_failure(report) - elif report.skipped: - self.append_skipped(report) - - def pytest_runtest_call(self, item, __multicall__): - start = time.time() - try: - return __multicall__.execute() - finally: - self._durations[item] = time.time() - start - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - 
self.append_collect_failure(report) - else: - self.append_collect_skipped(report) - - def pytest_internalerror(self, excrepr): - self.errors += 1 - data = py.xml.escape(excrepr) - self.test_logs.append( - '\n' - ' ' - '%s' % data) - - def pytest_sessionstart(self, session): - self.suite_start_time = time.time() - - def pytest_sessionfinish(self, session, exitstatus, __multicall__): - if py.std.sys.version_info[0] < 3: - logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8') - else: - logfile = open(self.logfile, 'w', encoding='utf-8') - - suite_stop_time = time.time() - suite_time_delta = suite_stop_time - self.suite_start_time - numtests = self.passed + self.failed - logfile.write('') - logfile.write('') - logfile.writelines(self.test_logs) - logfile.write('') - logfile.close() - tw = session.config.pluginmanager.getplugin("terminalreporter")._tw - tw.line() - tw.sep("-", "generated xml file: %s" %(self.logfile)) diff --git a/py/_cmdline/pywhich.py b/py/_cmdline/pywhich.py deleted file mode 100755 --- a/py/_cmdline/pywhich.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.which [name] - -print the location of the given python module or package name -""" - -import sys - -def main(): - name = sys.argv[1] - try: - mod = __import__(name) - except ImportError: - sys.stderr.write("could not import: " + name + "\n") - else: - try: - location = mod.__file__ - except AttributeError: - sys.stderr.write("module (has no __file__): " + str(mod)) - else: - print(location) diff --git a/py/_path/gateway/remotepath.py b/py/_path/gateway/remotepath.py deleted file mode 100644 --- a/py/_path/gateway/remotepath.py +++ /dev/null @@ -1,47 +0,0 @@ -import py, itertools -from py._path import common - -COUNTER = itertools.count() - -class RemotePath(common.PathBase): - sep = '/' - - def __init__(self, channel, id, basename=None): - self._channel = channel - self._id = id - self._basename = basename - self._specs = {} - - def __del__(self): - 
self._channel.send(('DEL', self._id)) - - def __repr__(self): - return 'RemotePath(%s)' % self.basename - - def listdir(self, *args): - self._channel.send(('LIST', self._id) + args) - return [RemotePath(self._channel, id, basename) - for (id, basename) in self._channel.receive()] - - def dirpath(self): - id = ~COUNTER.next() - self._channel.send(('DIRPATH', self._id, id)) - return RemotePath(self._channel, id) - - def join(self, *args): - id = ~COUNTER.next() - self._channel.send(('JOIN', self._id, id) + args) - return RemotePath(self._channel, id) - - def _getbyspec(self, spec): - parts = spec.split(',') - ask = [x for x in parts if x not in self._specs] - if ask: - self._channel.send(('GET', self._id, ",".join(ask))) - for part, value in zip(ask, self._channel.receive()): - self._specs[part] = value - return [self._specs[x] for x in parts] - - def read(self): - self._channel.send(('READ', self._id)) - return self._channel.receive() diff --git a/py/_plugin/pytest_doctest.py b/py/_plugin/pytest_doctest.py deleted file mode 100644 --- a/py/_plugin/pytest_doctest.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -collect and execute doctests from modules and test files. - -Usage -------------- - -By default all files matching the ``test*.txt`` pattern will -be run through the python standard ``doctest`` module. Issue:: - - py.test --doctest-glob='*.rst' - -to change the pattern. 
Additionally you can trigger running of -tests in all python modules (including regular python test modules):: - - py.test --doctest-modules - -You can also make these changes permanent in your project by -putting them into a conftest.py file like this:: - - # content of conftest.py - option_doctestmodules = True - option_doctestglob = "*.rst" -""" - -import py -from py._code.code import TerminalRepr, ReprFileLocation -import doctest - -def pytest_addoption(parser): - group = parser.getgroup("collect") - group.addoption("--doctest-modules", - action="store_true", default=False, - help="run doctests in all .py modules", - dest="doctestmodules") - group.addoption("--doctest-glob", - action="store", default="test*.txt", metavar="pat", - help="doctests file matching pattern, default: test*.txt", - dest="doctestglob") - -def pytest_collect_file(path, parent): - config = parent.config - if path.ext == ".py": - if config.getvalue("doctestmodules"): - return DoctestModule(path, parent) - elif path.check(fnmatch=config.getvalue("doctestglob")): - return DoctestTextfile(path, parent) - -class ReprFailDoctest(TerminalRepr): - def __init__(self, reprlocation, lines): - self.reprlocation = reprlocation - self.lines = lines - def toterminal(self, tw): - for line in self.lines: - tw.line(line) - self.reprlocation.toterminal(tw) - -class DoctestItem(py.test.collect.Item): - def __init__(self, path, parent): - name = self.__class__.__name__ + ":" + path.basename - super(DoctestItem, self).__init__(name=name, parent=parent) - self.fspath = path - - def repr_failure(self, excinfo): - if excinfo.errisinstance(doctest.DocTestFailure): - doctestfailure = excinfo.value - example = doctestfailure.example - test = doctestfailure.test - filename = test.filename - lineno = test.lineno + example.lineno + 1 - message = excinfo.type.__name__ - reprlocation = ReprFileLocation(filename, lineno, message) - checker = doctest.OutputChecker() - REPORT_UDIFF = doctest.REPORT_UDIFF - filelines = 
py.path.local(filename).readlines(cr=0) - i = max(test.lineno, max(0, lineno - 10)) # XXX? - lines = [] - for line in filelines[i:lineno]: - lines.append("%03d %s" % (i+1, line)) - i += 1 - lines += checker.output_difference(example, - doctestfailure.got, REPORT_UDIFF).split("\n") - return ReprFailDoctest(reprlocation, lines) - elif excinfo.errisinstance(doctest.UnexpectedException): - excinfo = py.code.ExceptionInfo(excinfo.value.exc_info) - return super(DoctestItem, self).repr_failure(excinfo) - else: - return super(DoctestItem, self).repr_failure(excinfo) - -class DoctestTextfile(DoctestItem): - def runtest(self): - if not self._deprecated_testexecution(): - failed, tot = doctest.testfile( - str(self.fspath), module_relative=False, - raise_on_error=True, verbose=0) - -class DoctestModule(DoctestItem): - def runtest(self): - module = self.fspath.pyimport() - failed, tot = doctest.testmod( - module, raise_on_error=True, verbose=0) diff --git a/py/bin/py.svnwcrevert b/py/bin/py.svnwcrevert deleted file mode 100755 --- a/py/bin/py.svnwcrevert +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pysvnwcrevert() \ No newline at end of file diff --git a/py/bin/win32/py.test.cmd b/py/bin/win32/py.test.cmd deleted file mode 100644 --- a/py/bin/win32/py.test.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.test" %* \ No newline at end of file diff --git a/py/_test/config.py b/py/_test/config.py deleted file mode 100644 --- a/py/_test/config.py +++ /dev/null @@ -1,291 +0,0 @@ -import py, os -from py._test.conftesthandle import Conftest -from py._test.pluginmanager import PluginManager -from py._test import parseopt -from py._test.collect import RootCollector - -def ensuretemp(string, dir=1): - """ (deprecated) return temporary directory path with - the given string as the trailing part. 
It is usually - better to use the 'tmpdir' function argument which will - take care to provide empty unique directories for each - test call even if the test is called multiple times. - """ - #py.log._apiwarn(">1.1", "use tmpdir function argument") - return py.test.config.ensuretemp(string, dir=dir) - -class CmdOptions(object): - """ holds cmdline options as attributes.""" - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - def __repr__(self): - return "" %(self.__dict__,) - -class Error(Exception): - """ Test Configuration Error. """ - -class Config(object): - """ access to config values, pluginmanager and plugin hooks. """ - Option = py.std.optparse.Option - Error = Error - basetemp = None - _sessionclass = None - - def __init__(self, topdir=None, option=None): - self.option = option or CmdOptions() - self.topdir = topdir - self._parser = parseopt.Parser( - usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]", - processopt=self._processopt, - ) - self.pluginmanager = PluginManager() - self._conftest = Conftest(onimport=self._onimportconftest) - self.hook = self.pluginmanager.hook - - def _onimportconftest(self, conftestmodule): - self.trace("loaded conftestmodule %r" %(conftestmodule,)) - self.pluginmanager.consider_conftest(conftestmodule) - - def _getmatchingplugins(self, fspath): - allconftests = self._conftest._conftestpath2mod.values() - plugins = [x for x in self.pluginmanager.getplugins() - if x not in allconftests] - plugins += self._conftest.getconftestmodules(fspath) - return plugins - - def trace(self, msg): - if getattr(self.option, 'traceconfig', None): - self.hook.pytest_trace(category="config", msg=msg) - - def _processopt(self, opt): - if hasattr(opt, 'default') and opt.dest: - val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None) - if val is not None: - if opt.type == "int": - val = int(val) - elif opt.type == "long": - val = long(val) - elif opt.type == "float": - val = float(val) - elif not opt.type and 
opt.action in ("store_true", "store_false"): - val = eval(val) - opt.default = val - else: - name = "option_" + opt.dest - try: - opt.default = self._conftest.rget(name) - except (ValueError, KeyError): - pass - if not hasattr(self.option, opt.dest): - setattr(self.option, opt.dest, opt.default) - - def _preparse(self, args): - self.pluginmanager.consider_setuptools_entrypoints() - self.pluginmanager.consider_env() - self.pluginmanager.consider_preparse(args) - self._conftest.setinitial(args) - self.pluginmanager.do_addoption(self._parser) - - def parse(self, args): - """ parse cmdline arguments into this config object. - Note that this can only be called once per testing process. - """ - assert not hasattr(self, 'args'), ( - "can only parse cmdline args at most once per Config object") - self._preparse(args) - self._parser.hints.extend(self.pluginmanager._hints) - args = self._parser.parse_setoption(args, self.option) - if not args: - args.append(py.std.os.getcwd()) - self.topdir = gettopdir(args) - self._rootcol = RootCollector(config=self) - self._setargs(args) - - def _setargs(self, args): - self.args = list(args) - self._argfspaths = [py.path.local(decodearg(x)[0]) for x in args] - - # config objects are usually pickled across system - # barriers but they contain filesystem paths. - # upon getstate/setstate we take care to do everything - # relative to "topdir". 
- def __getstate__(self): - l = [] - for path in self.args: - path = py.path.local(path) - l.append(path.relto(self.topdir)) - return l, self.option.__dict__ - - def __setstate__(self, repr): - # we have to set py.test.config because loading - # of conftest files may use it (deprecated) - # mainly by py.test.config.addoptions() - global config_per_process - py.test.config = config_per_process = self - args, cmdlineopts = repr - cmdlineopts = CmdOptions(**cmdlineopts) - # next line will registers default plugins - self.__init__(topdir=py.path.local(), option=cmdlineopts) - self._rootcol = RootCollector(config=self) - args = [str(self.topdir.join(x)) for x in args] - self._preparse(args) - self._setargs(args) - - def ensuretemp(self, string, dir=True): - return self.getbasetemp().ensure(string, dir=dir) - - def getbasetemp(self): - if self.basetemp is None: - basetemp = self.option.basetemp - if basetemp: - basetemp = py.path.local(basetemp) - if not basetemp.check(dir=1): - basetemp.mkdir() - else: - basetemp = py.path.local.make_numbered_dir(prefix='pytest-') - self.basetemp = basetemp - return self.basetemp - - def mktemp(self, basename, numbered=False): - basetemp = self.getbasetemp() - if not numbered: - return basetemp.mkdir(basename) - else: - return py.path.local.make_numbered_dir(prefix=basename, - keep=0, rootdir=basetemp, lock_timeout=None) - - def getinitialnodes(self): - return [self.getnode(arg) for arg in self.args] - - def getnode(self, arg): - parts = decodearg(arg) - path = py.path.local(parts.pop(0)) - if not path.check(): - raise self.Error("file not found: %s" %(path,)) - topdir = self.topdir - if path != topdir and not path.relto(topdir): - raise self.Error("path %r is not relative to %r" % - (str(path), str(topdir))) - # assumtion: pytest's fs-collector tree follows the filesystem tree - names = list(filter(None, path.relto(topdir).split(path.sep))) - names += parts - try: - return self._rootcol.getbynames(names) - except ValueError: - e = 
py.std.sys.exc_info()[1] - raise self.Error("can't collect: %s\n%s" % (arg, e.args[0])) - - def _getcollectclass(self, name, path): - try: - cls = self._conftest.rget(name, path) - except KeyError: - return getattr(py.test.collect, name) - else: - py.log._apiwarn(">1.1", "%r was found in a conftest.py file, " - "use pytest_collect hooks instead." % (cls,)) - return cls - - def getconftest_pathlist(self, name, path=None): - """ return a matching value, which needs to be sequence - of filenames that will be returned as a list of Path - objects (they can be relative to the location - where they were found). - """ - try: - mod, relroots = self._conftest.rget_with_confmod(name, path) - except KeyError: - return None - modpath = py.path.local(mod.__file__).dirpath() - l = [] - for relroot in relroots: - if not isinstance(relroot, py.path.local): - relroot = relroot.replace("/", py.path.local.sep) - relroot = modpath.join(relroot, abs=True) - l.append(relroot) - return l - - def addoptions(self, groupname, *specs): - """ add a named group of options to the current testing session. - This function gets invoked during testing session initialization. - """ - py.log._apiwarn("1.0", "define pytest_addoptions(parser) to add options", stacklevel=2) - group = self._parser.getgroup(groupname) - for opt in specs: - group._addoption_instance(opt) - return self.option - - def addoption(self, *optnames, **attrs): - return self._parser.addoption(*optnames, **attrs) - - def getvalueorskip(self, name, path=None): - """ return getvalue() or call py.test.skip if no value exists. """ - try: - val = self.getvalue(name, path) - if val is None: - raise KeyError(name) - return val - except KeyError: - py.test.skip("no %r value found" %(name,)) - - def getvalue(self, name, path=None): - """ return 'name' value looked up from the 'options' - and then from the first conftest file found up - the path (including the path itself). 
- if path is None, lookup the value in the initial - conftest modules found during command line parsing. - """ - try: - return getattr(self.option, name) - except AttributeError: - return self._conftest.rget(name, path) - - def setsessionclass(self, cls): - if self._sessionclass is not None: - raise ValueError("sessionclass already set to: %r" %( - self._sessionclass)) - self._sessionclass = cls - - def initsession(self): - """ return an initialized session object. """ - cls = self._sessionclass - if cls is None: - from py._test.session import Session - cls = Session - session = cls(self) - self.trace("instantiated session %r" % session) - return session - -# -# helpers -# - -def gettopdir(args): - """ return the top directory for the given paths. - if the common base dir resides in a python package - parent directory of the root package is returned. - """ - fsargs = [py.path.local(decodearg(arg)[0]) for arg in args] - p = fsargs and fsargs[0] or None - for x in fsargs[1:]: - p = p.common(x) - assert p, "cannot determine common basedir of %s" %(fsargs,) - pkgdir = p.pypkgpath() - if pkgdir is None: - if p.check(file=1): - p = p.dirpath() - return p - else: - return pkgdir.dirpath() - -def decodearg(arg): - arg = str(arg) - return arg.split("::") - -def onpytestaccess(): - # it's enough to have our containing module loaded as - # it initializes a per-process config instance - # which loads default plugins which add to py.test.* - pass - -# a default per-process instance of py.test configuration -config_per_process = Config() diff --git a/py/_plugin/pytest_assertion.py b/py/_plugin/pytest_assertion.py deleted file mode 100644 --- a/py/_plugin/pytest_assertion.py +++ /dev/null @@ -1,28 +0,0 @@ -import py -import sys - -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group._addoption('--no-assert', action="store_true", default=False, - dest="noassert", - help="disable python assert expression reinterpretation."), - -def 
pytest_configure(config): - if not config.getvalue("noassert") and not config.getvalue("nomagic"): - warn_about_missing_assertion() - config._oldassertion = py.builtin.builtins.AssertionError - py.builtin.builtins.AssertionError = py.code._AssertionError - -def pytest_unconfigure(config): - if hasattr(config, '_oldassertion'): - py.builtin.builtins.AssertionError = config._oldassertion - del config._oldassertion - -def warn_about_missing_assertion(): - try: - assert False - except AssertionError: - pass - else: - py.std.warnings.warn("Assertions are turned off!" - " (are you using python -O?)") diff --git a/py/bin/py.countloc b/py/bin/py.countloc deleted file mode 100755 --- a/py/bin/py.countloc +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycountloc() \ No newline at end of file diff --git a/py/_plugin/pytest__pytest.py b/py/_plugin/pytest__pytest.py deleted file mode 100644 --- a/py/_plugin/pytest__pytest.py +++ /dev/null @@ -1,101 +0,0 @@ -import py - -from py._test.pluginmanager import HookRelay - -def pytest_funcarg___pytest(request): - return PytestArg(request) - -class PytestArg: - def __init__(self, request): - self.request = request - - def gethookrecorder(self, hook): - hookrecorder = HookRecorder(hook._registry) - hookrecorder.start_recording(hook._hookspecs) - self.request.addfinalizer(hookrecorder.finish_recording) - return hookrecorder - -class ParsedCall: - def __init__(self, name, locals): - assert '_name' not in locals - self.__dict__.update(locals) - self.__dict__.pop('self') - self._name = name - - def __repr__(self): - d = self.__dict__.copy() - del d['_name'] - return "" %(self._name, d) - -class HookRecorder: - def __init__(self, registry): - self._registry = registry - self.calls = [] - self._recorders = {} - - def start_recording(self, hookspecs): - if not isinstance(hookspecs, (list, tuple)): - hookspecs = [hookspecs] - for hookspec in hookspecs: - assert hookspec not in self._recorders - class 
RecordCalls: - _recorder = self - for name, method in vars(hookspec).items(): - if name[0] != "_": - setattr(RecordCalls, name, self._makecallparser(method)) - recorder = RecordCalls() - self._recorders[hookspec] = recorder - self._registry.register(recorder) - self.hook = HookRelay(hookspecs, registry=self._registry, - prefix="pytest_") - - def finish_recording(self): - for recorder in self._recorders.values(): - self._registry.unregister(recorder) - self._recorders.clear() - - def _makecallparser(self, method): - name = method.__name__ - args, varargs, varkw, default = py.std.inspect.getargspec(method) - if not args or args[0] != "self": - args.insert(0, 'self') - fspec = py.std.inspect.formatargspec(args, varargs, varkw, default) - # we use exec because we want to have early type - # errors on wrong input arguments, using - # *args/**kwargs delays this and gives errors - # elsewhere - exec (py.code.compile(""" - def %(name)s%(fspec)s: - self._recorder.calls.append( - ParsedCall(%(name)r, locals())) - """ % locals())) - return locals()[name] - - def getcalls(self, names): - if isinstance(names, str): - names = names.split() - for name in names: - for cls in self._recorders: - if name in vars(cls): - break - else: - raise ValueError("callname %r not found in %r" %( - name, self._recorders.keys())) - l = [] - for call in self.calls: - if call._name in names: - l.append(call) - return l - - def popcall(self, name): - for i, call in enumerate(self.calls): - if call._name == name: - del self.calls[i] - return call - raise ValueError("could not find call %r" %(name, )) - - def getcall(self, name): - l = self.getcalls(name) - assert len(l) == 1, (name, l) - return l[0] - diff --git a/py/_plugin/pytest_helpconfig.py b/py/_plugin/pytest_helpconfig.py deleted file mode 100644 --- a/py/_plugin/pytest_helpconfig.py +++ /dev/null @@ -1,164 +0,0 @@ -""" provide version info, conftest/environment config names. 
-""" -import py -import inspect, sys - -def pytest_addoption(parser): - group = parser.getgroup('debugconfig') - group.addoption('--version', action="store_true", - help="display py lib version and import information.") - group._addoption('-p', action="append", dest="plugins", default = [], - metavar="name", - help="early-load given plugin (multi-allowed).") - group.addoption('--traceconfig', - action="store_true", dest="traceconfig", default=False, - help="trace considerations of conftest.py files."), - group._addoption('--nomagic', - action="store_true", dest="nomagic", default=False, - help="don't reinterpret asserts, no traceback cutting. ") - group.addoption('--debug', - action="store_true", dest="debug", default=False, - help="generate and show internal debugging information.") - group.addoption("--help-config", action="store_true", dest="helpconfig", - help="show available conftest.py and ENV-variable names.") - - -def pytest_configure(__multicall__, config): - if config.option.version: - p = py.path.local(py.__file__).dirpath() - sys.stderr.write("This is py.test version %s, imported from %s\n" % - (py.__version__, p)) - sys.exit(0) - if not config.option.helpconfig: - return - __multicall__.execute() - options = [] - for group in config._parser._groups: - options.extend(group.options) - widths = [0] * 10 - tw = py.io.TerminalWriter() - tw.sep("-") - tw.line("%-13s | %-18s | %-25s | %s" %( - "cmdline name", "conftest.py name", "ENV-variable name", "help")) - tw.sep("-") - - options = [opt for opt in options if opt._long_opts] - options.sort(key=lambda x: x._long_opts) - for opt in options: - if not opt._long_opts or not opt.dest: - continue - optstrings = list(opt._long_opts) # + list(opt._short_opts) - optstrings = filter(None, optstrings) - optstring = "|".join(optstrings) - line = "%-13s | %-18s | %-25s | %s" %( - optstring, - "option_%s" % opt.dest, - "PYTEST_OPTION_%s" % opt.dest.upper(), - opt.help and opt.help or "", - ) - 
tw.line(line[:tw.fullwidth]) - for name, help in conftest_options: - line = "%-13s | %-18s | %-25s | %s" %( - "", - name, - "", - help, - ) - tw.line(line[:tw.fullwidth]) - - tw.sep("-") - sys.exit(0) - -conftest_options = ( - ('pytest_plugins', 'list of plugin names to load'), - ('collect_ignore', '(relative) paths ignored during collection'), - ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), -) - -def pytest_report_header(config): - lines = [] - if config.option.debug or config.option.traceconfig: - lines.append("using py lib: %s" % (py.path.local(py.__file__).dirpath())) - if config.option.traceconfig: - lines.append("active plugins:") - plugins = [] - items = config.pluginmanager._name2plugin.items() - for name, plugin in items: - lines.append(" %-20s: %s" %(name, repr(plugin))) - return lines - - -# ===================================================== -# validate plugin syntax and hooks -# ===================================================== - -def pytest_plugin_registered(manager, plugin): - methods = collectattr(plugin) - hooks = {} - for hookspec in manager.hook._hookspecs: - hooks.update(collectattr(hookspec)) - - stringio = py.io.TextIO() - def Print(*args): - if args: - stringio.write(" ".join(map(str, args))) - stringio.write("\n") - - fail = False - while methods: - name, method = methods.popitem() - #print "checking", name - if isgenerichook(name): - continue - if name not in hooks: - if not getattr(method, 'optionalhook', False): - Print("found unknown hook:", name) - fail = True - else: - #print "checking", method - method_args = getargs(method) - #print "method_args", method_args - if '__multicall__' in method_args: - method_args.remove('__multicall__') - hook = hooks[name] - hookargs = getargs(hook) - for arg in method_args: - if arg not in hookargs: - Print("argument %r not available" %(arg, )) - Print("actual definition: %s" %(formatdef(method))) - Print("available hook arguments: %s" % - ", ".join(hookargs)) - fail = True - break 
- #if not fail: - # print "matching hook:", formatdef(method) - if fail: - name = getattr(plugin, '__name__', plugin) - raise PluginValidationError("%s:\n%s" %(name, stringio.getvalue())) - -class PluginValidationError(Exception): - """ plugin failed validation. """ - -def isgenerichook(name): - return name == "pytest_plugins" or \ - name.startswith("pytest_funcarg__") - -def getargs(func): - args = inspect.getargs(py.code.getrawcode(func))[0] - startindex = inspect.ismethod(func) and 1 or 0 - return args[startindex:] - -def collectattr(obj, prefixes=("pytest_",)): - methods = {} - for apiname in dir(obj): - for prefix in prefixes: - if apiname.startswith(prefix): - methods[apiname] = getattr(obj, apiname) - return methods - -def formatdef(func): - return "%s%s" %( - func.__name__, - inspect.formatargspec(*inspect.getargspec(func)) - ) - diff --git a/py/_plugin/pytest_genscript.py b/py/_plugin/pytest_genscript.py deleted file mode 100755 --- a/py/_plugin/pytest_genscript.py +++ /dev/null @@ -1,69 +0,0 @@ -#! /usr/bin/env python -""" -generate standalone test script to be distributed along with an application. 
-""" - -import os -import sys -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group.addoption("--genscript", action="store", default=None, - dest="genscript", metavar="path", - help="create standalone py.test script at given target path.") - -def pytest_configure(config): - genscript = config.getvalue("genscript") - if genscript: - import py - mydir = py.path.local(__file__).dirpath() - infile = mydir.join("standalonetemplate.py") - pybasedir = py.path.local(py.__file__).dirpath().dirpath() - genscript = py.path.local(genscript) - main(pybasedir, outfile=genscript, infile=infile) - raise SystemExit(0) - -def main(pybasedir, outfile, infile): - import base64 - import zlib - try: - import pickle - except Importerror: - import cPickle as pickle - - outfile = str(outfile) - infile = str(infile) - assert os.path.isabs(outfile) - os.chdir(str(pybasedir)) - files = [] - for dirpath, dirnames, filenames in os.walk("py"): - for f in filenames: - if not f.endswith(".py"): - continue - - fn = os.path.join(dirpath, f) - files.append(fn) - - name2src = {} - for f in files: - k = f.replace(os.sep, ".")[:-3] - name2src[k] = open(f, "r").read() - - data = pickle.dumps(name2src, 2) - data = zlib.compress(data, 9) - data = base64.encodestring(data) - data = data.decode("ascii") - - exe = open(infile, "r").read() - exe = exe.replace("@SOURCES@", data) - - open(outfile, "w").write(exe) - os.chmod(outfile, 493) # 0755 - sys.stdout.write("generated standalone py.test at %r, have fun!\n" % outfile) - -if __name__=="__main__": - dn = os.path.dirname - here = os.path.abspath(dn(__file__)) # py/plugin/ - pybasedir = dn(dn(here)) - outfile = os.path.join(os.getcwd(), "py.test-standalone") - infile = os.path.join(here, 'standalonetemplate.py') - main(pybasedir, outfile, infile) diff --git a/py/_plugin/pytest_resultlog.py b/py/_plugin/pytest_resultlog.py deleted file mode 100644 --- a/py/_plugin/pytest_resultlog.py +++ /dev/null @@ -1,98 +0,0 @@ -"""non-xml 
machine-readable logging of test results. - Useful for buildbot integration code. See the `PyPy-test`_ - web page for post-processing. - -.. _`PyPy-test`: http://codespeak.net:8099/summary - -""" - -import py -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("resultlog", "resultlog plugin options") - group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None, - help="path for machine-readable result log.") - -def pytest_configure(config): - resultlog = config.option.resultlog - if resultlog: - logfile = open(resultlog, 'w', 1) # line buffered - config._resultlog = ResultLog(config, logfile) - config.pluginmanager.register(config._resultlog) - -def pytest_unconfigure(config): - resultlog = getattr(config, '_resultlog', None) - if resultlog: - resultlog.logfile.close() - del config._resultlog - config.pluginmanager.unregister(resultlog) - -def generic_path(item): - chain = item.listchain() - gpath = [chain[0].name] - fspath = chain[0].fspath - fspart = False - for node in chain[1:]: - newfspath = node.fspath - if newfspath == fspath: - if fspart: - gpath.append(':') - fspart = False - else: - gpath.append('.') - else: - gpath.append('/') - fspart = True - name = node.name - if name[0] in '([': - gpath.pop() - gpath.append(name) - fspath = newfspath - return ''.join(gpath) - -class ResultLog(object): - def __init__(self, config, logfile): - self.config = config - self.logfile = logfile # preferably line buffered - - def write_log_entry(self, testpath, shortrepr, longrepr): - print_("%s %s" % (shortrepr, testpath), file=self.logfile) - for line in longrepr.splitlines(): - print_(" %s" % line, file=self.logfile) - - def log_outcome(self, node, shortrepr, longrepr): - testpath = generic_path(node) - self.write_log_entry(testpath, shortrepr, longrepr) - - def pytest_runtest_logreport(self, report): - res = self.config.hook.pytest_report_teststatus(report=report) - if res is not None: - code = 
res[1] - else: - code = report.shortrepr - if code == 'x': - longrepr = str(report.longrepr) - elif code == 'X': - longrepr = '' - elif report.passed: - longrepr = "" - elif report.failed: - longrepr = str(report.longrepr) - elif report.skipped: - longrepr = str(report.longrepr.reprcrash.message) - self.log_outcome(report.item, code, longrepr) - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - code = "F" - else: - assert report.skipped - code = "S" - longrepr = str(report.longrepr.reprcrash) - self.log_outcome(report.collector, code, longrepr) - - def pytest_internalerror(self, excrepr): - path = excrepr.reprcrash.path - self.write_log_entry(path, '!', str(excrepr)) diff --git a/py/bin/win32/py.countloc.cmd b/py/bin/win32/py.countloc.cmd deleted file mode 100644 --- a/py/bin/win32/py.countloc.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.countloc" %* \ No newline at end of file diff --git a/py/bin/env.cmd b/py/bin/env.cmd deleted file mode 100644 --- a/py/bin/env.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i diff --git a/py/bin/py.cleanup b/py/bin/py.cleanup deleted file mode 100755 --- a/py/bin/py.cleanup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycleanup() \ No newline at end of file diff --git a/py/_test/pluginmanager.py b/py/_test/pluginmanager.py deleted file mode 100644 --- a/py/_test/pluginmanager.py +++ /dev/null @@ -1,353 +0,0 @@ -""" -managing loading and interacting with pytest plugins. 
-""" -import py -import inspect -from py._plugin import hookspec - -default_plugins = ( - "default runner capture mark terminal skipping tmpdir monkeypatch " - "recwarn pdb pastebin unittest helpconfig nose assertion genscript " - "junitxml doctest").split() - -def check_old_use(mod, modname): - clsname = modname[len('pytest_'):].capitalize() + "Plugin" - assert not hasattr(mod, clsname), (mod, clsname) - -class PluginManager(object): - def __init__(self): - self.registry = Registry() - self._name2plugin = {} - self._hints = [] - self.hook = HookRelay([hookspec], registry=self.registry) - self.register(self) - for spec in default_plugins: - self.import_plugin(spec) - - def _getpluginname(self, plugin, name): - if name is None: - if hasattr(plugin, '__name__'): - name = plugin.__name__.split(".")[-1] - else: - name = id(plugin) - return name - - def register(self, plugin, name=None): - assert not self.isregistered(plugin), plugin - assert not self.registry.isregistered(plugin), plugin - name = self._getpluginname(plugin, name) - if name in self._name2plugin: - return False - self._name2plugin[name] = plugin - self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self}) - self.hook.pytest_plugin_registered(manager=self, plugin=plugin) - self.registry.register(plugin) - return True - - def unregister(self, plugin): - self.hook.pytest_plugin_unregistered(plugin=plugin) - self.registry.unregister(plugin) - for name, value in list(self._name2plugin.items()): - if value == plugin: - del self._name2plugin[name] - - def isregistered(self, plugin, name=None): - if self._getpluginname(plugin, name) in self._name2plugin: - return True - for val in self._name2plugin.values(): - if plugin == val: - return True - - def addhooks(self, spec): - self.hook._addhooks(spec, prefix="pytest_") - - def getplugins(self): - return list(self.registry) - - def skipifmissing(self, name): - if not self.hasplugin(name): - py.test.skip("plugin %r is missing" % name) - - def 
hasplugin(self, name): - try: - self.getplugin(name) - except KeyError: - return False - else: - return True - - def getplugin(self, name): - try: - return self._name2plugin[name] - except KeyError: - impname = canonical_importname(name) - return self._name2plugin[impname] - - # API for bootstrapping - # - def _envlist(self, varname): - val = py.std.os.environ.get(varname, None) - if val is not None: - return val.split(',') - return () - - def consider_env(self): - for spec in self._envlist("PYTEST_PLUGINS"): - self.import_plugin(spec) - - def consider_setuptools_entrypoints(self): - try: - from pkg_resources import iter_entry_points - except ImportError: - return # XXX issue a warning - for ep in iter_entry_points('pytest11'): - name = canonical_importname(ep.name) - if name in self._name2plugin: - continue - plugin = ep.load() - self.register(plugin, name=name) - - def consider_preparse(self, args): - for opt1,opt2 in zip(args, args[1:]): - if opt1 == "-p": - self.import_plugin(opt2) - - def consider_conftest(self, conftestmodule): - cls = getattr(conftestmodule, 'ConftestPlugin', None) - if cls is not None: - raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, " - "were removed in 1.0.0b2" % (cls,)) - if self.register(conftestmodule, name=conftestmodule.__file__): - self.consider_module(conftestmodule) - - def consider_module(self, mod): - attr = getattr(mod, "pytest_plugins", ()) - if attr: - if not isinstance(attr, (list, tuple)): - attr = (attr,) - for spec in attr: - self.import_plugin(spec) - - def import_plugin(self, spec): - assert isinstance(spec, str) - modname = canonical_importname(spec) - if modname in self._name2plugin: - return - try: - mod = importplugin(modname) - except KeyboardInterrupt: - raise - except py.test.skip.Exception: - e = py.std.sys.exc_info()[1] - self._hints.append("skipped plugin %r: %s" %((modname, e.msg))) - else: - check_old_use(mod, modname) - self.register(mod) - self.consider_module(mod) - - def 
pytest_terminal_summary(self, terminalreporter): - tw = terminalreporter._tw - if terminalreporter.config.option.traceconfig: - for hint in self._hints: - tw.line("hint: %s" % hint) - - # - # - # API for interacting with registered and instantiated plugin objects - # - # - def listattr(self, attrname, plugins=None): - return self.registry.listattr(attrname, plugins=plugins) - - def notify_exception(self, excinfo=None): - if excinfo is None: - excinfo = py.code.ExceptionInfo() - excrepr = excinfo.getrepr(funcargs=True, showlocals=True) - return self.hook.pytest_internalerror(excrepr=excrepr) - - def do_addoption(self, parser): - mname = "pytest_addoption" - methods = self.registry.listattr(mname, reverse=True) - mc = MultiCall(methods, {'parser': parser}) - mc.execute() - - def pytest_plugin_registered(self, plugin): - dic = self.call_plugin(plugin, "pytest_namespace", {}) or {} - for name, value in dic.items(): - setattr(py.test, name, value) - py.test.__all__.append(name) - if hasattr(self, '_config'): - self.call_plugin(plugin, "pytest_addoption", - {'parser': self._config._parser}) - self.call_plugin(plugin, "pytest_configure", - {'config': self._config}) - - def call_plugin(self, plugin, methname, kwargs): - return MultiCall( - methods=self.listattr(methname, plugins=[plugin]), - kwargs=kwargs, firstresult=True).execute() - - def do_configure(self, config): - assert not hasattr(self, '_config') - self._config = config - config.hook.pytest_configure(config=self._config) - - def do_unconfigure(self, config): - config = self._config - del self._config - config.hook.pytest_unconfigure(config=config) - config.pluginmanager.unregister(self) - -def canonical_importname(name): - name = name.lower() - modprefix = "pytest_" - if not name.startswith(modprefix): - name = modprefix + name - return name - -def importplugin(importspec): - try: - return __import__(importspec) - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - 
try: - return __import__("py._plugin.%s" %(importspec), - None, None, '__doc__') - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - # show the original exception, not the failing internal one - return __import__(importspec) - - -class MultiCall: - """ execute a call into multiple python functions/methods. """ - - def __init__(self, methods, kwargs, firstresult=False): - self.methods = methods[:] - self.kwargs = kwargs.copy() - self.kwargs['__multicall__'] = self - self.results = [] - self.firstresult = firstresult - - def __repr__(self): - status = "%d results, %d meths" % (len(self.results), len(self.methods)) - return "" %(status, self.kwargs) - - def execute(self): - while self.methods: - method = self.methods.pop() - kwargs = self.getkwargs(method) - res = method(**kwargs) - if res is not None: - self.results.append(res) - if self.firstresult: - return res - if not self.firstresult: - return self.results - - def getkwargs(self, method): - kwargs = {} - for argname in varnames(method): - try: - kwargs[argname] = self.kwargs[argname] - except KeyError: - pass # might be optional param - return kwargs - -def varnames(func): - ismethod = inspect.ismethod(func) - rawcode = py.code.getrawcode(func) - try: - return rawcode.co_varnames[ismethod:] - except AttributeError: - return () - -class Registry: - """ - Manage Plugins: register/unregister call calls to plugins. 
- """ - def __init__(self, plugins=None): - if plugins is None: - plugins = [] - self._plugins = plugins - - def register(self, plugin): - assert not isinstance(plugin, str) - assert not plugin in self._plugins - self._plugins.append(plugin) - - def unregister(self, plugin): - self._plugins.remove(plugin) - - def isregistered(self, plugin): - return plugin in self._plugins - - def __iter__(self): - return iter(self._plugins) - - def listattr(self, attrname, plugins=None, reverse=False): - l = [] - if plugins is None: - plugins = self._plugins - for plugin in plugins: - try: - l.append(getattr(plugin, attrname)) - except AttributeError: - continue - if reverse: - l.reverse() - return l - -class HookRelay: - def __init__(self, hookspecs, registry, prefix="pytest_"): - if not isinstance(hookspecs, list): - hookspecs = [hookspecs] - self._hookspecs = [] - self._registry = registry - for hookspec in hookspecs: - self._addhooks(hookspec, prefix) - - def _addhooks(self, hookspecs, prefix): - self._hookspecs.append(hookspecs) - added = False - for name, method in vars(hookspecs).items(): - if name.startswith(prefix): - if not method.__doc__: - raise ValueError("docstring required for hook %r, in %r" - % (method, hookspecs)) - firstresult = getattr(method, 'firstresult', False) - hc = HookCaller(self, name, firstresult=firstresult) - setattr(self, name, hc) - added = True - #print ("setting new hook", name) - if not added: - raise ValueError("did not find new %r hooks in %r" %( - prefix, hookspecs,)) - - - def _performcall(self, name, multicall): - return multicall.execute() - -class HookCaller: - def __init__(self, hookrelay, name, firstresult): - self.hookrelay = hookrelay - self.name = name - self.firstresult = firstresult - - def __repr__(self): - return "" %(self.name,) - - def __call__(self, **kwargs): - methods = self.hookrelay._registry.listattr(self.name) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, 
mc) - - def pcall(self, plugins, **kwargs): - methods = self.hookrelay._registry.listattr(self.name, plugins=plugins) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, mc) - diff --git a/py/_cmdline/pytest.py b/py/_cmdline/pytest.py deleted file mode 100755 --- a/py/_cmdline/pytest.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -import py - -def main(args=None): - raise SystemExit(py.test.cmdline.main(args)) diff --git a/py/_plugin/standalonetemplate.py b/py/_plugin/standalonetemplate.py deleted file mode 100755 --- a/py/_plugin/standalonetemplate.py +++ /dev/null @@ -1,63 +0,0 @@ -#! /usr/bin/env python - -sources = """ - at SOURCES@""" - -import sys -import base64 -import zlib -import imp - -class DictImporter(object): - def __init__(self, sources): - self.sources = sources - - def find_module(self, fullname, path=None): - if fullname in self.sources: - return self - if fullname+'.__init__' in self.sources: - return self - return None - - def load_module(self, fullname): - # print "load_module:", fullname - from types import ModuleType - try: - s = self.sources[fullname] - is_pkg = False - except KeyError: - s = self.sources[fullname+'.__init__'] - is_pkg = True - - co = compile(s, fullname, 'exec') - module = sys.modules.setdefault(fullname, ModuleType(fullname)) - module.__file__ = "%s/%s" % (__file__, fullname) - module.__loader__ = self - if is_pkg: - module.__path__ = [fullname] - - do_exec(co, module.__dict__) - return sys.modules[fullname] - - def get_source(self, name): - res = self.sources.get(name) - if res is None: - res = self.sources.get(name+'.__init__') - return res - -if __name__ == "__main__": - if sys.version_info >= (3,0): - exec("def do_exec(co, loc): exec(co, loc)\n") - import pickle - sources = sources.encode("ascii") # ensure bytes - sources = pickle.loads(zlib.decompress(base64.decodebytes(sources))) - else: - import cPickle as pickle - exec("def do_exec(co, loc): exec 
co in loc\n") - sources = pickle.loads(zlib.decompress(base64.decodestring(sources))) - - importer = DictImporter(sources) - sys.meta_path.append(importer) - - import py - py.cmdline.pytest() diff --git a/py/bin/win32/py.lookup.cmd b/py/bin/win32/py.lookup.cmd deleted file mode 100644 --- a/py/bin/win32/py.lookup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.lookup" %* \ No newline at end of file diff --git a/py/bin/win32/py.which.cmd b/py/bin/win32/py.which.cmd deleted file mode 100644 --- a/py/bin/win32/py.which.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.which" %* \ No newline at end of file diff --git a/py/_compat/dep_optparse.py b/py/_compat/dep_optparse.py deleted file mode 100644 --- a/py/_compat/dep_optparse.py +++ /dev/null @@ -1,4 +0,0 @@ -import py -py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg") - -optparse = py.std.optparse diff --git a/py/_compat/dep_textwrap.py b/py/_compat/dep_textwrap.py deleted file mode 100644 --- a/py/_compat/dep_textwrap.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", - stacklevel="apipkg") -textwrap = py.std.textwrap diff --git a/py/apipkg.py b/py/apipkg.py deleted file mode 100644 --- a/py/apipkg.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -apipkg: control the exported namespace of a python package. - -see http://pypi.python.org/pypi/apipkg - -(c) holger krekel, 2009 - MIT license -""" -import sys -from types import ModuleType - -__version__ = "1.0b6" - -def initpkg(pkgname, exportdefs): - """ initialize given package from the export definitions. 
""" - mod = ApiModule(pkgname, exportdefs, implprefix=pkgname) - oldmod = sys.modules[pkgname] - mod.__file__ = getattr(oldmod, '__file__', None) - mod.__version__ = getattr(oldmod, '__version__', '0') - for name in ('__path__', '__loader__'): - if hasattr(oldmod, name): - setattr(mod, name, getattr(oldmod, name)) - sys.modules[pkgname] = mod - -def importobj(modpath, attrname): - module = __import__(modpath, None, None, ['__doc__']) - return getattr(module, attrname) - -class ApiModule(ModuleType): - def __init__(self, name, importspec, implprefix=None): - self.__name__ = name - self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] - self.__map__ = {} - self.__implprefix__ = implprefix or name - for name, importspec in importspec.items(): - if isinstance(importspec, dict): - subname = '%s.%s'%(self.__name__, name) - apimod = ApiModule(subname, importspec, implprefix) - sys.modules[subname] = apimod - setattr(self, name, apimod) - else: - modpath, attrname = importspec.split(':') - if modpath[0] == '.': - modpath = implprefix + modpath - if name == '__doc__': - self.__doc__ = importobj(modpath, attrname) - else: - self.__map__[name] = (modpath, attrname) - - def __repr__(self): - l = [] - if hasattr(self, '__version__'): - l.append("version=" + repr(self.__version__)) - if hasattr(self, '__file__'): - l.append('from ' + repr(self.__file__)) - if l: - return '' % (self.__name__, " ".join(l)) - return '' % (self.__name__,) - - def __makeattr(self, name): - """lazily compute value for name or raise AttributeError if unknown.""" - target = None - if '__onfirstaccess__' in self.__map__: - target = self.__map__.pop('__onfirstaccess__') - importobj(*target)() - try: - modpath, attrname = self.__map__[name] - except KeyError: - if target is not None and name != '__onfirstaccess__': - # retry, onfirstaccess might have set attrs - return getattr(self, name) - raise AttributeError(name) - else: - result = importobj(modpath, attrname) - setattr(self, name, result) 
- try: - del self.__map__[name] - except KeyError: - pass # in a recursive-import situation a double-del can happen - return result - - __getattr__ = __makeattr - - def __dict__(self): - # force all the content of the module to be loaded when __dict__ is read - dictdescr = ModuleType.__dict__['__dict__'] - dict = dictdescr.__get__(self) - if dict is not None: - hasattr(self, 'some') - for name in self.__all__: - try: - self.__makeattr(name) - except AttributeError: - pass - return dict - __dict__ = property(__dict__) diff --git a/pypy/tool/test/test_conftest1.py b/pypy/tool/test/test_conftest1.py deleted file mode 100644 --- a/pypy/tool/test/test_conftest1.py +++ /dev/null @@ -1,32 +0,0 @@ - -import py - -innertest = py.path.local(__file__).dirpath('conftest1_innertest.py') -pytest_plugins = "pytest_pytester" - -class TestPyPyTests: - def test_select_interplevel(self, testdir): - sorter = testdir.inline_run("-k", "interplevel", innertest) - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - assert not skipped and not failed - for repevent in passed: - assert repevent.item.name in ('test_something', 'test_method') - - def test_select_applevel(self, testdir): - sorter = testdir.inline_run("-k", "applevel", innertest) - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - assert not skipped and not failed - for repevent in passed: - assert repevent.item.name in ('app_test_something', 'test_method_app') - - def test_appdirect(self, testdir): - sorter = testdir.inline_run(innertest, '-k', 'applevel', '--runappdirect') - passed, skipped, failed = sorter.listoutcomes() - assert len(passed) == 2 - print passed - names = [x.item.name for x in passed] - assert 'app_test_something' in names - assert 'test_method_app' in names - diff --git a/py/bin/win32/py.cleanup.cmd b/py/bin/win32/py.cleanup.cmd deleted file mode 100644 --- a/py/bin/win32/py.cleanup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.cleanup" 
%* \ No newline at end of file diff --git a/py/_plugin/pytest_terminal.py b/py/_plugin/pytest_terminal.py deleted file mode 100644 --- a/py/_plugin/pytest_terminal.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -Implements terminal reporting of the full testing process. - -This is a good source for looking at the various reporting hooks. -""" -import py -import sys - -optionalhook = py.test.mark.optionalhook - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting", "reporting", after="general") - group._addoption('-v', '--verbose', action="count", - dest="verbose", default=0, help="increase verbosity."), - group._addoption('-r', - action="store", dest="reportchars", default=None, metavar="chars", - help="show extra test summary info as specified by chars (f)ailed, " - "(s)skipped, (x)failed, (X)passed.") - group._addoption('-l', '--showlocals', - action="store_true", dest="showlocals", default=False, - help="show locals in tracebacks (disabled by default).") - group._addoption('--report', - action="store", dest="report", default=None, metavar="opts", - help="(deprecated, use -r)") - group._addoption('--tb', metavar="style", - action="store", dest="tbstyle", default='long', - type="choice", choices=['long', 'short', 'no', 'line'], - help="traceback print mode (long/short/line/no).") - group._addoption('--fulltrace', - action="store_true", dest="fulltrace", default=False, - help="don't cut any tracebacks (default is to cut).") - group._addoption('--funcargs', - action="store_true", dest="showfuncargs", default=False, - help="show available function arguments, sorted by plugin") - -def pytest_configure(config): - if config.option.collectonly: - reporter = CollectonlyReporter(config) - elif config.option.showfuncargs: - config.setsessionclass(ShowFuncargSession) - reporter = None - else: - reporter = TerminalReporter(config) - if reporter: - # XXX see remote.py's XXX - for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth': - if 
hasattr(config, attr): - #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr) - name = attr.split("_")[-1] - assert hasattr(self.reporter._tw, name), name - setattr(reporter._tw, name, getattr(config, attr)) - config.pluginmanager.register(reporter, 'terminalreporter') - -def getreportopt(config): - reportopts = "" - optvalue = config.getvalue("report") - if optvalue: - py.builtin.print_("DEPRECATED: use -r instead of --report option.", - file=py.std.sys.stderr) - if optvalue: - for setting in optvalue.split(","): - setting = setting.strip() - if setting == "skipped": - reportopts += "s" - elif setting == "xfailed": - reportopts += "x" - reportchars = config.getvalue("reportchars") - if reportchars: - for char in reportchars: - if char not in reportopts: - reportopts += char - return reportopts - -class TerminalReporter: - def __init__(self, config, file=None): - self.config = config - self.stats = {} - self.curdir = py.path.local() - if file is None: - file = py.std.sys.stdout - self._tw = py.io.TerminalWriter(file) - self.currentfspath = None - self.gateway2info = {} - self.reportchars = getreportopt(config) - - def hasopt(self, char): - char = {'xfailed': 'x', 'skipped': 's'}.get(char,char) - return char in self.reportchars - - def write_fspath_result(self, fspath, res): - fspath = self.curdir.bestrelpath(fspath) - if fspath != self.currentfspath: - self._tw.line() - relpath = self.curdir.bestrelpath(fspath) - self._tw.write(relpath + " ") - self.currentfspath = fspath - self._tw.write(res) - - def write_ensure_prefix(self, prefix, extra="", **kwargs): - if self.currentfspath != prefix: - self._tw.line() - self.currentfspath = prefix - self._tw.write(prefix) - if extra: - self._tw.write(extra, **kwargs) - self.currentfspath = -2 - - def ensure_newline(self): - if self.currentfspath: - self._tw.line() - self.currentfspath = None - - def write_line(self, line, **markup): - line = str(line) - self.ensure_newline() - self._tw.line(line, **markup) - - def 
write_sep(self, sep, title=None, **markup): - self.ensure_newline() - self._tw.sep(sep, title, **markup) - - def getcategoryletterword(self, rep): - res = self.config.hook.pytest_report_teststatus(report=rep) - if res: - return res - for cat in 'skipped failed passed ???'.split(): - if getattr(rep, cat, None): - break - return cat, self.getoutcomeletter(rep), self.getoutcomeword(rep) - - def getoutcomeletter(self, rep): - return rep.shortrepr - - def getoutcomeword(self, rep): - if rep.passed: - return "PASS", dict(green=True) - elif rep.failed: - return "FAIL", dict(red=True) - elif rep.skipped: - return "SKIP" - else: - return "???", dict(red=True) - - def gettestid(self, item, relative=True): - fspath = item.fspath - chain = [x for x in item.listchain() if x.fspath == fspath] - chain = chain[1:] - names = [x.name for x in chain if x.name != "()"] - path = item.fspath - if relative: - relpath = path.relto(self.curdir) - if relpath: - path = relpath - names.insert(0, str(path)) - return "::".join(names) - - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.write_line("INTERNALERROR> " + line) - - def pytest_plugin_registered(self, plugin): - if self.config.option.traceconfig: - msg = "PLUGIN registered: %s" %(plugin,) - # XXX this event may happen during setup/teardown time - # which unfortunately captures our output here - # which garbles our output if we use self.write_line - self.write_line(msg) - - @optionalhook - def pytest_gwmanage_newgateway(self, gateway, platinfo): - #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec)) - d = {} - d['version'] = repr_pythonversion(platinfo.version_info) - d['id'] = gateway.id - d['spec'] = gateway.spec._spec - d['platform'] = platinfo.platform - if self.config.option.verbose: - d['extra'] = "- " + platinfo.executable - else: - d['extra'] = "" - d['cwd'] = platinfo.cwd - infoline = ("[%(id)s] %(spec)s -- platform %(platform)s, " - "Python 
%(version)s " - "cwd: %(cwd)s" - "%(extra)s" % d) - self.write_line(infoline) - self.gateway2info[gateway] = infoline - - @optionalhook - def pytest_testnodeready(self, node): - self.write_line("[%s] txnode ready to receive tests" %(node.gateway.id,)) - - @optionalhook - def pytest_testnodedown(self, node, error): - if error: - self.write_line("[%s] node down, error: %s" %(node.gateway.id, error)) - - @optionalhook - def pytest_rescheduleitems(self, items): - if self.config.option.debug: - self.write_sep("!", "RESCHEDULING %s " %(items,)) - - @optionalhook - def pytest_looponfailinfo(self, failreports, rootdirs): - if failreports: - self.write_sep("#", "LOOPONFAILING", red=True) - for report in failreports: - loc = self._getcrashline(report) - self.write_line(loc, red=True) - self.write_sep("#", "waiting for changes") - for rootdir in rootdirs: - self.write_line("### Watching: %s" %(rootdir,), bold=True) - - - def pytest_trace(self, category, msg): - if self.config.option.debug or \ - self.config.option.traceconfig and category.find("config") != -1: - self.write_line("[%s] %s" %(category, msg)) - - def pytest_deselected(self, items): - self.stats.setdefault('deselected', []).append(items) - - def pytest_itemstart(self, item, node=None): - if getattr(self.config.option, 'dist', 'no') != "no": - # for dist-testing situations itemstart means we - # queued the item for sending, not interesting (unless debugging) - if self.config.option.debug: - line = self._reportinfoline(item) - extra = "" - if node: - extra = "-> [%s]" % node.gateway.id - self.write_ensure_prefix(line, extra) - else: - if self.config.option.verbose: - line = self._reportinfoline(item) - self.write_ensure_prefix(line, "") - else: - # ensure that the path is printed before the - # 1st test of a module starts running - - self.write_fspath_result(self._getfspath(item), "") - - def pytest__teardown_final_logerror(self, report): - self.stats.setdefault("error", []).append(report) - - def 
pytest_runtest_logreport(self, report): - rep = report - cat, letter, word = self.getcategoryletterword(rep) - if not letter and not word: - # probably passed setup/teardown - return - if isinstance(word, tuple): - word, markup = word - else: - markup = {} - self.stats.setdefault(cat, []).append(rep) - if not self.config.option.verbose: - self.write_fspath_result(self._getfspath(rep.item), letter) - else: - line = self._reportinfoline(rep.item) - if not hasattr(rep, 'node'): - self.write_ensure_prefix(line, word, **markup) - else: - self.ensure_newline() - if hasattr(rep, 'node'): - self._tw.write("[%s] " % rep.node.gateway.id) - self._tw.write(word, **markup) - self._tw.write(" " + line) - self.currentfspath = -2 - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.stats.setdefault("error", []).append(report) - msg = report.longrepr.reprcrash.message - self.write_fspath_result(report.collector.fspath, "E") - elif report.skipped: - self.stats.setdefault("skipped", []).append(report) - self.write_fspath_result(report.collector.fspath, "S") - - def pytest_sessionstart(self, session): - self.write_sep("=", "test session starts", bold=True) - self._sessionstarttime = py.std.time.time() - - verinfo = ".".join(map(str, sys.version_info[:3])) - msg = "platform %s -- Python %s" % (sys.platform, verinfo) - msg += " -- pytest-%s" % (py.__version__) - if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None): - msg += " -- " + str(sys.executable) - self.write_line(msg) - lines = self.config.hook.pytest_report_header(config=self.config) - lines.reverse() - for line in flatten(lines): - self.write_line(line) - for i, testarg in enumerate(self.config.args): - self.write_line("test object %d: %s" %(i+1, testarg)) - - def pytest_sessionfinish(self, exitstatus, __multicall__): - __multicall__.execute() - self._tw.line("") - if exitstatus in (0, 1, 2): - self.summary_errors() - 
self.summary_failures() - self.config.hook.pytest_terminal_summary(terminalreporter=self) - if exitstatus == 2: - self._report_keyboardinterrupt() - self.summary_deselected() - self.summary_stats() - - def pytest_keyboard_interrupt(self, excinfo): - self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) - - def _report_keyboardinterrupt(self): - excrepr = self._keyboardinterrupt_memo - msg = excrepr.reprcrash.message - self.write_sep("!", msg) - if "KeyboardInterrupt" in msg: - if self.config.getvalue("fulltrace"): - excrepr.toterminal(self._tw) - else: - excrepr.reprcrash.toterminal(self._tw) - - def _getcrashline(self, report): - try: - return report.longrepr.reprcrash - except AttributeError: - return str(report.longrepr)[:50] - - def _reportinfoline(self, item): - collect_fspath = self._getfspath(item) - fspath, lineno, msg = self._getreportinfo(item) - if fspath and fspath != collect_fspath: - fspath = "%s <- %s" % ( - self.curdir.bestrelpath(collect_fspath), - self.curdir.bestrelpath(fspath)) - elif fspath: - fspath = self.curdir.bestrelpath(fspath) - if lineno is not None: - lineno += 1 - if fspath and lineno and msg: - line = "%(fspath)s:%(lineno)s: %(msg)s" - elif fspath and msg: - line = "%(fspath)s: %(msg)s" - elif fspath and lineno: - line = "%(fspath)s:%(lineno)s %(extrapath)s" - else: - line = "[noreportinfo]" - return line % locals() + " " - - def _getfailureheadline(self, rep): - if hasattr(rep, "collector"): - return str(rep.collector.fspath) - elif hasattr(rep, 'item'): - fspath, lineno, msg = self._getreportinfo(rep.item) - return msg - else: - return "test session" - - def _getreportinfo(self, item): - try: - return item.__reportinfo - except AttributeError: - pass - reportinfo = item.config.hook.pytest_report_iteminfo(item=item) - # cache on item - item.__reportinfo = reportinfo - return reportinfo - - def _getfspath(self, item): - try: - return item.fspath - except AttributeError: - fspath, lineno, msg = self._getreportinfo(item) - 
return fspath - - # - # summaries for sessionfinish - # - - def summary_failures(self): - tbstyle = self.config.getvalue("tbstyle") - if 'failed' in self.stats and tbstyle != "no": - self.write_sep("=", "FAILURES") - for rep in self.stats['failed']: - if tbstyle == "line": - line = self._getcrashline(rep) - self.write_line(line) - else: - msg = self._getfailureheadline(rep) - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def summary_errors(self): - if 'error' in self.stats and self.config.option.tbstyle != "no": - self.write_sep("=", "ERRORS") - for rep in self.stats['error']: - msg = self._getfailureheadline(rep) - if not hasattr(rep, 'when'): - # collect - msg = "ERROR during collection " + msg - elif rep.when == "setup": - msg = "ERROR at setup of " + msg - elif rep.when == "teardown": - msg = "ERROR at teardown of " + msg - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def write_platinfo(self, rep): - if hasattr(rep, 'node'): - self.write_line(self.gateway2info.get( - rep.node.gateway, - "node %r (platinfo not found? 
strange)") - [:self._tw.fullwidth-1]) - - def summary_stats(self): - session_duration = py.std.time.time() - self._sessionstarttime - - keys = "failed passed skipped deselected".split() - for key in self.stats.keys(): - if key not in keys: - keys.append(key) - parts = [] - for key in keys: - val = self.stats.get(key, None) - if val: - parts.append("%d %s" %(len(val), key)) - line = ", ".join(parts) - # XXX coloring - self.write_sep("=", "%s in %.2f seconds" %(line, session_duration)) - - def summary_deselected(self): - if 'deselected' in self.stats: - self.write_sep("=", "%d tests deselected by %r" %( - len(self.stats['deselected']), self.config.option.keyword), bold=True) - - -class CollectonlyReporter: - INDENT = " " - - def __init__(self, config, out=None): - self.config = config - if out is None: - out = py.std.sys.stdout - self.out = py.io.TerminalWriter(out) - self.indent = "" - self._failed = [] - - def outindent(self, line): - self.out.line(self.indent + str(line)) - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.out.line("INTERNALERROR> " + line) - - def pytest_collectstart(self, collector): - self.outindent(collector) - self.indent += self.INDENT - - def pytest_itemstart(self, item, node=None): - self.outindent(item) - - def pytest_collectreport(self, report): - if not report.passed: - self.outindent("!!! %s !!!" 
% report.longrepr.reprcrash.message) - self._failed.append(report) - self.indent = self.indent[:-len(self.INDENT)] - - def pytest_sessionfinish(self, session, exitstatus): - if self._failed: - self.out.sep("!", "collection failures") - for rep in self._failed: - rep.toterminal(self.out) - - -def repr_pythonversion(v=None): - if v is None: - v = sys.version_info - try: - return "%s.%s.%s-%s-%s" % v - except (TypeError, ValueError): - return str(v) - -def flatten(l): - for x in l: - if isinstance(x, (list, tuple)): - for y in flatten(x): - yield y - else: - yield x - -from py._test.session import Session -class ShowFuncargSession(Session): - def main(self, colitems): - self.fspath = py.path.local() - self.sessionstarts() - try: - self.showargs(colitems[0]) - finally: - self.sessionfinishes(exitstatus=1) - - def showargs(self, colitem): - tw = py.io.TerminalWriter() - from py._test.funcargs import getplugins - from py._test.funcargs import FuncargRequest - plugins = getplugins(colitem, withpy=True) - verbose = self.config.getvalue("verbose") - for plugin in plugins: - available = [] - for name, factory in vars(plugin).items(): - if name.startswith(FuncargRequest._argprefix): - name = name[len(FuncargRequest._argprefix):] - if name not in available: - available.append([name, factory]) - if available: - pluginname = plugin.__name__ - for name, factory in available: - loc = self.getlocation(factory) - if verbose: - funcargspec = "%s -- %s" %(name, loc,) - else: - funcargspec = name - tw.line(funcargspec, green=True) - doc = factory.__doc__ or "" - if doc: - for line in doc.split("\n"): - tw.line(" " + line.strip()) - else: - tw.line(" %s: no docstring available" %(loc,), - red=True) - - def getlocation(self, function): - import inspect - fn = py.path.local(inspect.getfile(function)) - lineno = py.builtin._getcode(function).co_firstlineno - if fn.relto(self.fspath): - fn = fn.relto(self.fspath) - return "%s:%d" %(fn, lineno+1) diff --git a/py/_test/cmdline.py 
b/py/_test/cmdline.py deleted file mode 100644 --- a/py/_test/cmdline.py +++ /dev/null @@ -1,24 +0,0 @@ -import py -import sys - -# -# main entry point -# - -def main(args=None): - if args is None: - args = sys.argv[1:] - config = py.test.config - try: - config.parse(args) - config.pluginmanager.do_configure(config) - session = config.initsession() - colitems = config.getinitialnodes() - exitstatus = session.main(colitems) - config.pluginmanager.do_unconfigure(config) - except config.Error: - e = sys.exc_info()[1] - sys.stderr.write("ERROR: %s\n" %(e.args[0],)) - exitstatus = 3 - py.test.config = py.test.config.__class__() - return exitstatus diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -1,6 +1,8 @@ +import os from pypy.jit.metainterp.optimizeutil import _findall from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.rlib.objectmodel import we_are_translated +from pypy.jit.metainterp.jitexc import JitException from pypy.jit.metainterp.optimizeopt.optimizer import Optimization @@ -10,6 +12,9 @@ self.var_index_item = None self.var_index_indexvalue = None +class BogusPureField(JitException): + pass + class OptHeap(Optimization): """Cache repeated heap accesses""" @@ -298,6 +303,12 @@ d[value] = fieldvalue def optimize_SETFIELD_GC(self, op): + if self.has_pure_result(rop.GETFIELD_GC_PURE, [op.getarg(0)], + op.getdescr()): + os.write(2, '[bogus _immutable_field_ declaration: %s]\n' % + (op.getdescr().repr_of_descr())) + raise BogusPureField + # value = self.getvalue(op.getarg(0)) fieldvalue = self.getvalue(op.getarg(1)) cached_fieldvalue = self.read_cached_field(op.getdescr(), value) diff --git a/py/_cmdline/pylookup.py b/py/_cmdline/pylookup.py deleted file mode 100755 --- a/py/_cmdline/pylookup.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.lookup [search_directory] SEARCH_STRING 
[options] - -Looks recursively at Python files for a SEARCH_STRING, starting from the -present working directory. Prints the line, with the filename and line-number -prepended.""" - -import sys, os -import py -from py.io import ansi_print, get_terminal_width -import re - -def rec(p): - return p.check(dotfile=0) - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase", - help="ignore case distinctions") -parser.add_option("-C", "--context", action="store", type="int", dest="context", - default=0, help="How many lines of output to show") - -terminal_width = get_terminal_width() - -def find_indexes(search_line, string): - indexes = [] - before = 0 - while 1: - i = search_line.find(string, before) - if i == -1: - break - indexes.append(i) - before = i + len(string) - return indexes - -def main(): - (options, args) = parser.parse_args() - if len(args) == 2: - search_dir, string = args - search_dir = py.path.local(search_dir) - else: - search_dir = py.path.local() - string = args[0] - if options.ignorecase: - string = string.lower() - for x in search_dir.visit('*.py', rec): - # match filename directly - s = x.relto(search_dir) - if options.ignorecase: - s = s.lower() - if s.find(string) != -1: - sys.stdout.write("%s: filename matches %r" %(x, string) + "\n") - - try: - s = x.read() - except py.error.ENOENT: - pass # whatever, probably broken link (ie emacs lock) - searchs = s - if options.ignorecase: - searchs = s.lower() - if s.find(string) != -1: - lines = s.splitlines() - if options.ignorecase: - searchlines = s.lower().splitlines() - else: - searchlines = lines - for i, (line, searchline) in enumerate(zip(lines, searchlines)): - indexes = find_indexes(searchline, string) - if not indexes: - continue - if not options.context: - sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1)) - last_index = 0 - for index in indexes: - sys.stdout.write(line[last_index: index]) - 
ansi_print(line[index: index+len(string)], - file=sys.stdout, esc=31, newline=False) - last_index = index + len(string) - sys.stdout.write(line[last_index:] + "\n") - else: - context = (options.context)/2 - for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)): - print("%s:%d: %s" %(x.relto(search_dir), count+1, lines[count].rstrip())) - print("-" * terminal_width) diff --git a/py/_path/gateway/__init__.py b/py/_path/gateway/__init__.py deleted file mode 100644 --- a/py/_path/gateway/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/_path/gateway/channeltest2.py b/py/_path/gateway/channeltest2.py deleted file mode 100644 --- a/py/_path/gateway/channeltest2.py +++ /dev/null @@ -1,21 +0,0 @@ -import py -from remotepath import RemotePath - - -SRC = open('channeltest.py', 'r').read() - -SRC += ''' -import py -srv = PathServer(channel.receive()) -channel.send(srv.p2c(py.path.local("/tmp"))) -''' - - -#gw = execnet.SshGateway('codespeak.net') -gw = execnet.PopenGateway() -gw.remote_init_threads(5) -c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr) -subchannel = gw._channelfactory.new() -c.send(subchannel) - -p = RemotePath(subchannel, c.receive()) diff --git a/py/_test/session.py b/py/_test/session.py deleted file mode 100644 --- a/py/_test/session.py +++ /dev/null @@ -1,135 +0,0 @@ -""" basic test session implementation. - -* drives collection of tests -* triggers executions of tests -* produces events used by reporting -""" - -import py - -# exitcodes for the command line -EXIT_OK = 0 -EXIT_TESTSFAILED = 1 -EXIT_INTERRUPTED = 2 -EXIT_INTERNALERROR = 3 -EXIT_NOHOSTS = 4 - -# imports used for genitems() -Item = py.test.collect.Item -Collector = py.test.collect.Collector - -class Session(object): - nodeid = "" - class Interrupted(KeyboardInterrupt): - """ signals an interrupted test run. 
""" - __module__ = 'builtins' # for py3 - - def __init__(self, config): - self.config = config - self.pluginmanager = config.pluginmanager # shortcut - self.pluginmanager.register(self) - self._testsfailed = 0 - self._nomatch = False - self.shouldstop = False - - def genitems(self, colitems, keywordexpr=None): - """ yield Items from iterating over the given colitems. """ - if colitems: - colitems = list(colitems) - while colitems: - next = colitems.pop(0) - if isinstance(next, (tuple, list)): - colitems[:] = list(next) + colitems - continue - assert self.pluginmanager is next.config.pluginmanager - if isinstance(next, Item): - remaining = self.filteritems([next]) - if remaining: - self.config.hook.pytest_itemstart(item=next) - yield next - else: - assert isinstance(next, Collector) - self.config.hook.pytest_collectstart(collector=next) - rep = self.config.hook.pytest_make_collect_report(collector=next) - if rep.passed: - for x in self.genitems(rep.result, keywordexpr): - yield x - self.config.hook.pytest_collectreport(report=rep) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - - def filteritems(self, colitems): - """ return items to process (some may be deselected)""" - keywordexpr = self.config.option.keyword - if not keywordexpr or self._nomatch: - return colitems - if keywordexpr[-1] == ":": - keywordexpr = keywordexpr[:-1] - remaining = [] - deselected = [] - for colitem in colitems: - if isinstance(colitem, Item): - if colitem._skipbykeyword(keywordexpr): - deselected.append(colitem) - continue - remaining.append(colitem) - if deselected: - self.config.hook.pytest_deselected(items=deselected) - if self.config.option.keyword.endswith(":"): - self._nomatch = True - return remaining - - def collect(self, colitems): - keyword = self.config.option.keyword - for x in self.genitems(colitems, keyword): - yield x - - def sessionstarts(self): - """ setup any neccessary resources ahead of the test run. 
""" - self.config.hook.pytest_sessionstart(session=self) - - def pytest_runtest_logreport(self, report): - if report.failed: - self._testsfailed += 1 - maxfail = self.config.getvalue("maxfail") - if maxfail and self._testsfailed >= maxfail: - self.shouldstop = "stopping after %d failures" % ( - self._testsfailed) - pytest_collectreport = pytest_runtest_logreport - - def sessionfinishes(self, exitstatus): - """ teardown any resources after a test run. """ - self.config.hook.pytest_sessionfinish( - session=self, - exitstatus=exitstatus, - ) - - def main(self, colitems): - """ main loop for running tests. """ - self.shouldstop = False - self.sessionstarts() - exitstatus = EXIT_OK - try: - self._mainloop(colitems) - if self._testsfailed: - exitstatus = EXIT_TESTSFAILED - self.sessionfinishes(exitstatus=exitstatus) - except KeyboardInterrupt: - excinfo = py.code.ExceptionInfo() - self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo) - exitstatus = EXIT_INTERRUPTED - except: - excinfo = py.code.ExceptionInfo() - self.config.pluginmanager.notify_exception(excinfo) - exitstatus = EXIT_INTERNALERROR - if exitstatus in (EXIT_INTERNALERROR, EXIT_INTERRUPTED): - self.sessionfinishes(exitstatus=exitstatus) - return exitstatus - - def _mainloop(self, colitems): - for item in self.collect(colitems): - if not self.config.option.collectonly: - item.config.hook.pytest_runtest_protocol(item=item) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - diff --git a/py/_code/oldmagic2.py b/py/_code/oldmagic2.py deleted file mode 100644 --- a/py/_code/oldmagic2.py +++ /dev/null @@ -1,6 +0,0 @@ - -import py - -py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2) - -from py.code import _AssertionError as AssertionError From commits-noreply at bitbucket.org Wed Mar 16 11:14:44 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 16 Mar 2011 11:14:44 +0100 (CET) Subject: [pypy-svn] pypy default: Fixes 
test_virtual_streq_bug by introducing a dict storgin call_pure results indexed by its constant arguments. Breaks test_ztranslation Message-ID: <20110316101444.5CAC8282BDA@codespeak.net> Author: Hakan Ardo Branch: Changeset: r42705:581436e2ded2 Date: 2011-03-16 10:09 +0100 http://bitbucket.org/pypy/pypy/changeset/581436e2ded2/ Log: Fixes test_virtual_streq_bug by introducing a dict storgin call_pure results indexed by its constant arguments. Breaks test_ztranslation diff --git a/pypy/jit/metainterp/compile.py b/pypy/jit/metainterp/compile.py --- a/pypy/jit/metainterp/compile.py +++ b/pypy/jit/metainterp/compile.py @@ -39,7 +39,10 @@ def create_empty_loop(metainterp, name_prefix=''): name = metainterp.staticdata.stats.name_for_new_loop() - return TreeLoop(name_prefix + name) + loop = TreeLoop(name_prefix + name) + loop.call_pure_results = metainterp.call_pure_results + return loop + def make_loop_token(nb_args, jitdriver_sd): loop_token = LoopToken() diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py --- a/pypy/jit/metainterp/history.py +++ b/pypy/jit/metainterp/history.py @@ -787,6 +787,7 @@ inputargs = None operations = None token = None + call_pure_results = None def __init__(self, name): self.name = name diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -21,7 +21,7 @@ from pypy.rlib.objectmodel import specialize from pypy.jit.codewriter.jitcode import JitCode, SwitchDictDescr, MissingLiveness from pypy.jit.codewriter import heaptracker, longlong -from pypy.jit.metainterp.optimizeutil import RetraceLoop +from pypy.jit.metainterp.optimizeutil import RetraceLoop, args_dict_box, args_dict # ____________________________________________________________ @@ -1410,6 +1410,7 @@ self.free_frames_list = [] self.last_exc_value_box = None self.retracing_loop_from = None + self.call_pure_results = args_dict_box() def perform_call(self, jitcode, 
boxes, greenkey=None): # causes the metainterp to enter the given subfunction @@ -2278,7 +2279,9 @@ return resbox_as_const # not all constants (so far): turn CALL into CALL_PURE, which might # be either removed later by optimizeopt or turned back into CALL. - newop = op.copy_and_change(rop.CALL_PURE, args=[resbox_as_const]+op.getarglist()) + arg_consts = [a.constbox() for a in op.getarglist()] + self.call_pure_results[arg_consts] = resbox_as_const + newop = op.copy_and_change(rop.CALL_PURE, args=op.getarglist()) self.history.operations[-1] = newop return resbox diff --git a/pypy/jit/metainterp/optimizeopt/string.py b/pypy/jit/metainterp/optimizeopt/string.py --- a/pypy/jit/metainterp/optimizeopt/string.py +++ b/pypy/jit/metainterp/optimizeopt/string.py @@ -652,7 +652,7 @@ if not self.enabled: self.emit_operation(op) return - + opnum = op.getopnum() for value, func in optimize_ops: if opnum == value: diff --git a/pypy/jit/metainterp/test/test_compile.py b/pypy/jit/metainterp/test/test_compile.py --- a/pypy/jit/metainterp/test/test_compile.py +++ b/pypy/jit/metainterp/test/test_compile.py @@ -60,6 +60,7 @@ pass class FakeMetaInterp: + call_pure_results = {} class jitdriver_sd: warmstate = FakeState() diff --git a/pypy/jit/metainterp/optimizeopt/simplify.py b/pypy/jit/metainterp/optimizeopt/simplify.py --- a/pypy/jit/metainterp/optimizeopt/simplify.py +++ b/pypy/jit/metainterp/optimizeopt/simplify.py @@ -5,7 +5,7 @@ class OptSimplify(Optimization): def optimize_CALL_PURE(self, op): - args = op.getarglist()[1:] + args = op.getarglist() self.emit_operation(ResOperation(rop.CALL, args, op.result, op.getdescr())) diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -155,16 +155,24 @@ self.emit_operation(op) def optimize_CALL_PURE(self, op): + arg_consts = [] for i in range(op.numargs()): arg = op.getarg(i) - if 
self.get_constant_box(arg) is None: + const = self.get_constant_box(arg) + if const is None: break + arg_consts.append(const) else: - # all constant arguments: constant-fold away - self.make_constant(op.result, op.getarg(0)) - return + # all constant arguments: check if we already know the reslut + try: + result = self.optimizer.call_pure_results[arg_consts] + except KeyError: + pass + else: + self.make_constant(op.result, result) + return # replace CALL_PURE with just CALL - args = op.getarglist()[1:] + args = op.getarglist() self.emit_operation(ResOperation(rop.CALL, args, op.result, op.getdescr())) diff --git a/pypy/jit/metainterp/optimizeutil.py b/pypy/jit/metainterp/optimizeutil.py --- a/pypy/jit/metainterp/optimizeutil.py +++ b/pypy/jit/metainterp/optimizeutil.py @@ -108,3 +108,6 @@ def args_dict(): return r_dict(args_eq, args_hash) + +def args_dict_box(): + return r_dict(args_eq, args_hash) diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -258,6 +258,8 @@ self.posponedop = None self.exception_might_have_happened = False self.newoperations = [] + if loop is not None: + self.call_pure_results = loop.call_pure_results self.set_optimizations(optimizations) diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -14,6 +14,7 @@ from pypy.jit.metainterp.resoperation import rop, opname, ResOperation from pypy.jit.tool.oparser import pure_parse from pypy.jit.metainterp.test.test_optimizebasic import equaloplists +from pypy.jit.metainterp.optimizeutil import args_dict class Fake(object): failargs_limit = 1000 @@ -161,7 +162,8 @@ assert equaloplists(optimized.operations, expected.operations, False, remap, text_right) - def optimize_loop(self, ops, optops, 
expected_preamble=None): + def optimize_loop(self, ops, optops, expected_preamble=None, + call_pure_results=None): loop = self.parse(ops) if optops != "crash!": expected = self.parse(optops) @@ -171,6 +173,10 @@ expected_preamble = self.parse(expected_preamble) # self.loop = loop + loop.call_pure_results = args_dict() + if call_pure_results is not None: + for k, v in call_pure_results.items(): + loop.call_pure_results[list(k)] = v loop.preamble = TreeLoop('preamble') loop.preamble.inputargs = loop.inputargs loop.preamble.token = LoopToken() @@ -2893,7 +2899,7 @@ ops = ''' [p1, i1, i4] setfield_gc(p1, i1, descr=valuedescr) - i3 = call_pure(42, p1, descr=plaincalldescr) + i3 = call_pure(p1, descr=plaincalldescr) setfield_gc(p1, i3, descr=valuedescr) jump(p1, i4, i3) ''' @@ -2911,7 +2917,7 @@ ops = ''' [p1, i1, i4] setfield_gc(p1, i1, descr=valuedescr) - i3 = call_pure(42, p1, descr=plaincalldescr) + i3 = call_pure(p1, descr=plaincalldescr) setfield_gc(p1, i1, descr=valuedescr) jump(p1, i4, i3) ''' @@ -2931,12 +2937,14 @@ # the result of the call, recorded as the first arg), or turned into # a regular CALL. # XXX can this test be improved with unrolling? 
+ arg_consts = [ConstInt(i) for i in (123456, 4, 5, 6)] + call_pure_results = {tuple(arg_consts): ConstInt(42)} ops = ''' [i0, i1, i2] escape(i1) escape(i2) - i3 = call_pure(42, 123456, 4, 5, 6, descr=plaincalldescr) - i4 = call_pure(43, 123456, 4, i0, 6, descr=plaincalldescr) + i3 = call_pure(123456, 4, 5, 6, descr=plaincalldescr) + i4 = call_pure(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, i3, i4) ''' preamble = ''' @@ -2953,7 +2961,7 @@ i4 = call(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, i4) ''' - self.optimize_loop(ops, expected, preamble) + self.optimize_loop(ops, expected, preamble, call_pure_results) # ---------- diff --git a/pypy/jit/metainterp/test/test_optimizebasic.py b/pypy/jit/metainterp/test/test_optimizebasic.py --- a/pypy/jit/metainterp/test/test_optimizebasic.py +++ b/pypy/jit/metainterp/test/test_optimizebasic.py @@ -11,6 +11,7 @@ from pypy.jit.metainterp import executor, compile, resume, history from pypy.jit.metainterp.resoperation import rop, opname, ResOperation from pypy.jit.tool.oparser import pure_parse +from pypy.jit.metainterp.optimizeutil import args_dict ##class FakeFrame(object): ## parent_resumedata_snapshot = None @@ -245,10 +246,14 @@ assert equaloplists(optimized.operations, expected.operations, False, remap) - def optimize_loop(self, ops, optops): + def optimize_loop(self, ops, optops, call_pure_results=None): loop = self.parse(ops) # self.loop = loop + loop.call_pure_results = args_dict() + if call_pure_results is not None: + for k, v in call_pure_results.items(): + loop.call_pure_results[list(k)] = v metainterp_sd = FakeMetaInterpStaticData(self.cpu) if hasattr(self, 'vrefinfo'): metainterp_sd.virtualref_info = self.vrefinfo @@ -2861,7 +2866,7 @@ ops = ''' [p1, i1] setfield_gc(p1, i1, descr=valuedescr) - i3 = call_pure(42, p1, descr=plaincalldescr) + i3 = call_pure(p1, descr=plaincalldescr) setfield_gc(p1, i3, descr=valuedescr) jump(p1, i3) ''' @@ -2880,12 +2885,14 @@ # time. 
Check that it is either constant-folded (and replaced by # the result of the call, recorded as the first arg), or turned into # a regular CALL. + arg_consts = [ConstInt(i) for i in (123456, 4, 5, 6)] + call_pure_results = {tuple(arg_consts): ConstInt(42)} ops = ''' [i0, i1, i2] escape(i1) escape(i2) - i3 = call_pure(42, 123456, 4, 5, 6, descr=plaincalldescr) - i4 = call_pure(43, 123456, 4, i0, 6, descr=plaincalldescr) + i3 = call_pure(123456, 4, 5, 6, descr=plaincalldescr) + i4 = call_pure(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, i3, i4) ''' expected = ''' @@ -2895,7 +2902,7 @@ i4 = call(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, 42, i4) ''' - self.optimize_loop(ops, expected) + self.optimize_loop(ops, expected, call_pure_results) def test_vref_nonvirtual_nonescape(self): ops = """ From commits-noreply at bitbucket.org Wed Mar 16 11:14:44 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 16 Mar 2011 11:14:44 +0100 (CET) Subject: [pypy-svn] pypy default: hg merge Message-ID: <20110316101444.A3675282BDB@codespeak.net> Author: Hakan Ardo Branch: Changeset: r42706:10d0ddb46990 Date: 2011-03-16 11:13 +0100 http://bitbucket.org/pypy/pypy/changeset/10d0ddb46990/ Log: hg merge From commits-noreply at bitbucket.org Wed Mar 16 11:18:38 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 11:18:38 +0100 (CET) Subject: [pypy-svn] pypy default: Simplify the logic and don't give a DeprecationWarning. Message-ID: <20110316101838.21C2F282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42707:e7e53bd568e9 Date: 2011-03-16 06:15 -0400 http://bitbucket.org/pypy/pypy/changeset/e7e53bd568e9/ Log: Simplify the logic and don't give a DeprecationWarning. I don't really understand why but it gives troubles to some lib-python tests. Maybe it's related to the fact that descr_init's logic was exactly like CPython's, but descr_new's logic was not. 
diff --git a/pypy/objspace/std/objecttype.py b/pypy/objspace/std/objecttype.py --- a/pypy/objspace/std/objecttype.py +++ b/pypy/objspace/std/objecttype.py @@ -78,19 +78,15 @@ return w_obj def descr__init__(space, w_obj, __args__): + # don't allow arguments unless __new__ is overridden w_type = space.type(w_obj) w_parent_new, _ = w_type.lookup_where('__new__') - w_parent_init, _ = w_type.lookup_where('__init__') - try: - __args__.fixedunpack(0) - except ValueError: - if w_parent_new is not space.w_object and w_parent_init is not space.w_object: - space.warn("object.__init__() takes no parameters", space.w_DeprecationWarning) - elif w_parent_new is space.w_object or w_parent_init is not space.w_object: + if w_parent_new is space.w_object: + try: + __args__.fixedunpack(0) + except ValueError: raise OperationError(space.w_TypeError, - space.wrap("object.__init__() takes no parameters") - ) - + space.wrap("object.__init__() takes no parameters")) @gateway.unwrap_spec(proto=int) diff --git a/pypy/objspace/std/test/test_obj.py b/pypy/objspace/std/test/test_obj.py --- a/pypy/objspace/std/test/test_obj.py +++ b/pypy/objspace/std/test/test_obj.py @@ -87,9 +87,10 @@ def __init__(self): super(B, self).__init__(a=3) - with warnings.catch_warnings(record=True) as log: - warnings.simplefilter("always", DeprecationWarning) - B() - assert len(log) == 1 - assert log[0].message.args == ("object.__init__() takes no parameters",) - assert type(log[0].message) is DeprecationWarning + #-- pypy doesn't raise the DeprecationWarning + #with warnings.catch_warnings(record=True) as log: + # warnings.simplefilter("always", DeprecationWarning) + # B() + #assert len(log) == 1 + #assert log[0].message.args == ("object.__init__() takes no parameters",) + #assert type(log[0].message) is DeprecationWarning From commits-noreply at bitbucket.org Wed Mar 16 11:48:20 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 11:48:20 +0100 (CET) Subject: [pypy-svn] pypy default: Remove 
simple_optimize, left behind. Message-ID: <20110316104820.A473F282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42708:1f6c9863ec3f Date: 2011-03-16 06:25 -0400 http://bitbucket.org/pypy/pypy/changeset/1f6c9863ec3f/ Log: Remove simple_optimize, left behind. diff --git a/pypy/jit/metainterp/simple_optimize.py b/pypy/jit/metainterp/simple_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/simple_optimize.py +++ /dev/null @@ -1,56 +0,0 @@ - -""" Simplified optimize.py -""" - -from pypy.jit.metainterp.resoperation import rop, ResOperation -from pypy.jit.metainterp import resume, compile - -EMPTY_VALUES = {} - -def transform(op): - from pypy.jit.metainterp.history import AbstractDescr - # Rename CALL_PURE and CALL_LOOPINVARIANT to CALL. - # Simplify the VIRTUAL_REF_* so that they don't show up in the backend. - if op.getopnum() == rop.CALL_PURE: - op = ResOperation(rop.CALL, op.getarglist()[1:], op.result, - op.getdescr()) - elif op.getopnum() == rop.CALL_LOOPINVARIANT: - op = op.copy_and_change(rop.CALL) - elif op.getopnum() == rop.VIRTUAL_REF: - op = ResOperation(rop.SAME_AS, [op.getarg(0)], op.result) - elif op.getopnum() == rop.VIRTUAL_REF_FINISH: - return [] - return [op] - -def optimize_loop(metainterp_sd, old_loops, loop): - if old_loops: - assert len(old_loops) == 1 - return old_loops[0] - else: - # copy loop operations here - # we need it since the backend can modify those lists, which make - # get_guard_op in compile.py invalid - # in fact, x86 modifies this list for moving GCs - memo = resume.ResumeDataLoopMemo(metainterp_sd) - newoperations = [] - for op in loop.operations: - if op.is_guard(): - descr = op.getdescr() - assert isinstance(descr, compile.ResumeGuardDescr) - modifier = resume.ResumeDataVirtualAdder(descr, memo) - newboxes = modifier.finish(EMPTY_VALUES) - descr.store_final_boxes(op, newboxes) - newoperations.extend(transform(op)) - loop.operations = newoperations - jumpop = newoperations[-1] - if jumpop.getopnum() == 
rop.JUMP: - jumpop.setdescr(loop.token) - return None - -def optimize_bridge(metainterp_sd, old_loops, loop, inline_short_preamble, - retraced): - optimize_loop(metainterp_sd, [], loop) - jumpop = loop.operations[-1] - if jumpop.getopnum() == rop.JUMP: - jumpop.setdescr(old_loops[0]) - return old_loops[0] diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -20,7 +20,6 @@ def _get_jitcodes(testself, CPUClass, func, values, type_system, supports_longlong=False, **kwds): from pypy.jit.codewriter import support, codewriter - from pypy.jit.metainterp import simple_optimize class FakeJitCell: __compiled_merge_points = [] @@ -38,10 +37,6 @@ return self._cell _cell = FakeJitCell() - # pick the optimizer this way - optimize_loop = staticmethod(simple_optimize.optimize_loop) - optimize_bridge = staticmethod(simple_optimize.optimize_bridge) - trace_limit = sys.maxint enable_opts = ALL_OPTS_DICT diff --git a/pypy/jit/metainterp/test/test_slist.py b/pypy/jit/metainterp/test/test_slist.py --- a/pypy/jit/metainterp/test/test_slist.py +++ b/pypy/jit/metainterp/test/test_slist.py @@ -50,7 +50,6 @@ assert res == 2 def test_make_list(self): - from pypy.jit.metainterp import simple_optimize myjitdriver = JitDriver(greens = [], reds = ['n', 'lst']) def f(n): lst = None diff --git a/pypy/jit/metainterp/test/test_tl.py b/pypy/jit/metainterp/test/test_tl.py --- a/pypy/jit/metainterp/test/test_tl.py +++ b/pypy/jit/metainterp/test/test_tl.py @@ -86,7 +86,6 @@ def test_tl_call(self, listops=True, policy=None): from pypy.jit.tl.tl import interp from pypy.jit.tl.tlopcode import compile - from pypy.jit.metainterp import simple_optimize code = compile(''' PUSHARG From commits-noreply at bitbucket.org Wed Mar 16 11:48:21 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 11:48:21 +0100 (CET) Subject: [pypy-svn] pypy default: Kill outdated 
comment. Message-ID: <20110316104821.4F158282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42709:2d1a02c6afa1 Date: 2011-03-16 06:29 -0400 http://bitbucket.org/pypy/pypy/changeset/2d1a02c6afa1/ Log: Kill outdated comment. diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -485,7 +485,6 @@ #'OOSEND', # ootype operation #'OOSEND_PURE', # ootype operation 'CALL_PURE/*d', # removed before it's passed to the backend - # CALL_PURE(result, func, arg_1,..,arg_n) '_CALL_LAST', '_CANRAISE_LAST', # ----- end of can_raise operations ----- From commits-noreply at bitbucket.org Wed Mar 16 11:48:27 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 11:48:27 +0100 (CET) Subject: [pypy-svn] pypy default: Fixes for test_ztranslation.py. Message-ID: <20110316104827.E81F1282BDF@codespeak.net> Author: Armin Rigo Branch: Changeset: r42710:014b105118de Date: 2011-03-16 06:47 -0400 http://bitbucket.org/pypy/pypy/changeset/014b105118de/ Log: Fixes for test_ztranslation.py. 
diff --git a/pypy/jit/codewriter/test/test_longlong.py b/pypy/jit/codewriter/test/test_longlong.py --- a/pypy/jit/codewriter/test/test_longlong.py +++ b/pypy/jit/codewriter/test/test_longlong.py @@ -8,6 +8,7 @@ from pypy.jit.codewriter.jtransform import Transformer, NotSupported from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.codewriter.test.test_jtransform import const +from pypy.jit.codewriter import longlong class FakeRTyper: @@ -28,6 +29,12 @@ self.rtyper = FakeRTyper() +def test_functions(): + xll = longlong.getfloatstorage(3.5) + assert longlong.getrealfloat(xll) == 3.5 + assert isinstance(longlong.gethash(xll), int) + + class TestLongLong: def setup_class(cls): if sys.maxint > 2147483647: diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ b/pypy/rpython/lltypesystem/opimpl.py @@ -227,6 +227,14 @@ assert isinstance(y, int) return x | y +def op_int_xor(x, y): + # used in computing hashes + if isinstance(x, AddressAsInt): x = llmemory.cast_adr_to_int(x.adr) + if isinstance(y, AddressAsInt): y = llmemory.cast_adr_to_int(y.adr) + assert isinstance(x, int) + assert isinstance(y, int) + return x ^ y + def op_int_mul(x, y): assert isinstance(x, (int, llmemory.AddressOffset)) assert isinstance(y, (int, llmemory.AddressOffset)) diff --git a/pypy/jit/codewriter/longlong.py b/pypy/jit/codewriter/longlong.py --- a/pypy/jit/codewriter/longlong.py +++ b/pypy/jit/codewriter/longlong.py @@ -38,7 +38,7 @@ getfloatstorage = longlong2float.float2longlong getrealfloat = longlong2float.longlong2float - gethash = lambda xll: xll - (xll >> 32) + gethash = lambda xll: rarithmetic.intmask(xll - (xll >> 32)) is_longlong = lambda TYPE: (TYPE == lltype.SignedLongLong or TYPE == lltype.UnsignedLongLong) From commits-noreply at bitbucket.org Wed Mar 16 14:43:21 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 14:43:21 +0100 (CET) Subject: [pypy-svn] pypy default: 
Attempt to fix the test on pypy-c-stackless. Message-ID: <20110316134321.05321282B9E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42711:679cf08b7034 Date: 2011-03-16 09:42 -0400 http://bitbucket.org/pypy/pypy/changeset/679cf08b7034/ Log: Attempt to fix the test on pypy-c-stackless. diff --git a/pypy/module/_stackless/test/test_composable_coroutine.py b/pypy/module/_stackless/test/test_composable_coroutine.py --- a/pypy/module/_stackless/test/test_composable_coroutine.py +++ b/pypy/module/_stackless/test/test_composable_coroutine.py @@ -11,7 +11,7 @@ space = gettestobjspace(usemodules=('_stackless',)) cls.space = space - cls.w_generator = space.appexec([], """(): + cls.w_generator_ = space.appexec([], """(): import _stackless generators_costate = _stackless.usercostate() @@ -55,7 +55,7 @@ generator.Yield = Yield generator._costate = generators_costate - return generator + return (generator,) """) def test_simple_costate(self): @@ -72,7 +72,7 @@ assert result == [co] def test_generator(self): - generator = self.generator + generator, = self.generator_ def squares(n): for i in range(n): @@ -95,7 +95,7 @@ """ import _stackless - generator = self.generator + generator, = self.generator_ # you can see how it fails if we don't have two different costates # by setting compute_costate to generator._costate instead From commits-noreply at bitbucket.org Wed Mar 16 14:45:56 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 14:45:56 +0100 (CET) Subject: [pypy-svn] pypy default: Attempt to fix the test on pypy-c-stackless. Message-ID: <20110316134556.656E3282B9E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42712:6d4ae655b397 Date: 2011-03-16 09:45 -0400 http://bitbucket.org/pypy/pypy/changeset/6d4ae655b397/ Log: Attempt to fix the test on pypy-c-stackless. 
diff --git a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py --- a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py +++ b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py @@ -94,9 +94,9 @@ def setup_class(cls): #cls.space = gettestobjspace(**{"objspace.std.withtproxy": True, # "usemodules":("_stackless",)}) - cls.w_test_env = cls.space.appexec([], """(): + cls.w_test_env_ = cls.space.appexec([], """(): from distributed import test_env - return test_env + return (test_env,) """) cls.reclimit = sys.getrecursionlimit() sys.setrecursionlimit(100000) @@ -108,7 +108,7 @@ def f(x, y): return x + y - protocol = self.test_env({"f": f}) + protocol = self.test_env_[0]({"f": f}) fun = protocol.get_remote("f") assert fun(2, 3) == 5 @@ -119,14 +119,14 @@ def f(x): return x + g() - protocol = self.test_env({"f":f}) + protocol = self.test_env_[0]({"f":f}) fun = protocol.get_remote("f") assert fun(8) == 16 def test_remote_dict(self): #skip("Land of infinite recursion") d = {'a':3} - protocol = self.test_env({'d':d}) + protocol = self.test_env_[0]({'d':d}) xd = protocol.get_remote('d') #assert d['a'] == xd['a'] assert d.keys() == xd.keys() @@ -142,7 +142,7 @@ return self.x + 8 a = A(3) - protocol = self.test_env({'a':a}) + protocol = self.test_env_[0]({'a':a}) xa = protocol.get_remote("a") assert xa.x == 3 assert len(xa) == 11 @@ -161,7 +161,7 @@ a = A() - protocol = self.test_env({'a':a}) + protocol = self.test_env_[0]({'a':a}) xa = protocol.get_remote('a') assert xa.__class__.__doc__ == 'xxx' assert xa.meth(x) == 4 @@ -179,7 +179,7 @@ return [1,2,3] a = A() - protocol = self.test_env({'a': a}) + protocol = self.test_env_[0]({'a': a}) xa = protocol.get_remote('a') xa.meth(B()) assert xa.perform() == 4 @@ -188,7 +188,7 @@ #skip("Land of infinite recursion") import sys f = sys._getframe() - protocol = self.test_env({'f':f}) + protocol = self.test_env_[0]({'f':f}) xf = 
protocol.get_remote('f') assert f.f_globals.keys() == xf.f_globals.keys() assert f.f_locals.keys() == xf.f_locals.keys() @@ -197,7 +197,7 @@ def raising(): 1/0 - protocol = self.test_env({'raising':raising}) + protocol = self.test_env_[0]({'raising':raising}) xr = protocol.get_remote('raising') try: xr() @@ -217,7 +217,7 @@ return cls.z a = A() - protocol = self.test_env({'a':a}) + protocol = self.test_env_[0]({'a':a}) xa = protocol.get_remote("a") res = xa.x() assert res == 8 @@ -228,7 +228,7 @@ assert type(self) is tp a = A() - protocol = self.test_env({'a':a, 'A':A}) + protocol = self.test_env_[0]({'a':a, 'A':A}) xa = protocol.get_remote('a') xA = protocol.get_remote('A') xa.m(xA) @@ -241,7 +241,7 @@ def x(self): return self.y - protocol = self.test_env({'C':C}) + protocol = self.test_env_[0]({'C':C}) xC = protocol.get_remote('C') xc = xC(3) res = xc.x() @@ -251,14 +251,14 @@ skip("Fix me some day maybe") import sys - protocol = self.test_env({'sys':sys}) + protocol = self.test_env_[0]({'sys':sys}) s = protocol.get_remote('sys') l = dir(s) assert l def test_remote_file_access(self): skip("Descriptor logic seems broken") - protocol = self.test_env({'f':open}) + protocol = self.test_env_[0]({'f':open}) xf = protocol.get_remote('f') data = xf('/etc/passwd').read() assert data @@ -275,7 +275,7 @@ x = X() - protocol = self.test_env({'x':x}) + protocol = self.test_env_[0]({'x':x}) xx = protocol.get_remote('x') assert xx.x == 3 @@ -287,17 +287,17 @@ pass y = Y() - protocol = self.test_env({'y':y, 'X':X}) + protocol = self.test_env_[0]({'y':y, 'X':X}) xy = protocol.get_remote('y') xX = protocol.get_remote('X') assert isinstance(xy, xX) def test_key_error(self): from distributed import ObjectNotFound - protocol = self.test_env({}) + protocol = self.test_env_[0]({}) raises(ObjectNotFound, "protocol.get_remote('x')") def test_list_items(self): - protocol = self.test_env({'x':3, 'y':8}) + protocol = self.test_env_[0]({'x':3, 'y':8}) assert sorted(protocol.remote_keys()) == 
['x', 'y'] From commits-noreply at bitbucket.org Wed Mar 16 14:47:00 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 16 Mar 2011 14:47:00 +0100 (CET) Subject: [pypy-svn] pypy default: (lukas) Rerased pointer patch Message-ID: <20110316134700.47EFE282B9E@codespeak.net> Author: David Schneider Branch: Changeset: r42713:d1981fab95dd Date: 2011-03-16 14:43 +0100 http://bitbucket.org/pypy/pypy/changeset/d1981fab95dd/ Log: (lukas) Rerased pointer patch diff --git a/pypy/rlib/rerased.py b/pypy/rlib/rerased.py --- a/pypy/rlib/rerased.py +++ b/pypy/rlib/rerased.py @@ -220,5 +220,7 @@ bk = self.rtyper.annotator.bookkeeper s_obj = value._identity.get_input_annotation(bk) r_obj = self.rtyper.getrepr(s_obj) + if r_obj.lowleveltype is lltype.Void: + return lltype.nullptr(self.lowleveltype.TO) v = r_obj.convert_const(value._x) return lltype.cast_opaque_ptr(self.lowleveltype, v) diff --git a/pypy/rlib/test/test_rerased.py b/pypy/rlib/test/test_rerased.py --- a/pypy/rlib/test/test_rerased.py +++ b/pypy/rlib/test/test_rerased.py @@ -120,6 +120,28 @@ x = interpret(f, []) assert x == 16 + 42 + 1 +def test_prebuilt_erased_in_instance(): + erase_empty, unerase_empty = new_erasing_pair("empty") + class FakeList(object): + pass + + x1 = X() + x1.foobar = 42 + l1 = FakeList() + l1.storage = eraseX(x1) + l2 = FakeList() + l2.storage = erase_empty(None) + + def f(): + #assert is_integer(e1) + #assert not is_integer(e2) + x1.foobar += 1 + x2 = uneraseX(l1.storage).foobar + (unerase_empty(l2.storage) is None) + return x2 + x = interpret(f, []) + assert x == 43 + True + + def test_overflow(): def f(i): try: From commits-noreply at bitbucket.org Wed Mar 16 14:49:12 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 14:49:12 +0100 (CET) Subject: [pypy-svn] pypy default: Remove doc of the removed option. 
Message-ID: <20110316134912.56827282B9E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42714:d2a47f3ec429 Date: 2011-03-16 09:47 -0400 http://bitbucket.org/pypy/pypy/changeset/d2a47f3ec429/ Log: Remove doc of the removed option. diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. From commits-noreply at bitbucket.org Wed Mar 16 18:06:31 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:31 +0100 (CET) Subject: [pypy-svn] pypy default: port two more tests Message-ID: <20110316170631.DA192282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42715:8615c11e4c1f Date: 2011-03-16 13:59 +0100 http://bitbucket.org/pypy/pypy/changeset/8615c11e4c1f/ Log: port two more tests diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -586,3 +586,55 @@ --TICK-- jump(p0, p1, p2, p3, i14, i5, p6, descr=) """) + + def test_chain_of_guards(self): + src = """ + class A(object): + def method_x(self): + return 3 + + l = ["x", "y"] + + def main(arg): + sum = 0 + a = A() + i = 0 + while i < 500: + name = l[arg] + sum += getattr(a, 'method_' + name)() + i += 1 + return sum + """ + log = self.run(src, [0], threshold=400) + assert log.result == 500*3 + loops = log.loops_by_filename(self.filepath) + assert len(loops) == 1 + + def test_getattr_with_dynamic_attribute(self): + src = """ + class A(object): + pass + + l = ["x", "y"] + + def main(): + sum = 0 + a = A() + a.a1 = 0 + a.a2 = 0 + a.a3 = 0 + a.a4 = 0 + a.a5 = 0 # workaround, because the first five attributes need a promotion + a.x = 1 + a.y = 2 + i = 0 + while i < 500: + name = l[i % 2] + sum += getattr(a, name) 
+ i += 1 + return sum + """ + log = self.run(src, [], threshold=400) + assert log.result == 250 + 250*2 + loops = log.loops_by_filename(self.filepath) + assert len(loops) == 1 From commits-noreply at bitbucket.org Wed Mar 16 18:06:32 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:32 +0100 (CET) Subject: [pypy-svn] pypy default: port one more test Message-ID: <20110316170632.80CBB282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42716:362e8bfdc5ca Date: 2011-03-16 14:34 +0100 http://bitbucket.org/pypy/pypy/changeset/362e8bfdc5ca/ Log: port one more test diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -638,3 +638,36 @@ assert log.result == 250 + 250*2 loops = log.loops_by_filename(self.filepath) assert len(loops) == 1 + + def test_blockstack_virtualizable(self): + def main(n): + from pypyjit import residual_call + i = 0 + while i < n: + try: + residual_call(len, []) # ID: call + except: + pass + i += 1 + return i + # + log = self.run(main, [500], threshold=400) + assert log.result == 500 + loop, = log.loops_by_id('call') + loop.match_by_id('call', opcode='CALL_FUNCTION', expected_src=""" + # make sure that the "block" is not allocated + ... + i20 = force_token() + setfield_gc(p0, i20, descr=) + p22 = new_with_vtable(19511408) + p24 = new_array(1, descr=) + p26 = new_with_vtable(ConstClass(W_ListObject)) + p27 = new(descr=) + p29 = new_array(0, descr=) + setfield_gc(p27, p29, descr=) + setfield_gc(p26, p27, descr=<.* .*W_ListObject.inst_wrappeditems .*>) + setarrayitem_gc(p24, 0, p26, descr=) + setfield_gc(p22, p24, descr=) + p32 = call_may_force(11376960, p18, p22, descr=) + ... 
+ """) From commits-noreply at bitbucket.org Wed Mar 16 18:06:33 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:33 +0100 (CET) Subject: [pypy-svn] pypy default: one more test Message-ID: <20110316170633.040EB282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42717:f11fa89afaaa Date: 2011-03-16 14:39 +0100 http://bitbucket.org/pypy/pypy/changeset/f11fa89afaaa/ Log: one more test diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -654,7 +654,7 @@ log = self.run(main, [500], threshold=400) assert log.result == 500 loop, = log.loops_by_id('call') - loop.match_by_id('call', opcode='CALL_FUNCTION', expected_src=""" + assert loop.match_by_id('call', opcode='CALL_FUNCTION', expected_src=""" # make sure that the "block" is not allocated ... i20 = force_token() @@ -671,3 +671,28 @@ p32 = call_may_force(11376960, p18, p22, descr=) ... 
""") + + def test_import_in_function(self): + def main(n): + i = 0 + while i < n: + from sys import version # ID: import + i += 1 + return i + # + log = self.run(main, [500], threshold=400) + assert log.result == 500 + loop, = log.loops_by_id('import') + assert loop.match_by_id('import', """ + p14 = call(ConstClass(ll_split_chr__GcStruct_listLlT_rpy_stringPtr_Char), p8, 46, descr=) + guard_no_exception(descr=) + guard_nonnull(p14, descr=) + i15 = getfield_gc(p14, descr=) + i16 = int_is_true(i15) + guard_true(i16, descr=) + p18 = call(ConstClass(ll_pop_default__dum_nocheckConst_listPtr), p14, descr=) + guard_no_exception(descr=) + i19 = getfield_gc(p14, descr=) + i20 = int_is_true(i19) + guard_false(i20, descr=) + """) From commits-noreply at bitbucket.org Wed Mar 16 18:06:33 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:33 +0100 (CET) Subject: [pypy-svn] pypy default: bah, actually *check* that the loop matches, and fix an annoying typo (*kwds vs **kwds) Message-ID: <20110316170633.D576F282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42718:8ed9cb7b58c3 Date: 2011-03-16 14:49 +0100 http://bitbucket.org/pypy/pypy/changeset/8ed9cb7b58c3/ Log: bah, actually *check* that the loop matches, and fix an annoying typo (*kwds vs **kwds) diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -159,7 +159,7 @@ return matcher.match(expected_src) def match_by_id(self, id, expected_src, **kwds): - ops = list(self.ops_by_id(id, *kwds)) + ops = list(self.ops_by_id(id, **kwds)) matcher = OpMatcher(ops, src=self.format_ops(id)) return matcher.match(expected_src) diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ 
-494,19 +494,20 @@ for i in range(n): # ID: for tmp = g(n) s += tmp[i] # ID: getitem + a = 0 return s # log = self.run(main, [1000], threshold=400) assert log.result == 1000 * 999 / 2 loop, = log.loops_by_filename(self.filepath) - loop.match_by_id('getitem', opcode='BINARY_SUBSCR', expected_src=""" + assert loop.match_by_id('getitem', opcode='BINARY_SUBSCR', expected_src=""" i43 = int_lt(i25, 0) guard_false(i43, descr=) i44 = int_ge(i25, i39) guard_false(i44, descr=) i45 = int_mul(i25, i33) """) - loop.match_by_id('for', opcode='FOR_ITER', expected_src=""" + assert loop.match_by_id('for', opcode='FOR_ITER', expected_src=""" i23 = int_ge(i11, i12) guard_false(i23, descr=) i24 = int_mul(i11, i14) From commits-noreply at bitbucket.org Wed Mar 16 18:06:34 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:34 +0100 (CET) Subject: [pypy-svn] pypy default: one more test Message-ID: <20110316170634.BFE99282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42719:f1946cab282d Date: 2011-03-16 15:15 +0100 http://bitbucket.org/pypy/pypy/changeset/f1946cab282d/ Log: one more test diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -697,3 +697,25 @@ i20 = int_is_true(i19) guard_false(i20, descr=) """) + + def test_arraycopy_disappears(self): + def main(n): + i = 0 + while i < n: + t = (1, 2, 3, i + 1) + t2 = t[:] + del t + i = t2[3] + del t2 + return i + # + log = self.run(main, [500], threshold=400) + assert log.result == 500 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i7 = int_lt(i5, i6) + guard_true(i7, descr=) + i9 = int_add(i5, 1) + --TICK-- + jump(p0, p1, p2, p3, p4, i9, i6, descr=) + """) From commits-noreply at bitbucket.org Wed Mar 16 18:06:35 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 
18:06:35 +0100 (CET) Subject: [pypy-svn] pypy default: port and improve test_boolrewrite_invers; please review Message-ID: <20110316170635.99568282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42720:cd6bf05d78cb Date: 2011-03-16 17:26 +0100 http://bitbucket.org/pypy/pypy/changeset/cd6bf05d78cb/ Log: port and improve test_boolrewrite_invers; please review diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -719,3 +719,43 @@ --TICK-- jump(p0, p1, p2, p3, p4, i9, i6, descr=) """) + + def test_boolrewrite_invers(self): + for a, b, res, opt_applied in (('2000', '2000', 20001000, True), + ( '500', '500', 15001500, True), + ( '300', '600', 16001700, False), + ( 'a', 'b', 16001700, False), + ( 'a', 'a', 13001700, True)): + src = """ + def main(): + sa = 0 + a = 300 + b = 600 + for i in range(1000): + if i < %s: # ID: lt + sa += 1 + else: + sa += 2 + # + if i >= %s: # ID: ge + sa += 10000 + else: + sa += 20000 + return sa + """ % (a, b) + # + log = self.run(src, [], threshold=400) + assert log.result == res + loop, = log.loops_by_filename(self.filepath) + le_ops = log.opnames(loop.ops_by_id('lt')) + ge_ops = log.opnames(loop.ops_by_id('ge')) + assert le_ops.count('int_lt') == 1 + # + if opt_applied: + assert ge_ops.count('int_ge') == 0 + else: + # if this assert fails it means that the optimization was + # applied even if we don't expect to. 
Check whether the + # optimization is valid, and either fix the code or fix the + # test :-) + assert ge_ops.count('int_ge') == 1 From commits-noreply at bitbucket.org Wed Mar 16 18:06:36 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:36 +0100 (CET) Subject: [pypy-svn] pypy default: port and improve also test_boolrewrite_reflex Message-ID: <20110316170636.75CD3282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42721:34eca51299a8 Date: 2011-03-16 18:05 +0100 http://bitbucket.org/pypy/pypy/changeset/34eca51299a8/ Log: port and improve also test_boolrewrite_reflex diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -721,11 +721,11 @@ """) def test_boolrewrite_invers(self): - for a, b, res, opt_applied in (('2000', '2000', 20001000, True), - ( '500', '500', 15001500, True), - ( '300', '600', 16001700, False), - ( 'a', 'b', 16001700, False), - ( 'a', 'a', 13001700, True)): + for a, b, res, opt_expected in (('2000', '2000', 20001000, True), + ( '500', '500', 15001500, True), + ( '300', '600', 16001700, False), + ( 'a', 'b', 16001700, False), + ( 'a', 'a', 13001700, True)): src = """ def main(): sa = 0 @@ -751,7 +751,7 @@ ge_ops = log.opnames(loop.ops_by_id('ge')) assert le_ops.count('int_lt') == 1 # - if opt_applied: + if opt_expected: assert ge_ops.count('int_ge') == 0 else: # if this assert fails it means that the optimization was @@ -759,3 +759,42 @@ # optimization is valid, and either fix the code or fix the # test :-) assert ge_ops.count('int_ge') == 1 + + def test_boolrewrite_reflex(self): + for a, b, res, opt_expected in (('2000', '2000', 10001000, True), + ( '500', '500', 15001500, True), + ( '300', '600', 14001700, False), + ( 'a', 'b', 14001700, False), + ( 'a', 'a', 17001700, True)): + + src = """ + def main(): + sa = 0 + a = 300 + b 
= 600 + for i in range(1000): + if i < %s: # ID: lt + sa += 1 + else: + sa += 2 + if %s > i: # ID: gt + sa += 10000 + else: + sa += 20000 + return sa + """ % (a, b) + log = self.run(src, [], threshold=400) + assert log.result == res + loop, = log.loops_by_filename(self.filepath) + le_ops = log.opnames(loop.ops_by_id('lt')) + gt_ops = log.opnames(loop.ops_by_id('gt')) + assert le_ops.count('int_lt') == 1 + # + if opt_expected: + assert gt_ops.count('int_gt') == 0 + else: + # if this assert fails it means that the optimization was + # applied even if we don't expect to. Check whether the + # optimization is valid, and either fix the code or fix the + # test :-) + assert gt_ops.count('int_gt') == 1 From commits-noreply at bitbucket.org Wed Mar 16 18:06:37 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 16 Mar 2011 18:06:37 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110316170637.DB6DD282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42722:9879b1c1d824 Date: 2011-03-16 18:06 +0100 http://bitbucket.org/pypy/pypy/changeset/9879b1c1d824/ Log: merge heads diff --git a/pypy/jit/metainterp/simple_optimize.py b/pypy/jit/metainterp/simple_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/simple_optimize.py +++ /dev/null @@ -1,56 +0,0 @@ - -""" Simplified optimize.py -""" - -from pypy.jit.metainterp.resoperation import rop, ResOperation -from pypy.jit.metainterp import resume, compile - -EMPTY_VALUES = {} - -def transform(op): - from pypy.jit.metainterp.history import AbstractDescr - # Rename CALL_PURE and CALL_LOOPINVARIANT to CALL. - # Simplify the VIRTUAL_REF_* so that they don't show up in the backend. 
- if op.getopnum() == rop.CALL_PURE: - op = ResOperation(rop.CALL, op.getarglist()[1:], op.result, - op.getdescr()) - elif op.getopnum() == rop.CALL_LOOPINVARIANT: - op = op.copy_and_change(rop.CALL) - elif op.getopnum() == rop.VIRTUAL_REF: - op = ResOperation(rop.SAME_AS, [op.getarg(0)], op.result) - elif op.getopnum() == rop.VIRTUAL_REF_FINISH: - return [] - return [op] - -def optimize_loop(metainterp_sd, old_loops, loop): - if old_loops: - assert len(old_loops) == 1 - return old_loops[0] - else: - # copy loop operations here - # we need it since the backend can modify those lists, which make - # get_guard_op in compile.py invalid - # in fact, x86 modifies this list for moving GCs - memo = resume.ResumeDataLoopMemo(metainterp_sd) - newoperations = [] - for op in loop.operations: - if op.is_guard(): - descr = op.getdescr() - assert isinstance(descr, compile.ResumeGuardDescr) - modifier = resume.ResumeDataVirtualAdder(descr, memo) - newboxes = modifier.finish(EMPTY_VALUES) - descr.store_final_boxes(op, newboxes) - newoperations.extend(transform(op)) - loop.operations = newoperations - jumpop = newoperations[-1] - if jumpop.getopnum() == rop.JUMP: - jumpop.setdescr(loop.token) - return None - -def optimize_bridge(metainterp_sd, old_loops, loop, inline_short_preamble, - retraced): - optimize_loop(metainterp_sd, [], loop) - jumpop = loop.operations[-1] - if jumpop.getopnum() == rop.JUMP: - jumpop.setdescr(old_loops[0]) - return old_loops[0] diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. From commits-noreply at bitbucket.org Wed Mar 16 18:35:24 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 16 Mar 2011 18:35:24 +0100 (CET) Subject: [pypy-svn] pypy default: DML queries don't have a description. 
Message-ID: <20110316173524.D0FAA282B9D@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42723:3a9ce32bfe66 Date: 2011-03-16 13:34 -0400 http://bitbucket.org/pypy/pypy/changeset/3a9ce32bfe66/ Log: DML queries don't have a description. diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -31,9 +31,9 @@ from threading import _get_ident as thread_get_ident names = "sqlite3.dll libsqlite3.so.0 libsqlite3.so libsqlite3.dylib".split() -for name in names: +for name in names: try: - sqlite = cdll.LoadLibrary(name) + sqlite = cdll.LoadLibrary(name) break except OSError: continue @@ -1032,6 +1032,8 @@ self.statement = None def _get_description(self): + if self.kind == "DML": + return None desc = [] for i in xrange(sqlite.sqlite3_column_count(self.statement)): name = sqlite.sqlite3_column_name(self.statement, i).split("[")[0].strip() @@ -1140,7 +1142,7 @@ def _convert_result(con, val): if val is None: - sqlite.sqlite3_result_null(con) + sqlite.sqlite3_result_null(con) elif isinstance(val, (bool, int, long)): sqlite.sqlite3_result_int64(con, int(val)) elif isinstance(val, str): diff --git a/lib-python/modified-2.7.0/sqlite3/test/regression.py b/lib-python/modified-2.7.0/sqlite3/test/regression.py --- a/lib-python/modified-2.7.0/sqlite3/test/regression.py +++ b/lib-python/modified-2.7.0/sqlite3/test/regression.py @@ -264,6 +264,16 @@ """ self.assertRaises(sqlite.Warning, self.con, 1) + def CheckUpdateDescriptionNone(self): + """ + Call Cursor.update with an UPDATE query and check that it sets the + cursor's description to be None. 
+ """ + cur = self.con.cursor() + cur.execute("CREATE TABLE foo (id INTEGER)") + cur.execute("UPDATE foo SET id = 3 WHERE id = 1") + self.assertEqual(cur.description, None) + def suite(): regression_suite = unittest.makeSuite(RegressionTests, "Check") return unittest.TestSuite((regression_suite,)) From commits-noreply at bitbucket.org Wed Mar 16 18:35:25 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 16 Mar 2011 18:35:25 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110316173525.250A0282BD6@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42724:6c6b14bfadd2 Date: 2011-03-16 13:35 -0400 http://bitbucket.org/pypy/pypy/changeset/6c6b14bfadd2/ Log: merged upstream From commits-noreply at bitbucket.org Wed Mar 16 19:03:47 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 16 Mar 2011 19:03:47 +0100 (CET) Subject: [pypy-svn] pypy default: Don't allow arbitrary kwargs to sqlite3 connection. Message-ID: <20110316180347.34C5E36C055@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42725:6239935d3564 Date: 2011-03-16 14:02 -0400 http://bitbucket.org/pypy/pypy/changeset/6239935d3564/ Log: Don't allow arbitrary kwargs to sqlite3 connection. diff --git a/lib_pypy/_sqlite3.py b/lib_pypy/_sqlite3.py --- a/lib_pypy/_sqlite3.py +++ b/lib_pypy/_sqlite3.py @@ -232,7 +232,7 @@ return unicode(x, 'utf-8') class Connection(object): - def __init__(self, database, isolation_level="", detect_types=0, timeout=None, *args, **kwargs): + def __init__(self, database, isolation_level="", detect_types=0, timeout=None, cached_statements=None, factory=None): self.db = c_void_p() if sqlite.sqlite3_open(database, byref(self.db)) != SQLITE_OK: raise OperationalError("Could not open database") From commits-noreply at bitbucket.org Wed Mar 16 19:42:02 2011 From: commits-noreply at bitbucket.org (ademan) Date: Wed, 16 Mar 2011 19:42:02 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Cleaning up a bit. 
Message-ID: <20110316184202.E3A17282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42726:bc1f407a6e5f Date: 2011-03-16 11:41 -0700 http://bitbucket.org/pypy/pypy/changeset/bc1f407a6e5f/ Log: Cleaning up a bit. diff --git a/pypy/jit/metainterp/optimizeopt/fold_intadd.py b/pypy/jit/metainterp/optimizeopt/fold_intadd.py --- a/pypy/jit/metainterp/optimizeopt/fold_intadd.py +++ b/pypy/jit/metainterp/optimizeopt/fold_intadd.py @@ -21,7 +21,6 @@ func(self, op) break else: - #self.optimize_default(op) self.emit_operation(op) def _int_operation(self, variable, constant, result): @@ -66,9 +65,7 @@ if lv.is_constant() and rv.is_constant(): self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? elif lv.is_constant(): - #constant = lv.box.getint() - #self._process_add(op.getarg(1), constant, result) - # TODO: implement + # TODO: implement? self.emit_operation(op) elif rv.is_constant(): constant = rv.box.getint() @@ -76,21 +73,4 @@ else: self.emit_operation(op) - def optimize_default(self, op): - for i in range(op.numargs()): - arg = self.getvalue(op.getarg(i)) - if arg.is_constant(): - continue - - try: - variable = op.getarg(i) - root, constant = self.args[variable] - - new_op = self._int_operation(root, constant, variable) - self.emit_operation(new_op) - except KeyError: - pass - self.emit_operation(op) - - optimize_ops = _findall(OptAddition, 'optimize_') From commits-noreply at bitbucket.org Wed Mar 16 20:15:05 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 20:15:05 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Simplify the logic. Message-ID: <20110316191505.59B82282B9D@codespeak.net> Author: Armin Rigo Branch: jit-lsprofile Changeset: r42727:84a431cd73a0 Date: 2011-03-16 13:49 -0400 http://bitbucket.org/pypy/pypy/changeset/84a431cd73a0/ Log: Simplify the logic. 
diff --git a/pypy/rlib/rtimer.py b/pypy/rlib/rtimer.py --- a/pypy/rlib/rtimer.py +++ b/pypy/rlib/rtimer.py @@ -1,25 +1,14 @@ import time -import py - -from pypy.rlib.rarithmetic import r_longlong +from pypy.rlib.rarithmetic import r_longlong, r_ulonglong from pypy.rpython.extregistry import ExtRegistryEntry from pypy.rpython.lltypesystem import rffi -from pypy.tool.autopath import pypydir -eci = rffi.ExternalCompilationInfo( - include_dirs = [str(py.path.local(pypydir).join('translator', 'c'))], - includes=["src/timer.h"], - separate_module_sources = [' '], -) -c_read_timestamp = rffi.llexternal( - 'pypy_read_timestamp', [], rffi.LONGLONG, - compilation_info=eci, _nowrapper=True -) - def read_timestamp(): - return c_read_timestamp() + # returns a longlong. When running on top of python, build + # the result a bit arbitrarily. + return r_longlong(r_ulonglong(long(time.time() * 500000000))) class ReadTimestampEntry(ExtRegistryEntry): From commits-noreply at bitbucket.org Wed Mar 16 20:15:08 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 20:15:08 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Start supporting both 32-bit and 64-bit. Type differences: Message-ID: <20110316191508.780BB282BD8@codespeak.net> Author: Armin Rigo Branch: jit-lsprofile Changeset: r42728:5009a0570c3a Date: 2011-03-16 14:07 -0400 http://bitbucket.org/pypy/pypy/changeset/5009a0570c3a/ Log: Start supporting both 32-bit and 64-bit. Type differences: BoxFloats store floats on 64-bit but longlongs on 32-bit, and read_timestamp() returns an int on 64-bit but a r_longlong on 32-bit. 
diff --git a/pypy/rlib/rtimer.py b/pypy/rlib/rtimer.py --- a/pypy/rlib/rtimer.py +++ b/pypy/rlib/rtimer.py @@ -1,14 +1,21 @@ import time -from pypy.rlib.rarithmetic import r_longlong, r_ulonglong +from pypy.rlib.rarithmetic import r_longlong, r_ulonglong, r_uint +from pypy.rlib.rarithmetic import intmask, longlongmask from pypy.rpython.extregistry import ExtRegistryEntry -from pypy.rpython.lltypesystem import rffi +from pypy.rpython.lltypesystem import lltype, rffi + +_is_64_bit = r_uint.BITS > 32 def read_timestamp(): - # returns a longlong. When running on top of python, build - # the result a bit arbitrarily. - return r_longlong(r_ulonglong(long(time.time() * 500000000))) + # Returns a longlong on 32-bit, and a regular int on 64-bit. + # When running on top of python, build the result a bit arbitrarily. + x = long(time.time() * 500000000) + if _is_64_bit: + return intmask(x) + else: + return longlongmask(x) class ReadTimestampEntry(ExtRegistryEntry): @@ -16,8 +23,15 @@ def compute_result_annotation(self): from pypy.annotation.model import SomeInteger - return SomeInteger(knowntype=r_longlong) + if _is_64_bit: + return SomeInteger() + else: + return SomeInteger(knowntype=r_longlong) def specialize_call(self, hop): hop.exception_cannot_occur() - return hop.genop("ll_read_timestamp", [], resulttype=rffi.LONGLONG) + if _is_64_bit: + resulttype = lltype.Signed + else: + resulttype = rffi.LONGLONG + return hop.genop("ll_read_timestamp", [], resulttype=resulttype) diff --git a/pypy/jit/codewriter/longlong.py b/pypy/jit/codewriter/longlong.py --- a/pypy/jit/codewriter/longlong.py +++ b/pypy/jit/codewriter/longlong.py @@ -16,6 +16,7 @@ from pypy.rlib.objectmodel import compute_hash + is_64_bit = True supports_longlong = False r_float_storage = float FLOATSTORAGE = lltype.Float @@ -32,6 +33,7 @@ from pypy.rlib import rarithmetic, longlong2float + is_64_bit = False supports_longlong = True r_float_storage = rarithmetic.r_longlong FLOATSTORAGE = lltype.SignedLongLong diff 
--git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -25,7 +25,7 @@ from pypy.rlib.objectmodel import ComputedIntSymbolic, we_are_translated from pypy.rlib.rarithmetic import ovfcheck from pypy.rlib.rarithmetic import r_longlong, r_ulonglong, r_uint -from pypy.rlib.rtimer import c_read_timestamp +from pypy.rlib.rtimer import read_timestamp import py from pypy.tool.ansi_print import ansi_log @@ -858,7 +858,7 @@ return llmemory.cast_ptr_to_adr(opaque_frame) def op_read_timestamp(self, descr): - return c_read_timestamp() + return read_timestamp() def op_call_may_force(self, calldescr, func, *args): assert not self._forced diff --git a/pypy/jit/metainterp/executor.py b/pypy/jit/metainterp/executor.py --- a/pypy/jit/metainterp/executor.py +++ b/pypy/jit/metainterp/executor.py @@ -5,8 +5,8 @@ from pypy.rpython.lltypesystem import lltype, llmemory, rstr from pypy.rpython.ootypesystem import ootype from pypy.rpython.lltypesystem.lloperation import llop -from pypy.rlib.rarithmetic import ovfcheck, r_uint, intmask -from pypy.rlib.rtimer import c_read_timestamp +from pypy.rlib.rarithmetic import ovfcheck, r_uint, intmask, r_longlong +from pypy.rlib.rtimer import read_timestamp from pypy.rlib.unroll import unrolling_iterable from pypy.jit.metainterp.history import BoxInt, BoxPtr, BoxFloat, check_descr from pypy.jit.metainterp.history import INT, REF, FLOAT, VOID, AbstractDescr @@ -229,7 +229,13 @@ rstr.copy_unicode_contents(src, dst, srcstart, dststart, length) def do_read_timestamp(cpu, _): - return BoxFloat(c_read_timestamp()) + x = read_timestamp() + if longlong.is_64_bit: + assert isinstance(x, int) # 64-bit + return BoxInt(x) + else: + assert isinstance(x, r_longlong) # 32-bit + return BoxFloat(x) # ____________________________________________________________ diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py --- 
a/pypy/jit/metainterp/blackhole.py +++ b/pypy/jit/metainterp/blackhole.py @@ -1,5 +1,5 @@ from pypy.rlib.unroll import unrolling_iterable -from pypy.rlib.rtimer import c_read_timestamp +from pypy.rlib.rtimer import read_timestamp from pypy.rlib.rarithmetic import intmask, LONG_BIT, r_uint, ovfcheck from pypy.rlib.objectmodel import we_are_translated from pypy.rlib.debug import debug_start, debug_stop @@ -1206,9 +1206,9 @@ def bhimpl_unicodesetitem(cpu, unicode, index, newchr): cpu.bh_unicodesetitem(unicode, index, newchr) - @arguments(returns="f") + @arguments(returns=(longlong.is_64_bit and "i" or "f")) def bhimpl_ll_read_timestamp(): - return c_read_timestamp() + return read_timestamp() # ---------- # helpers to resume running in blackhole mode when a guard failed From commits-noreply at bitbucket.org Wed Mar 16 20:15:10 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 16 Mar 2011 20:15:10 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: (alex, arigo, greg) Support read_timestamp in x86-64. Message-ID: <20110316191510.B0290282BD6@codespeak.net> Author: Armin Rigo Branch: jit-lsprofile Changeset: r42729:d7c8a96f8263 Date: 2011-03-16 15:14 -0400 http://bitbucket.org/pypy/pypy/changeset/d7c8a96f8263/ Log: (alex, arigo, greg) Support read_timestamp in x86-64. 
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -1364,13 +1364,15 @@ assert 0, itemsize def genop_read_timestamp(self, op, arglocs, resloc): - # XXX cheat - addr1 = self.fail_boxes_int.get_addr_for_num(0) - addr2 = self.fail_boxes_int.get_addr_for_num(1) self.mc.RDTSC() - self.mc.MOV(heap(addr1), eax) - self.mc.MOV(heap(addr2), edx) - self.mc.MOVSD(resloc, heap(addr1)) + if longlong.is_64_bit: + self.mc.SHL_ri(edx.value, 32) + self.mc.OR_rr(edx.value, eax.value) + else: + loc1, = arglocs + self.mc.MOVD_xr(loc1.value, edx.value) + self.mc.MOVD_xr(resloc.value, eax.value) + self.mc.PUNPCKLDQ_xx(resloc.value, loc1.value) def genop_guard_guard_true(self, ign_1, guard_op, guard_token, locs, ign_2): loc = locs[0] diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -1356,6 +1356,19 @@ self.execute_operation(rop.JIT_DEBUG, [c_box, c_nest, c_nest, c_nest, c_nest], 'void') + def test_read_timestamp(self): + if longlong.is_64_bit: + got1 = self.execute_operation(rop.READ_TIMESTAMP, [], 'int') + got2 = self.execute_operation(rop.READ_TIMESTAMP, [], 'int') + res1 = got1.getint() + res2 = got2.getint() + else: + got1 = self.execute_operation(rop.READ_TIMESTAMP, [], 'float') + got2 = self.execute_operation(rop.READ_TIMESTAMP, [], 'float') + res1 = got1.getlonglong() + res2 = got2.getlonglong() + assert res1 < res2 < res1 + 2**32 + class LLtypeBackendTest(BaseBackendTest): diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -1158,13 +1158,26 @@ def consider_read_timestamp(self, op): tmpbox_high = TempBox() - tmpbox_low = TempBox() self.rm.force_allocate_reg(tmpbox_high, selected_reg=eax) - self.rm.force_allocate_reg(tmpbox_low, 
selected_reg=edx) - result_loc = self.xrm.force_allocate_reg(op.result) - self.Perform(op, [], result_loc) + if longlong.is_64_bit: + # on 64-bit, use rax as temporary register and returns the + # result in rdx + result_loc = self.rm.force_allocate_reg(op.result, + selected_reg=edx) + self.Perform(op, [], result_loc) + else: + # on 32-bit, use both eax and edx as temporary registers, + # use a temporary xmm register, and returns the result in + # another xmm register. + tmpbox_low = TempBox() + self.rm.force_allocate_reg(tmpbox_low, selected_reg=edx) + xmmtmpbox = TempBox() + xmmtmploc = self.xrm.force_allocate_reg(xmmtmpbox) + result_loc = self.xrm.force_allocate_reg(op.result) + self.Perform(op, [xmmtmploc], result_loc) + self.xrm.possibly_free_var(xmmtmpbox) + self.rm.possibly_free_var(tmpbox_low) self.rm.possibly_free_var(tmpbox_high) - self.rm.possibly_free_var(tmpbox_low) def consider_jump(self, op): assembler = self.assembler From commits-noreply at bitbucket.org Thu Mar 17 01:11:31 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 01:11:31 +0100 (CET) Subject: [pypy-svn] pypy default: (arigo (and alex and greg around)) Message-ID: <20110317001131.6E2DA282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42730:7e193f3296f7 Date: 2011-03-16 20:10 -0400 http://bitbucket.org/pypy/pypy/changeset/7e193f3296f7/ Log: (arigo (and alex and greg around)) Fix an obscure case triggered when the portal returns a short integer-like type (like a char or a bool). Fixed by changing the signature of assembler_call_helper() and handle_jitexception() to return the standardized type instead of a char or a bool. 
diff --git a/pypy/jit/metainterp/warmstate.py b/pypy/jit/metainterp/warmstate.py --- a/pypy/jit/metainterp/warmstate.py +++ b/pypy/jit/metainterp/warmstate.py @@ -47,7 +47,7 @@ elif isinstance(value, float): return longlong.getfloatstorage(value) else: - return intmask(value) + return lltype.cast_primitive(lltype.Signed, value) @specialize.arg(0) def unwrap(TYPE, box): diff --git a/pypy/jit/backend/x86/test/test_ztranslation.py b/pypy/jit/backend/x86/test/test_ztranslation.py --- a/pypy/jit/backend/x86/test/test_ztranslation.py +++ b/pypy/jit/backend/x86/test/test_ztranslation.py @@ -64,7 +64,7 @@ k = myabs(j) if k - abs(j): raise ValueError if k - abs(-j): raise ValueError - return total * 10 + return chr(total % 253) # from pypy.rpython.lltypesystem import lltype, rffi from pypy.rlib.libffi import types, CDLL, ArgChain @@ -84,10 +84,12 @@ argchain.arg(x) res = func.call(argchain, rffi.DOUBLE) i -= 1 - return int(res) + return res # def main(i, j): - return f(i, j) + libffi_stuff(i, j) + a_char = f(i, j) + a_float = libffi_stuff(i, j) + return ord(a_char) * 10 + int(a_float) expected = main(40, -49) res = self.meta_interp(main, [40, -49]) assert res == expected diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py --- a/pypy/jit/metainterp/warmspot.py +++ b/pypy/jit/metainterp/warmspot.py @@ -489,8 +489,19 @@ jd._PTR_JIT_ENTER_FUNCTYPE) = self.cpu.ts.get_FuncType(ALLARGS, lltype.Void) (jd._PORTAL_FUNCTYPE, jd._PTR_PORTAL_FUNCTYPE) = self.cpu.ts.get_FuncType(ALLARGS, RESTYPE) + # + if jd.result_type == 'v': + ASMRESTYPE = lltype.Void + elif jd.result_type == history.INT: + ASMRESTYPE = lltype.Signed + elif jd.result_type == history.REF: + ASMRESTYPE = llmemory.GCREF + elif jd.result_type == history.FLOAT: + ASMRESTYPE = lltype.Float + else: + assert False (_, jd._PTR_ASSEMBLER_HELPER_FUNCTYPE) = self.cpu.ts.get_FuncType( - [lltype.Signed, llmemory.GCREF], RESTYPE) + [lltype.Signed, llmemory.GCREF], ASMRESTYPE) def 
rewrite_can_enter_jits(self): sublists = {} @@ -671,7 +682,7 @@ raise Exception, value def handle_jitexception(e): - # XXX the bulk of this function is a copy-paste from above :-( + # XXX the bulk of this function is mostly a copy-paste from above try: raise e except self.ContinueRunningNormally, e: @@ -680,19 +691,22 @@ x = getattr(e, attrname)[count] x = specialize_value(ARGTYPE, x) args = args + (x,) - return ll_portal_runner(*args) + result = ll_portal_runner(*args) + if result_kind != 'void': + result = unspecialize_value(result) + return result except self.DoneWithThisFrameVoid: assert result_kind == 'void' return except self.DoneWithThisFrameInt, e: assert result_kind == 'int' - return specialize_value(RESULT, e.result) + return e.result except self.DoneWithThisFrameRef, e: assert result_kind == 'ref' - return specialize_value(RESULT, e.result) + return e.result except self.DoneWithThisFrameFloat, e: assert result_kind == 'float' - return specialize_value(RESULT, e.result) + return e.result except self.ExitFrameWithExceptionRef, e: value = ts.cast_to_baseclass(e.value) if not we_are_translated(): @@ -736,17 +750,16 @@ def handle_jitexception_from_blackhole(bhcaller, e): result = handle_jitexception(e) - # - if result_kind != 'void': - result = unspecialize_value(result) - if result_kind == 'int': - bhcaller._setup_return_value_i(result) - elif result_kind == 'ref': - bhcaller._setup_return_value_r(result) - elif result_kind == 'float': - bhcaller._setup_return_value_f(result) - else: - assert False + if result_kind == 'void': + pass + if result_kind == 'int': + bhcaller._setup_return_value_i(result) + elif result_kind == 'ref': + bhcaller._setup_return_value_r(result) + elif result_kind == 'float': + bhcaller._setup_return_value_f(result) + else: + assert False jd.handle_jitexc_from_bh = handle_jitexception_from_blackhole # ____________________________________________________________ From commits-noreply at bitbucket.org Thu Mar 17 02:22:21 2011 From: 
commits-noreply at bitbucket.org (ademan) Date: Thu, 17 Mar 2011 02:22:21 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Moved optimization. Message-ID: <20110317012221.C2FD8282B9D@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42731:026b2680bc47 Date: 2011-03-16 18:22 -0700 http://bitbucket.org/pypy/pypy/changeset/026b2680bc47/ Log: Moved optimization. diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -7,13 +7,13 @@ from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify -from pypy.jit.metainterp.optimizeopt.fold_intadd import OptAddition +from pypy.jit.metainterp.optimizeopt.addition import OptAddition from pypy.rlib.jit import PARAMETERS from pypy.rlib.unroll import unrolling_iterable ALL_OPTS = [ ('intbounds', OptIntBounds), - ('fold_intadd', OptAddition), + ('addition', OptAddition), ('rewrite', OptRewrite), ('virtualize', OptVirtualize), ('string', OptString), diff --git a/pypy/jit/metainterp/optimizeopt/addition.py b/pypy/jit/metainterp/optimizeopt/addition.py new file mode 100644 --- /dev/null +++ b/pypy/jit/metainterp/optimizeopt/addition.py @@ -0,0 +1,76 @@ +from pypy.jit.metainterp.optimizeopt.optimizer import * +from pypy.jit.metainterp.resoperation import opboolinvers, opboolreflex +from pypy.jit.metainterp.history import ConstInt +from pypy.jit.metainterp.optimizeutil import _findall +from pypy.jit.metainterp.resoperation import rop, ResOperation +from pypy.jit.codewriter.effectinfo import EffectInfo +from pypy.jit.metainterp.optimizeopt.intutils import IntBound +from pypy.rlib.rarithmetic import highest_bit + +class OptAddition(Optimization): + def __init__(self): + self.args = {} + + def 
reconstruct_for_next_iteration(self, optimizer, valuemap): + return OptAddition() + + def propagate_forward(self, op): + opnum = op.getopnum() + for value, func in optimize_ops: + if opnum == value: + func(self, op) + break + else: + self.emit_operation(op) + + def _int_operation(self, variable, constant, result): + if constant < 0: + constant = ConstInt(-constant) + return ResOperation(rop.INT_SUB, [variable, constant], result) + else: + constant = ConstInt(constant) + return ResOperation(rop.INT_ADD, [variable, constant], result) + + def _process_add(self, variable, constant, result): + try: + root, stored_constant = self.args[variable] + constant = constant + stored_constant + except KeyError: + root = variable + + self.args[result] = root, constant + + new_op = self._int_operation(root, constant, result) + self.emit_operation(new_op) + + def optimize_INT_ADD(self, op): + lv = self.getvalue(op.getarg(0)) + rv = self.getvalue(op.getarg(1)) + result = op.result + if lv.is_constant() and rv.is_constant(): + self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? + elif lv.is_constant(): + constant = lv.box.getint() + self._process_add(op.getarg(1), constant, result) + elif rv.is_constant(): + constant = rv.box.getint() + self._process_add(op.getarg(0), constant, result) + else: + self.emit_operation(op) + + def optimize_INT_SUB(self, op): + lv = self.getvalue(op.getarg(0)) + rv = self.getvalue(op.getarg(1)) + result = op.result + if lv.is_constant() and rv.is_constant(): + self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? + elif lv.is_constant(): + # TODO: implement? 
+ self.emit_operation(op) + elif rv.is_constant(): + constant = rv.box.getint() + self._process_add(op.getarg(0), -constant, result) + else: + self.emit_operation(op) + +optimize_ops = _findall(OptAddition, 'optimize_') diff --git a/pypy/jit/metainterp/optimizeopt/fold_intadd.py b/pypy/jit/metainterp/optimizeopt/fold_intadd.py deleted file mode 100644 --- a/pypy/jit/metainterp/optimizeopt/fold_intadd.py +++ /dev/null @@ -1,76 +0,0 @@ -from pypy.jit.metainterp.optimizeopt.optimizer import * -from pypy.jit.metainterp.resoperation import opboolinvers, opboolreflex -from pypy.jit.metainterp.history import ConstInt -from pypy.jit.metainterp.optimizeutil import _findall -from pypy.jit.metainterp.resoperation import rop, ResOperation -from pypy.jit.codewriter.effectinfo import EffectInfo -from pypy.jit.metainterp.optimizeopt.intutils import IntBound -from pypy.rlib.rarithmetic import highest_bit - -class OptAddition(Optimization): - def __init__(self): - self.args = {} - - def reconstruct_for_next_iteration(self, optimizer, valuemap): - return OptAddition() - - def propagate_forward(self, op): - opnum = op.getopnum() - for value, func in optimize_ops: - if opnum == value: - func(self, op) - break - else: - self.emit_operation(op) - - def _int_operation(self, variable, constant, result): - if constant < 0: - constant = ConstInt(-constant) - return ResOperation(rop.INT_SUB, [variable, constant], result) - else: - constant = ConstInt(constant) - return ResOperation(rop.INT_ADD, [variable, constant], result) - - def _process_add(self, variable, constant, result): - try: - root, stored_constant = self.args[variable] - constant = constant + stored_constant - except KeyError: - root = variable - - self.args[result] = root, constant - - new_op = self._int_operation(root, constant, result) - self.emit_operation(new_op) - - def optimize_INT_ADD(self, op): - lv = self.getvalue(op.getarg(0)) - rv = self.getvalue(op.getarg(1)) - result = op.result - if lv.is_constant() and 
rv.is_constant(): - self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? - elif lv.is_constant(): - constant = lv.box.getint() - self._process_add(op.getarg(1), constant, result) - elif rv.is_constant(): - constant = rv.box.getint() - self._process_add(op.getarg(0), constant, result) - else: - self.emit_operation(op) - - def optimize_INT_SUB(self, op): - lv = self.getvalue(op.getarg(0)) - rv = self.getvalue(op.getarg(1)) - result = op.result - if lv.is_constant() and rv.is_constant(): - self.emit_operation(op) # XXX: there's support for optimizing this elsewhere, right? - elif lv.is_constant(): - # TODO: implement? - self.emit_operation(op) - elif rv.is_constant(): - constant = rv.box.getint() - self._process_add(op.getarg(0), -constant, result) - else: - self.emit_operation(op) - -optimize_ops = _findall(OptAddition, 'optimize_') From commits-noreply at bitbucket.org Thu Mar 17 16:43:33 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 16:43:33 +0100 (CET) Subject: [pypy-svn] pypy default: Write a test and fix the obscure issue. Message-ID: <20110317154333.D8F43282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42732:4c61817002b1 Date: 2011-03-17 11:42 -0400 http://bitbucket.org/pypy/pypy/changeset/4c61817002b1/ Log: Write a test and fix the obscure issue. 
diff --git a/pypy/jit/metainterp/test/test_recursive.py b/pypy/jit/metainterp/test/test_recursive.py --- a/pypy/jit/metainterp/test/test_recursive.py +++ b/pypy/jit/metainterp/test/test_recursive.py @@ -1146,6 +1146,40 @@ res = self.meta_interp(main, [], inline=True, trace_limit=tlimit) assert ''.join(res.chars) == 'ABCDEFGHIabcdefghijJ' * 5 + def test_handle_jitexception_in_portal_returns_void(self): + # a test for _handle_jitexception_in_portal in blackhole.py + driver = JitDriver(greens = ['codeno'], reds = ['i', 'str'], + get_printable_location = lambda codeno: str(codeno)) + def do_can_enter_jit(codeno, i, str): + i = (i+1)-1 # some operations + driver.can_enter_jit(codeno=codeno, i=i, str=str) + def intermediate(codeno, i, str): + if i == 9: + do_can_enter_jit(codeno, i, str) + def portal(codeno, str): + i = value.initial + while i < 10: + intermediate(codeno, i, str) + driver.jit_merge_point(codeno=codeno, i=i, str=str) + i += 1 + if codeno == 64 and i == 10: + portal(96, str) + str += chr(codeno+i) + class Value: + initial = -1 + value = Value() + def main(): + value.initial = 0 + portal(64, '') + portal(64, '') + portal(64, '') + portal(64, '') + portal(64, '') + main() + for tlimit in [95, 90, 102]: + print 'tlimit =', tlimit + self.meta_interp(main, [], inline=True, trace_limit=tlimit) + def test_no_duplicates_bug(self): driver = JitDriver(greens = ['codeno'], reds = ['i'], get_printable_location = lambda codeno: str(codeno)) diff --git a/pypy/jit/metainterp/warmspot.py b/pypy/jit/metainterp/warmspot.py --- a/pypy/jit/metainterp/warmspot.py +++ b/pypy/jit/metainterp/warmspot.py @@ -752,7 +752,7 @@ result = handle_jitexception(e) if result_kind == 'void': pass - if result_kind == 'int': + elif result_kind == 'int': bhcaller._setup_return_value_i(result) elif result_kind == 'ref': bhcaller._setup_return_value_r(result) From commits-noreply at bitbucket.org Thu Mar 17 17:00:33 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 
17:00:33 +0100 (CET) Subject: [pypy-svn] pypy default: Add "make" as a dependency. Message-ID: <20110317160033.8A9FF282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42733:dccecf1ef5a8 Date: 2011-03-17 12:00 -0400 http://bitbucket.org/pypy/pypy/changeset/dccecf1ef5a8/ Log: Add "make" as a dependency. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt --- a/pypy/doc/getting-started-python.txt +++ b/pypy/doc/getting-started-python.txt @@ -38,7 +38,7 @@ 1. Install dependencies. You need (these are Debian package names, adapt as needed): - * ``gcc`` + * ``gcc`` and ``make`` * ``python-dev`` * ``python-ctypes`` if you are still using Python2.4 * ``libffi-dev`` From commits-noreply at bitbucket.org Thu Mar 17 17:03:10 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 17:03:10 +0100 (CET) Subject: [pypy-svn] pypy default: Print more information in case running Message-ID: <20110317160310.E89AD282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42734:3206f83a219c Date: 2011-03-17 12:02 -0400 http://bitbucket.org/pypy/pypy/changeset/3206f83a219c/ Log: Print more information in case running the external process failed. 
diff --git a/pypy/tool/runsubprocess.py b/pypy/tool/runsubprocess.py --- a/pypy/tool/runsubprocess.py +++ b/pypy/tool/runsubprocess.py @@ -70,5 +70,5 @@ assert results.startswith('(') results = eval(results) if results[0] is None: - raise OSError('%s: %s' % (args[0], results[1])) + raise OSError('%s: %s\nargs=%r' % (args[0], results[1], args)) return results From commits-noreply at bitbucket.org Thu Mar 17 18:04:03 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 17 Mar 2011 18:04:03 +0100 (CET) Subject: [pypy-svn] pypy default: port these two "brute-force" tests from test_pypy_c Message-ID: <20110317170403.A0700282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42735:a8f2a52b6da7 Date: 2011-03-17 16:18 +0100 http://bitbucket.org/pypy/pypy/changeset/a8f2a52b6da7/ Log: port these two "brute-force" tests from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -720,7 +720,17 @@ jump(p0, p1, p2, p3, p4, i9, i6, descr=) """) - def test_boolrewrite_invers(self): + def test_boolrewrite_inverse(self): + """ + Test for this case:: + guard(i < x) + ... + guard(i >= y) + + where x and y can be either constants or variables. There are cases in + which the second guard is proven to be always true. + """ + for a, b, res, opt_expected in (('2000', '2000', 20001000, True), ( '500', '500', 15001500, True), ( '300', '600', 16001700, False), @@ -761,6 +771,15 @@ assert ge_ops.count('int_ge') == 1 def test_boolrewrite_reflex(self): + """ + Test for this case:: + guard(i < x) + ... + guard(y > i) + + where x and y can be either constants or variables. There are cases in + which the second guard is proven to be always true. 
+ """ for a, b, res, opt_expected in (('2000', '2000', 10001000, True), ( '500', '500', 15001500, True), ( '300', '600', 14001700, False), @@ -798,3 +817,143 @@ # optimization is valid, and either fix the code or fix the # test :-) assert gt_ops.count('int_gt') == 1 + + + def test_boolrewrite_allcases_inverse(self): + """ + Test for this case:: + guard(i < x) + ... + guard(i > y) + + with all possible combination of binary comparison operators. This + test only checks that we get the expected result, not that any + optimization has been applied. + """ + + def opval(i, op, a): + if eval('%d %s %d' % (i, op, a)): return 1 + return 2 + + ops = ('<', '>', '<=', '>=', '==', '!=') + for op1 in ops: + for op2 in ops: + for a,b in ((500, 500), (300, 600)): + res = 0 + res += opval(a-1, op1, a) * (a) + res += opval( a, op1, a) + res += opval(a+1, op1, a) * (1000 - a - 1) + res += opval(b-1, op2, b) * 10000 * (b) + res += opval( b, op2, b) * 10000 + res += opval(b+1, op2, b) * 10000 * (1000 - b - 1) + + src = """ + def main(): + sa = 0 + for i in range(1000): + if i %s %d: + sa += 1 + else: + sa += 2 + if i %s %d: + sa += 10000 + else: + sa += 20000 + return sa + """ % (op1, a, op2, b) + log = self.run(src, threshold=400) + assert log.result == res + # check that the JIT actually ran + assert len(log.loops_by_filename(self.filepath)) > 0 + + src = """ + def main(): + sa = 0 + i = 0.0 + while i < 250.0: + if i %s %f: + sa += 1 + else: + sa += 2 + if i %s %f: + sa += 10000 + else: + sa += 20000 + i += 0.25 + return sa + """ % (op1, float(a)/4.0, op2, float(b)/4.0) + log = self.run(src, threshold=400) + assert log.result == res + # check that the JIT actually ran + assert len(log.loops_by_filename(self.filepath)) > 0 + + + def test_boolrewrite_allcases_reflex(self): + """ + Test for this case:: + guard(i < x) + ... + guard(x > i) + + with all possible combination of binary comparison operators. 
This + test only checks that we get the expected result, not that any + optimization has been applied. + """ + + def opval(i, op, a): + if eval('%d %s %d' % (i, op, a)): return 1 + return 2 + + ops = ('<', '>', '<=', '>=', '==', '!=') + for op1 in ops: + for op2 in ops: + for a,b in ((500, 500), (300, 600)): + res = 0 + res += opval(a-1, op1, a) * (a) + res += opval( a, op1, a) + res += opval(a+1, op1, a) * (1000 - a - 1) + res += opval(b, op2, b-1) * 10000 * (b) + res += opval(b, op2, b) * 10000 + res += opval(b, op2, b+1) * 10000 * (1000 - b - 1) + + src = """ + def main(): + sa = 0 + for i in range(1000): + if i %s %d: + sa += 1 + else: + sa += 2 + if %d %s i: + sa += 10000 + else: + sa += 20000 + return sa + """ % (op1, a, b, op2) + log = self.run(src) + log = self.run(src, threshold=400) + assert log.result == res + # check that the JIT actually ran + assert len(log.loops_by_filename(self.filepath)) > 0 + + src = """ + def main(): + sa = 0 + i = 0.0 + while i < 250.0: + if i %s %f: + sa += 1 + else: + sa += 2 + if %f %s i: + sa += 10000 + else: + sa += 20000 + i += 0.25 + return sa + """ % (op1, float(a)/4.0, float(b)/4.0, op2) + log = self.run(src, threshold=400) + assert log.result == res + # check that the JIT actually ran + assert len(log.loops_by_filename(self.filepath)) > 0 + From commits-noreply at bitbucket.org Thu Mar 17 18:04:04 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 17 Mar 2011 18:04:04 +0100 (CET) Subject: [pypy-svn] pypy default: simplify these two tests by automatically computing the result instead of using complex logic for it Message-ID: <20110317170404.6E127282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42736:0758b2b0a2e5 Date: 2011-03-17 17:15 +0100 http://bitbucket.org/pypy/pypy/changeset/0758b2b0a2e5/ Log: simplify these two tests by automatically computing the result instead of using complex logic for it diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py 
b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -830,23 +830,10 @@ test only checks that we get the expected result, not that any optimization has been applied. """ - - def opval(i, op, a): - if eval('%d %s %d' % (i, op, a)): return 1 - return 2 - ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): - res = 0 - res += opval(a-1, op1, a) * (a) - res += opval( a, op1, a) - res += opval(a+1, op1, a) * (1000 - a - 1) - res += opval(b-1, op2, b) * 10000 * (b) - res += opval( b, op2, b) * 10000 - res += opval(b+1, op2, b) * 10000 * (1000 - b - 1) - src = """ def main(): sa = 0 @@ -861,10 +848,7 @@ sa += 20000 return sa """ % (op1, a, op2, b) - log = self.run(src, threshold=400) - assert log.result == res - # check that the JIT actually ran - assert len(log.loops_by_filename(self.filepath)) > 0 + self.run_and_check(src, threshold=400) src = """ def main(): @@ -882,10 +866,7 @@ i += 0.25 return sa """ % (op1, float(a)/4.0, op2, float(b)/4.0) - log = self.run(src, threshold=400) - assert log.result == res - # check that the JIT actually ran - assert len(log.loops_by_filename(self.filepath)) > 0 + self.run_and_check(src, threshold=400) def test_boolrewrite_allcases_reflex(self): @@ -899,23 +880,10 @@ test only checks that we get the expected result, not that any optimization has been applied. 
""" - - def opval(i, op, a): - if eval('%d %s %d' % (i, op, a)): return 1 - return 2 - ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): - res = 0 - res += opval(a-1, op1, a) * (a) - res += opval( a, op1, a) - res += opval(a+1, op1, a) * (1000 - a - 1) - res += opval(b, op2, b-1) * 10000 * (b) - res += opval(b, op2, b) * 10000 - res += opval(b, op2, b+1) * 10000 * (1000 - b - 1) - src = """ def main(): sa = 0 @@ -930,11 +898,7 @@ sa += 20000 return sa """ % (op1, a, b, op2) - log = self.run(src) - log = self.run(src, threshold=400) - assert log.result == res - # check that the JIT actually ran - assert len(log.loops_by_filename(self.filepath)) > 0 + self.run_and_check(src, threshold=400) src = """ def main(): @@ -952,8 +916,4 @@ i += 0.25 return sa """ % (op1, float(a)/4.0, float(b)/4.0, op2) - log = self.run(src, threshold=400) - assert log.result == res - # check that the JIT actually ran - assert len(log.loops_by_filename(self.filepath)) > 0 - + self.run_and_check(src, threshold=400) diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -63,6 +63,13 @@ log.result = eval(stdout) return log + def run_and_check(self, src, args=[], **jitopts): + log1 = self.run(src, args, threshold=-1) # without the JIT + log2 = self.run(src, args, **jitopts) # with the JIT + assert log1.result == log2.result + # check that the JIT actually ran + assert len(log2.loops_by_filename(self.filepath)) > 0 + class TestLog(object): From commits-noreply at bitbucket.org Thu Mar 17 18:04:05 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 17 Mar 2011 18:04:05 +0100 (CET) Subject: [pypy-svn] pypy default: port test_boolrewrite_ptr from test_pypy_c Message-ID: <20110317170405.0771C282BD6@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42737:ff95a8606eee 
Date: 2011-03-17 17:18 +0100 http://bitbucket.org/pypy/pypy/changeset/ff95a8606eee/ Log: port test_boolrewrite_ptr from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -917,3 +917,32 @@ return sa """ % (op1, float(a)/4.0, float(b)/4.0, op2) self.run_and_check(src, threshold=400) + + def test_boolrewrite_ptr(self): + # XXX this test is way too imprecise in what it is actually testing + # it should count the number of guards instead + compares = ('a == b', 'b == a', 'a != b', 'b != a', 'a == c', 'c != b') + for e1 in compares: + for e2 in compares: + src = """ + class tst(object): + pass + def main(): + a = tst() + b = tst() + c = tst() + sa = 0 + for i in range(1000): + if %s: + sa += 1 + else: + sa += 2 + if %s: + sa += 10000 + else: + sa += 20000 + if i > 750: + a = b + return sa + """ % (e1, e2) + self.run_and_check(src, threshold=400) From commits-noreply at bitbucket.org Thu Mar 17 18:04:05 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 17 Mar 2011 18:04:05 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110317170405.88175282BD7@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42738:cbc7eb739340 Date: 2011-03-17 18:03 +0100 http://bitbucket.org/pypy/pypy/changeset/cbc7eb739340/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 17 18:26:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 18:26:01 +0100 (CET) Subject: [pypy-svn] pypy default: A test, and a very tentative fix (no clue really, as it depends on Message-ID: <20110317172601.5226236C20F@codespeak.net> Author: Armin Rigo Branch: Changeset: r42739:678a07ff0b73 Date: 2011-03-17 13:25 -0400 http://bitbucket.org/pypy/pypy/changeset/678a07ff0b73/ Log: A test, and a very tentative fix (no clue really, as it depends on the "XXX 
What is this check about?" already present). diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -49,7 +49,8 @@ absolute = "from __future__ import absolute_import\nimport string", relative_b = "from __future__ import absolute_import\nfrom . import string", relative_c = "from __future__ import absolute_import\nfrom .string import inpackage", - relative_f = "from .os import sep", + relative_f = "from .imp import get_magic", + relative_g = "import imp; from .imp import get_magic", ) setuppkg("pkg.pkg1", a = '', @@ -359,7 +360,13 @@ def imp(): from pkg import relative_f exc = raises(ImportError, imp) - assert exc.value.message == "No module named pkg.os" + assert exc.value.message == "No module named pkg.imp" + + def test_no_relative_import_bug(self): + def imp(): + from pkg import relative_g + exc = raises(ImportError, imp) + assert exc.value.message == "No module named pkg.imp" def test_future_relative_import_level_1(self): from pkg import relative_c diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -158,7 +158,8 @@ w_mod = check_sys_modules(space, w(rel_modulename)) if (w_mod is None or - not space.is_w(w_mod, space.w_None)): + not space.is_w(w_mod, space.w_None) or + level > 0): # if no level was set, ignore import errors, and # fall back to absolute import at the end of the From commits-noreply at bitbucket.org Thu Mar 17 18:55:42 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 18:55:42 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac, necaris): Initial sphinxification of documentation, using sphinx-quickstart (0.6.4) Message-ID: <20110317175542.9831D282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42740:c500bfb42c9a Date: 2011-03-14 11:49 -0400 
http://bitbucket.org/pypy/pypy/changeset/c500bfb42c9a/ Log: (dmalcolm, lac, necaris): Initial sphinxification of documentation, using sphinx-quickstart (0.6.4) The temp_index.txt is the index.txt that would have been generated. Keeping for now whilst we figure out how to merge it with the existing index.txt diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -52,6 +52,7 @@ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ +^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ ^pypy/doc/jit/.+\.html$ diff --git a/pypy/doc/conf.py b/pypy/doc/conf.py new file mode 100644 --- /dev/null +++ b/pypy/doc/conf.py @@ -0,0 +1,198 @@ +# -*- coding: utf-8 -*- +# +# PyPy documentation build configuration file, created by +# sphinx-quickstart on Mon Mar 14 10:44:41 2011. +# +# This file is execfile()d with the current directory set to its containing dir. +# +# Note that not all possible configuration values are present in this +# autogenerated file. +# +# All configuration values have a default; values that are commented out +# serve to show the default. + +import sys, os + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. +#sys.path.append(os.path.abspath('.')) + +# -- General configuration ----------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be extensions +# coming with Sphinx (named 'sphinx.ext.*') or your custom ones. +extensions = ['sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.ifconfig'] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ['_templates'] + +# The suffix of source filenames. +source_suffix = '.txt' + +# The encoding of source files. 
+#source_encoding = 'utf-8' + +# The master toctree document. +master_doc = 'temp_index' + +# General information about the project. +project = u'PyPy' +copyright = u'2011, The PyPy Project' + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. +# +# The short X.Y version. +version = '1.4.1' +# The full version, including alpha/beta/rc tags. +release = '1.4.1' + +# The language for content autogenerated by Sphinx. Refer to documentation +# for a list of supported languages. +#language = None + +# There are two options for replacing |today|: either, you set today to some +# non-false value, then it is used: +#today = '' +# Else, today_fmt is used as the format for a strftime call. +#today_fmt = '%B %d, %Y' + +# List of documents that shouldn't be included in the build. +#unused_docs = [] + +# List of directories, relative to source directory, that shouldn't be searched +# for source files. +exclude_trees = ['_build'] + +# The reST default role (used for this markup: `text`) to use for all documents. +#default_role = None + +# If true, '()' will be appended to :func: etc. cross-reference text. +#add_function_parentheses = True + +# If true, the current module name will be prepended to all description +# unit titles (such as .. function::). +#add_module_names = True + +# If true, sectionauthor and moduleauthor directives will be shown in the +# output. They are ignored by default. +#show_authors = False + +# The name of the Pygments (syntax highlighting) style to use. +pygments_style = 'sphinx' + +# A list of ignored prefixes for module index sorting. +#modindex_common_prefix = [] + + +# -- Options for HTML output --------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. Major themes that come with +# Sphinx are currently 'default' and 'sphinxdoc'. 
+html_theme = 'default' + +# Theme options are theme-specific and customize the look and feel of a theme +# further. For a list of options available for each theme, see the +# documentation. +#html_theme_options = {} + +# Add any paths that contain custom themes here, relative to this directory. +#html_theme_path = [] + +# The name for this set of Sphinx documents. If None, it defaults to +# " v documentation". +#html_title = None + +# A shorter title for the navigation bar. Default is the same as html_title. +#html_short_title = None + +# The name of an image file (relative to this directory) to place at the top +# of the sidebar. +#html_logo = None + +# The name of an image file (within the static path) to use as favicon of the +# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 +# pixels large. +#html_favicon = None + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ['_static'] + +# If not '', a 'Last updated on:' timestamp is inserted at every page bottom, +# using the given strftime format. +#html_last_updated_fmt = '%b %d, %Y' + +# If true, SmartyPants will be used to convert quotes and dashes to +# typographically correct entities. +#html_use_smartypants = True + +# Custom sidebar templates, maps document names to template names. +#html_sidebars = {} + +# Additional templates that should be rendered to pages, maps page names to +# template names. +#html_additional_pages = {} + +# If false, no module index is generated. +#html_use_modindex = True + +# If false, no index is generated. +#html_use_index = True + +# If true, the index is split into individual pages for each letter. +#html_split_index = False + +# If true, links to the reST sources are added to the pages. 
+#html_show_sourcelink = True + +# If true, an OpenSearch description file will be output, and all pages will +# contain a tag referring to it. The value of this option must be the +# base URL from which the finished HTML is served. +#html_use_opensearch = '' + +# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). +#html_file_suffix = '' + +# Output file base name for HTML help builder. +htmlhelp_basename = 'PyPydoc' + + +# -- Options for LaTeX output -------------------------------------------------- + +# The paper size ('letter' or 'a4'). +#latex_paper_size = 'letter' + +# The font size ('10pt', '11pt' or '12pt'). +#latex_font_size = '10pt' + +# Grouping the document tree into LaTeX files. List of tuples +# (source start file, target name, title, author, documentclass [howto/manual]). +latex_documents = [ + ('temp_index', 'PyPy.tex', u'PyPy Documentation', + u'The PyPy Project', 'manual'), +] + +# The name of an image file (relative to this directory) to place at the top of +# the title page. +#latex_logo = None + +# For "manual" documents, if this is true, then toplevel headings are parts, +# not chapters. +#latex_use_parts = False + +# Additional stuff for the LaTeX preamble. +#latex_preamble = '' + +# Documents to append as an appendix to all manuals. +#latex_appendices = [] + +# If false, no module index is generated. +#latex_use_modindex = True + + +# Example configuration for intersphinx: refer to the Python standard library. +intersphinx_mapping = {'http://docs.python.org/': None} diff --git a/pypy/doc/temp_index.txt b/pypy/doc/temp_index.txt new file mode 100644 --- /dev/null +++ b/pypy/doc/temp_index.txt @@ -0,0 +1,20 @@ +.. PyPy documentation master file, created by + sphinx-quickstart on Mon Mar 14 10:44:41 2011. + You can adapt this file completely to your liking, but it should at least + contain the root `toctree` directive. + +Welcome to PyPy's documentation! +================================ + +Contents: + +.. 
toctree:: + :maxdepth: 2 + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` + diff --git a/pypy/doc/make.bat b/pypy/doc/make.bat new file mode 100644 --- /dev/null +++ b/pypy/doc/make.bat @@ -0,0 +1,113 @@ + at ECHO OFF + +REM Command file for Sphinx documentation + +set SPHINXBUILD=sphinx-build +set BUILDDIR=_build +set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . +if NOT "%PAPER%" == "" ( + set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% +) + +if "%1" == "" goto help + +if "%1" == "help" ( + :help + echo.Please use `make ^` where ^ is one of + echo. html to make standalone HTML files + echo. dirhtml to make HTML files named index.html in directories + echo. pickle to make pickle files + echo. json to make JSON files + echo. htmlhelp to make HTML files and a HTML help project + echo. qthelp to make HTML files and a qthelp project + echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter + echo. changes to make an overview over all changed/added/deprecated items + echo. linkcheck to check all external links for integrity + echo. doctest to run all doctests embedded in the documentation if enabled + goto end +) + +if "%1" == "clean" ( + for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i + del /q /s %BUILDDIR%\* + goto end +) + +if "%1" == "html" ( + %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/html. + goto end +) + +if "%1" == "dirhtml" ( + %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml + echo. + echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. + goto end +) + +if "%1" == "pickle" ( + %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle + echo. + echo.Build finished; now you can process the pickle files. + goto end +) + +if "%1" == "json" ( + %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json + echo. + echo.Build finished; now you can process the JSON files. 
+ goto end +) + +if "%1" == "htmlhelp" ( + %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp + echo. + echo.Build finished; now you can run HTML Help Workshop with the ^ +.hhp project file in %BUILDDIR%/htmlhelp. + goto end +) + +if "%1" == "qthelp" ( + %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp + echo. + echo.Build finished; now you can run "qcollectiongenerator" with the ^ +.qhcp project file in %BUILDDIR%/qthelp, like this: + echo.^> qcollectiongenerator %BUILDDIR%\qthelp\PyPy.qhcp + echo.To view the help file: + echo.^> assistant -collectionFile %BUILDDIR%\qthelp\PyPy.ghc + goto end +) + +if "%1" == "latex" ( + %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex + echo. + echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. + goto end +) + +if "%1" == "changes" ( + %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes + echo. + echo.The overview file is in %BUILDDIR%/changes. + goto end +) + +if "%1" == "linkcheck" ( + %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck + echo. + echo.Link check complete; look for any errors in the above output ^ +or in %BUILDDIR%/linkcheck/output.txt. + goto end +) + +if "%1" == "doctest" ( + %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest + echo. + echo.Testing of doctests in the sources finished, look at the ^ +results in %BUILDDIR%/doctest/output.txt. + goto end +) + +:end diff --git a/pypy/doc/Makefile b/pypy/doc/Makefile new file mode 100644 --- /dev/null +++ b/pypy/doc/Makefile @@ -0,0 +1,89 @@ +# Makefile for Sphinx documentation +# + +# You can set these variables from the command line. +SPHINXOPTS = +SPHINXBUILD = sphinx-build +PAPER = +BUILDDIR = _build + +# Internal variables. +PAPEROPT_a4 = -D latex_paper_size=a4 +PAPEROPT_letter = -D latex_paper_size=letter +ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
+ +.PHONY: help clean html dirhtml pickle json htmlhelp qthelp latex changes linkcheck doctest + +help: + @echo "Please use \`make ' where is one of" + @echo " html to make standalone HTML files" + @echo " dirhtml to make HTML files named index.html in directories" + @echo " pickle to make pickle files" + @echo " json to make JSON files" + @echo " htmlhelp to make HTML files and a HTML help project" + @echo " qthelp to make HTML files and a qthelp project" + @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" + @echo " changes to make an overview of all changed/added/deprecated items" + @echo " linkcheck to check all external links for integrity" + @echo " doctest to run all doctests embedded in the documentation (if enabled)" + +clean: + -rm -rf $(BUILDDIR)/* + +html: + $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." + +dirhtml: + $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml + @echo + @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." + +pickle: + $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle + @echo + @echo "Build finished; now you can process the pickle files." + +json: + $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json + @echo + @echo "Build finished; now you can process the JSON files." + +htmlhelp: + $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp + @echo + @echo "Build finished; now you can run HTML Help Workshop with the" \ + ".hhp project file in $(BUILDDIR)/htmlhelp." 
+ +qthelp: + $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp + @echo + @echo "Build finished; now you can run "qcollectiongenerator" with the" \ + ".qhcp project file in $(BUILDDIR)/qthelp, like this:" + @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/PyPy.qhcp" + @echo "To view the help file:" + @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/PyPy.qhc" + +latex: + $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex + @echo + @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." + @echo "Run \`make all-pdf' or \`make all-ps' in that directory to" \ + "run these through (pdf)latex." + +changes: + $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes + @echo + @echo "The overview file is in $(BUILDDIR)/changes." + +linkcheck: + $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck + @echo + @echo "Link check complete; look for any errors in the above output " \ + "or in $(BUILDDIR)/linkcheck/output.txt." + +doctest: + $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest + @echo "Testing of doctests in the sources finished, look at the " \ + "results in $(BUILDDIR)/doctest/output.txt." From commits-noreply at bitbucket.org Thu Mar 17 18:55:43 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 18:55:43 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac, necaris): Work-in-progress on organizing the docs Message-ID: <20110317175543.6644C282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42741:8a599a3234f9 Date: 2011-03-14 13:05 -0400 http://bitbucket.org/pypy/pypy/changeset/8a599a3234f9/ Log: (dmalcolm, lac, necaris): Work-in-progress on organizing the docs diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt --- a/pypy/doc/glossary.txt +++ b/pypy/doc/glossary.txt @@ -1,7 +1,17 @@ +.. _glossary: + +******** +Glossary +******** + PyPy, like any large project, has developed a jargon of its own. 
This document gives brief definition of some of these terms and provides links to more information. +.. if you add new entries, keep the alphabetical sorting! + +.. glossary:: + **abstract interpretation** The technique of interpreting the bytecode of a user program with an interpreter that handles abstract objects instead of concrete ones. diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt --- a/pypy/doc/index.txt +++ b/pypy/doc/index.txt @@ -1,3 +1,6 @@ + +Welcome to PyPy Development +============================================= The PyPy project aims at producing a flexible and fast Python_ implementation. The guiding idea is to translate a Python-level @@ -5,6 +8,149 @@ Rumors have it that the secret goal is being faster-than-C which is nonsense, isn't it? `more...`_ +.. toctree:: + :maxdepth: 2 + + .. The following stuff has not yet been categorized: + + sandbox.txt + sprint-reports.txt + stackless.txt + statistic/index.txt + svn-help.txt + theory.txt + translation.txt + translation-aspects.txt + video-index.txt + windows.txt + discussion/GC-performance.txt + discussion/VM-integration.txt + discussion/chained_getattr.txt + discussion/cli-optimizations.txt + discussion/cmd-prompt-translation.txt + discussion/compiled-swamp.txt + discussion/ctypes_modules.txt + discussion/ctypes_todo.txt + discussion/distribution.txt + discussion/distribution-implementation.txt + discussion/distribution-newattempt.txt + discussion/distribution-roadmap.txt + discussion/emptying-the-malloc-zoo.txt + discussion/finalizer-order.txt + discussion/gc.txt + discussion/howtoimplementpickling.txt + discussion/improve-rpython.txt + discussion/outline-external-ootype.txt + discussion/oz-thread-api.txt + discussion/paper-wishlist.txt + discussion/parsing-ideas.txt + discussion/pypy_metaclasses_in_cl.txt + discussion/removing-stable-compiler.txt + discussion/security-ideas.txt + discussion/somepbc-refactoring-plan.txt + discussion/summer-of-pypy-pytest.txt + discussion/testing-zope.txt + 
discussion/thoughts_string_interning.txt + discussion/translation-swamp.txt + discussion/use_case_of_logic.txt + + + + .. STUFF THAT'S BEEN DONE + + .. This needs merging somehow: + docindex.txt + + .. The following stuff is high-value and (vaguely) true: + getting-started.txt + getting-started-python.txt + getting-started-dev.txt + faq.txt + architecture.txt + coding-guide.txt + cleanup-todo.txt + cpython_differences.txt + garbage_collection.txt + interpreter.txt + objspace.txt + + dev_method.txt + download.txt + extending.txt + + extradoc.txt + .. ^^ integrate this one level up: dcolish? + + glossary.txt + + contributor.txt + + .. True, high-detail: + interpreter-optimizations.txt + configuration.txt + low-level-encapsulation.txt + parser.txt + rlib.txt + rtyper.txt + jit/_ref.txt + jit/index.txt + jit/overview.txt + jit/pyjitpl5.txt + + ctypes-implementation.txt + .. ^^ needs attention + + how-to-release.txt + .. ^^ needs attention + + index-report.txt + .. ^^ of historic interest, and about EU fundraising + + maemo.txt + .. ^^ obscure corner; not sure of status + + .. The following stuff is good material relating to unmaintained areas of the project: + .. .Net stuff: + cli-backend.txt + clr-module.txt + carbonpython.txt + + .. Release notes: + release-0.6.txt + release-0.7.0.txt + release-0.8.0.txt + release-0.9.0.txt + release-0.99.0.txt + release-1.0.0.txt + release-1.1.0.txt + release-1.2.0.txt + release-1.3.0.txt + release-1.4.0.txt + release-1.4.0beta.txt + release-1.4.1.txt + + + .. The following stuff is old (and crufty?), and needs further investigation: + buildtool.txt + distribution.txt + eventhistory.txt + .. ^^ Incomplete, superceded elsewhere + + externaltools.txt + .. ^^ Incomplete and wrong, superceded elsewhere + + geninterp.txt + .. ^^ apparently dead + + objspace-proxies.txt + + old_news.txt + + project-ideas.txt + + rffi.txt + + Getting into PyPy ... ============================================= @@ -57,3 +203,12 @@ .. 
_`Getting Started`: getting-started.html .. _papers: extradoc.html .. _`Release 1.4`: http://pypy.org/download.html + +Indices and tables +================== + +* :ref:`genindex` +* :ref:`modindex` +* :ref:`search` +* :ref:`glossary` + From commits-noreply at bitbucket.org Thu Mar 17 18:55:44 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 18:55:44 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac, necaris): First pass at categorizing the docs into an index Message-ID: <20110317175544.022E6282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42742:480a8afb911d Date: 2011-03-14 13:27 -0400 http://bitbucket.org/pypy/pypy/changeset/480a8afb911d/ Log: (dmalcolm, lac, necaris): First pass at categorizing the docs into an index The sphinx "make html" emits warnings for the comments, but they're non-fatal diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt --- a/pypy/doc/index.txt +++ b/pypy/doc/index.txt @@ -11,18 +11,123 @@ .. toctree:: :maxdepth: 2 - .. The following stuff has not yet been categorized: + .. STUFF THAT'S BEEN THROUGH 1ST PASS CATEGORIZATION: + + .. The following stuff is high-value and (vaguely) true: + getting-started.txt + getting-started-python.txt + getting-started-dev.txt + faq.txt + architecture.txt + coding-guide.txt + cleanup-todo.txt + cpython_differences.txt + garbage_collection.txt + interpreter.txt + objspace.txt + + dev_method.txt + download.txt + extending.txt + windows.txt + + extradoc.txt + .. ^^ integrate this one level up: dcolish? + + glossary.txt + + contributor.txt + + .. True, high-detail: + interpreter-optimizations.txt + configuration.txt + low-level-encapsulation.txt + parser.txt + rlib.txt + rtyper.txt + translation.txt + jit/_ref.txt + jit/index.txt + jit/overview.txt + jit/pyjitpl5.txt + + ctypes-implementation.txt + .. ^^ needs attention + + how-to-release.txt + .. ^^ needs attention + + index-report.txt + .. 
^^ of historic interest, and about EU fundraising + + maemo.txt + .. ^^ obscure corner; not sure of status + + stackless.txt + .. ^^ it still works; needs JIT integration; hasn't been maintained for years + + .. The following stuff is good material relating to unmaintained areas of the project: + .. .Net stuff: + cli-backend.txt + clr-module.txt + carbonpython.txt + + .. Release notes: + release-0.6.txt + release-0.7.0.txt + release-0.8.0.txt + release-0.9.0.txt + release-0.99.0.txt + release-1.0.0.txt + release-1.1.0.txt + release-1.2.0.txt + release-1.3.0.txt + release-1.4.0.txt + release-1.4.0beta.txt + release-1.4.1.txt + + + .. The following stuff is old (and crufty?), and needs further investigation: + buildtool.txt + distribution.txt + eventhistory.txt + .. ^^ Incomplete, superceded elsewhere + + externaltools.txt + .. ^^ Incomplete and wrong, superceded elsewhere + + geninterp.txt + .. ^^ apparently dead + + objspace-proxies.txt + + old_news.txt + + sprint-reports.txt + + project-ideas.txt + + rffi.txt sandbox.txt - sprint-reports.txt - stackless.txt + .. ^^ it continues to work, but is unmaintained + statistic/index.txt + + theory.txt + .. ^^ old ideas; we're not doing it this way any more + + translation-aspects.txt + .. ^^ old and needs updating + + .. This needs merging somehow: + docindex.txt + + .. Needs merging/replacing with hg stuff: svn-help.txt - theory.txt - translation.txt - translation-aspects.txt - video-index.txt - windows.txt + + .. The following discussions have not yet been categorized: + discussion/GC-performance.txt discussion/VM-integration.txt discussion/chained_getattr.txt @@ -54,101 +159,8 @@ discussion/translation-swamp.txt discussion/use_case_of_logic.txt - - - .. STUFF THAT'S BEEN DONE - - .. This needs merging somehow: - docindex.txt - - .. 
The following stuff is high-value and (vaguely) true: - getting-started.txt - getting-started-python.txt - getting-started-dev.txt - faq.txt - architecture.txt - coding-guide.txt - cleanup-todo.txt - cpython_differences.txt - garbage_collection.txt - interpreter.txt - objspace.txt - - dev_method.txt - download.txt - extending.txt - - extradoc.txt - .. ^^ integrate this one level up: dcolish? - - glossary.txt - - contributor.txt - - .. True, high-detail: - interpreter-optimizations.txt - configuration.txt - low-level-encapsulation.txt - parser.txt - rlib.txt - rtyper.txt - jit/_ref.txt - jit/index.txt - jit/overview.txt - jit/pyjitpl5.txt - - ctypes-implementation.txt - .. ^^ needs attention - - how-to-release.txt - .. ^^ needs attention - - index-report.txt - .. ^^ of historic interest, and about EU fundraising - - maemo.txt - .. ^^ obscure corner; not sure of status - - .. The following stuff is good material relating to unmaintained areas of the project: - .. .Net stuff: - cli-backend.txt - clr-module.txt - carbonpython.txt - - .. Release notes: - release-0.6.txt - release-0.7.0.txt - release-0.8.0.txt - release-0.9.0.txt - release-0.99.0.txt - release-1.0.0.txt - release-1.1.0.txt - release-1.2.0.txt - release-1.3.0.txt - release-1.4.0.txt - release-1.4.0beta.txt - release-1.4.1.txt - - - .. The following stuff is old (and crufty?), and needs further investigation: - buildtool.txt - distribution.txt - eventhistory.txt - .. ^^ Incomplete, superceded elsewhere - - externaltools.txt - .. ^^ Incomplete and wrong, superceded elsewhere - - geninterp.txt - .. ^^ apparently dead - - objspace-proxies.txt - - old_news.txt - - project-ideas.txt - - rffi.txt + .. STUFF THAT'S DIFFICULT TO CATEGORIZE + video-index.txt Getting into PyPy ... 
From commits-noreply at bitbucket.org Thu Mar 17 18:55:52 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 18:55:52 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac, necaris): Remove sectnum directives from docs to avoid ugliness in top-level index Message-ID: <20110317175552.5F66D282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42743:0aa953aafa8f Date: 2011-03-14 13:40 -0400 http://bitbucket.org/pypy/pypy/changeset/0aa953aafa8f/ Log: (dmalcolm, lac, necaris): Remove sectnum directives from docs to avoid ugliness in top-level index diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt --- a/pypy/doc/getting-started.txt +++ b/pypy/doc/getting-started.txt @@ -2,9 +2,6 @@ PyPy - Getting Started ================================== -.. contents:: -.. sectnum:: - .. _howtopypy: What is PyPy ? diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt --- a/pypy/doc/interpreter.txt +++ b/pypy/doc/interpreter.txt @@ -3,7 +3,7 @@ =================================== .. contents:: -.. sectnum:: + Introduction and Overview diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt --- a/pypy/doc/docindex.txt +++ b/pypy/doc/docindex.txt @@ -4,7 +4,7 @@ .. _Python: http://www.python.org/doc/2.5.2/ -.. sectnum:: + .. contents:: :depth: 1 diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt --- a/pypy/doc/coding-guide.txt +++ b/pypy/doc/coding-guide.txt @@ -3,7 +3,7 @@ ===================================== .. contents:: -.. sectnum:: + This document describes coding requirements and conventions for diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt --- a/pypy/doc/garbage_collection.txt +++ b/pypy/doc/garbage_collection.txt @@ -3,7 +3,7 @@ ========================== .. contents:: -.. 
sectnum:: + Introduction ============ diff --git a/pypy/doc/objspace-proxies.txt b/pypy/doc/objspace-proxies.txt --- a/pypy/doc/objspace-proxies.txt +++ b/pypy/doc/objspace-proxies.txt @@ -3,7 +3,7 @@ ================================= .. contents:: -.. sectnum:: + Thanks to the `Object Space`_ architecture, any feature that is diff --git a/pypy/doc/rtyper.txt b/pypy/doc/rtyper.txt --- a/pypy/doc/rtyper.txt +++ b/pypy/doc/rtyper.txt @@ -2,7 +2,7 @@ ================= .. contents:: -.. sectnum:: + The RPython Typer lives in the directory `pypy/rpython/`_. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt --- a/pypy/doc/getting-started-python.txt +++ b/pypy/doc/getting-started-python.txt @@ -3,7 +3,7 @@ ============================================== .. contents:: -.. sectnum:: + PyPy's Python interpreter is a very compliant Python interpreter implemented in Python. When translated to C, it passes most of diff --git a/pypy/doc/translation-aspects.txt b/pypy/doc/translation-aspects.txt --- a/pypy/doc/translation-aspects.txt +++ b/pypy/doc/translation-aspects.txt @@ -3,7 +3,7 @@ ========================================================================================== .. contents:: -.. sectnum:: + Introduction ============= diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt --- a/pypy/doc/architecture.txt +++ b/pypy/doc/architecture.txt @@ -3,7 +3,7 @@ ================================================== .. contents:: -.. sectnum:: + This document gives an overview of the goals and architecture of PyPy. See `getting started`_ for a practical introduction and starting points. diff --git a/pypy/doc/translation.txt b/pypy/doc/translation.txt --- a/pypy/doc/translation.txt +++ b/pypy/doc/translation.txt @@ -3,7 +3,7 @@ ===================== .. contents:: -.. 
sectnum:: + This document describes the tool chain that we have developed to analyze and "compile" RPython_ programs (like PyPy itself) to various target diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt --- a/pypy/doc/getting-started-dev.txt +++ b/pypy/doc/getting-started-dev.txt @@ -3,7 +3,7 @@ =============================================================================== .. contents:: -.. sectnum:: + .. _`try out the translator`: diff --git a/pypy/doc/theory.txt b/pypy/doc/theory.txt --- a/pypy/doc/theory.txt +++ b/pypy/doc/theory.txt @@ -3,7 +3,7 @@ =================================== .. contents:: -.. sectnum:: + .. _`abstract interpretation`: diff --git a/pypy/doc/low-level-encapsulation.txt b/pypy/doc/low-level-encapsulation.txt --- a/pypy/doc/low-level-encapsulation.txt +++ b/pypy/doc/low-level-encapsulation.txt @@ -3,7 +3,7 @@ ============================================================ .. contents:: -.. sectnum:: + Abstract diff --git a/pypy/doc/objspace.txt b/pypy/doc/objspace.txt --- a/pypy/doc/objspace.txt +++ b/pypy/doc/objspace.txt @@ -3,7 +3,7 @@ ====================== .. contents:: -.. sectnum:: + .. _`objectspace`: .. 
_`Object Space`: From commits-noreply at bitbucket.org Thu Mar 17 19:01:04 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:01:04 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Mass rename of documentation files from .txt to .rst, to help editors recognize the format Message-ID: <20110317180104.B1B7D282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42744:1e4601255ed1 Date: 2011-03-14 14:50 -0400 http://bitbucket.org/pypy/pypy/changeset/1e4601255ed1/ Log: (dmalcolm, lac): Mass rename of documentation files from .txt to .rst, to help editors recognize the format Script used (in pypy/doc): for f in $(find -name "*.txt"); do hg rename $f $(echo $f | sed -e"s|.txt|.rst|"); done diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. 
diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.rst copy from pypy/doc/config/objspace.std.withcelldict.txt copy to pypy/doc/config/objspace.std.withcelldict.rst diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.rst copy from pypy/doc/config/objspace.std.withprebuiltint.txt copy to pypy/doc/config/objspace.std.withprebuiltint.rst diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.rst copy from pypy/doc/discussion/cmd-prompt-translation.txt copy to pypy/doc/discussion/cmd-prompt-translation.rst diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. 
diff --git a/pypy/doc/download.txt b/pypy/doc/download.rst copy from pypy/doc/download.txt copy to pypy/doc/download.rst diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.rst copy from pypy/doc/config/index.txt copy to pypy/doc/config/index.rst diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.rst copy from pypy/doc/config/translation.list_comprehension_operations.txt copy to pypy/doc/config/translation.list_comprehension_operations.rst diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.rst copy from pypy/doc/config/objspace.soabi.txt copy to pypy/doc/config/objspace.soabi.rst diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.rst copy from pypy/doc/config/objspace.name.txt copy to pypy/doc/config/objspace.name.rst diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.rst copy from pypy/doc/index-report.txt copy to pypy/doc/index-report.rst diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.rst copy from pypy/doc/config/translation.ootype.mangle.txt copy to pypy/doc/config/translation.ootype.mangle.rst diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.rst copy from pypy/doc/config/objspace.usemodules._random.txt copy to pypy/doc/config/objspace.usemodules._random.rst diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.rst copy from pypy/doc/config/objspace.std.withmapdict.txt copy to pypy/doc/config/objspace.std.withmapdict.rst diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.rst copy from pypy/doc/config/translation.stackless.txt copy to pypy/doc/config/translation.stackless.rst diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. 
There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. 
- - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. 
- -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. 
- -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. 
- -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. - -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. 
For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. - -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... 
- except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. 
- -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. - -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. 
- -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. _`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. 
internal diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.rst copy from pypy/doc/discussion/use_case_of_logic.txt copy to pypy/doc/discussion/use_case_of_logic.rst diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.rst copy from pypy/doc/config/objspace.std.withprebuiltchar.txt copy to pypy/doc/config/objspace.std.withprebuiltchar.rst diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: - - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. _`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. 
_`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. 
The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. - - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. 
_`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. 
_`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. - -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified.2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. 
Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. **This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). 
-As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.rst copy from pypy/doc/config/translation.backend.txt copy to pypy/doc/config/translation.backend.rst diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.rst copy from pypy/doc/config/objspace.usemodules.token.txt copy to pypy/doc/config/objspace.usemodules.token.rst diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. 
diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.rst copy from pypy/doc/config/translation.debug.txt copy to pypy/doc/config/translation.debug.rst diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.rst copy from pypy/doc/config/objspace.usemodules.math.txt copy to pypy/doc/config/objspace.usemodules.math.rst diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.rst copy from pypy/doc/config/translation.backendopt.txt copy to pypy/doc/config/translation.backendopt.rst diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.rst copy from pypy/doc/config/translation.jit.txt copy to pypy/doc/config/translation.jit.rst diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.rst copy from pypy/doc/config/objspace.std.builtinshortcut.txt copy to pypy/doc/config/objspace.std.builtinshortcut.rst diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. 
_`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our hosts was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. 
_`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were held at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. _`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There were over a hundred -attendants. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. _report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -..
_`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. _`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. 
_`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. -Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. 
-Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. _`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). -Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. 
_`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. -Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. _`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. 
We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. _`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. 
_`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. _`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. _page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. 
-See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. _`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.rst copy from pypy/doc/config/objspace.honor__builtins__.txt copy to pypy/doc/config/objspace.honor__builtins__.rst diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.rst copy from pypy/doc/config/objspace.std.multimethods.txt copy to pypy/doc/config/objspace.std.multimethods.rst diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.rst copy from pypy/doc/config/translation.linkerflags.txt copy to pypy/doc/config/translation.linkerflags.rst diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,120 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. 
_`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. _`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. -We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. - -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. 
You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. - - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. _`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. 
See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. 
- -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. _`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.rst copy from pypy/doc/how-to-release.txt copy to pypy/doc/how-to-release.rst diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.rst copy from pypy/doc/config/objspace.std.withmethodcache.txt copy to pypy/doc/config/objspace.std.withmethodcache.rst diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. 
-By default, this is the best backend for the current platform. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.rst copy from pypy/doc/config/objspace.usemodules.__pypy__.txt copy to pypy/doc/config/objspace.usemodules.__pypy__.rst diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.rst copy from pypy/doc/discussion/security-ideas.txt copy to pypy/doc/discussion/security-ideas.rst diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). 
diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.rst copy from pypy/doc/discussion/parsing-ideas.txt copy to pypy/doc/discussion/parsing-ideas.rst diff --git a/pypy/doc/index.txt b/pypy/doc/index.rst copy from pypy/doc/index.txt copy to pypy/doc/index.rst diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.rst copy from pypy/doc/config/objspace.std.sharesmallstr.txt copy to pypy/doc/config/objspace.std.sharesmallstr.rst diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. 
It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. 
diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.rst copy from pypy/doc/config/translation.backendopt.stack_optimization.txt copy to pypy/doc/config/translation.backendopt.stack_optimization.rst diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: - - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. - -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. 
Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initializes appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows one to view -the virtual machine's bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. -they don't have registers but put objects onto and pull objects -from a stack. The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer an -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to a bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. -See `application level exceptions`_ for some more information -on ``OperationErrors``.
- -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code. Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettingers -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). - -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. 
_`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls object - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. 
_Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names pass to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the `'create_frame()`` method. With proper parser and compiler support this would -allow to create custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function. 
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.rst copy from pypy/doc/config/translation.txt copy to pypy/doc/config/translation.rst diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.rst copy from pypy/doc/config/translation.jit_backend.txt copy to pypy/doc/config/translation.jit_backend.rst diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.rst copy from pypy/doc/config/objspace.std.methodcachesizeexp.txt copy to pypy/doc/config/objspace.std.methodcachesizeexp.rst diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.rst copy from pypy/doc/config/objspace.std.txt copy to pypy/doc/config/objspace.std.rst diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. - -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. 
- -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods are also supported, as well as overloading:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.rst copy from pypy/doc/discussion/improve-rpython.txt copy to pypy/doc/discussion/improve-rpython.rst diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.rst copy from pypy/doc/config/objspace.usemodules._ast.txt copy to pypy/doc/config/objspace.usemodules._ast.rst diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.rst copy from pypy/doc/config/translation.backendopt.profile_based_inline.txt copy to pypy/doc/config/translation.backendopt.profile_based_inline.rst diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.rst copy from pypy/doc/config/objspace.usemodules._md5.txt copy to pypy/doc/config/objspace.usemodules._md5.rst diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.rst copy from pypy/doc/discussion/ctypes_todo.txt copy to pypy/doc/discussion/ctypes_todo.rst diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. - -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. - -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. This belongs -to the harder parts. - -What is not meant by pickling? -.............................. 
- -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state in memory -is not what I consider a real solution. In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend if we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. - -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. - -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. 
- -For other types which are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. - -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ - -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return from the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. 
-Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. - -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. - -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. It can be made non-recursive if the map operation - creates its own frame to keep state. 
- -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. - -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. - -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. - -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. -This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. 
- -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. -If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. - -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. 
We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. - -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. - -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. 
-The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. - -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... - -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which not is not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. - -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable liveness, it does not -track references which are known to be held by other objects. 
-Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. -The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the sketched problem of above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.rst copy from pypy/doc/config/objspace.std.withropeunicode.txt copy to pypy/doc/config/objspace.std.withropeunicode.rst diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. 
_mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. 
-It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. - -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.rst copy from pypy/doc/config/objspace.std.optimized_int_add.txt copy to pypy/doc/config/objspace.std.optimized_int_add.rst diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.rst copy from pypy/doc/distribution.txt copy to pypy/doc/distribution.rst diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.rst copy from pypy/doc/discussion/howtoimplementpickling.txt copy to pypy/doc/discussion/howtoimplementpickling.rst diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. 
- -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. for .NET: System.Collections.ArrayList) - - - external instances (e.g. for js: window, window.document) - - - external functions? (they are not needed for .NET and JVM, maybe - for js?) - -External objects should behave as much as possible as "internal -objects". 
- -Moreover, we want to preserve the possibility of *testing* RPython -programs on top of CPython if possible. For example, it should be -possible to test RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as far as I know. - - -How to represent types ---------------------- - -First, some definitions: - - - high-level types are the types used by the annotator - (SomeInteger() & co.) - - - low-level types are the types used by the rtyper (Signed & co.) - - - platform-level types are the types used by the backends (e.g. int32 for - .NET) - -Usually, RPython types are described "top-down": we start from the -annotation, then the rtyper transforms the high-level types into -low-level types, then the backend transforms low-level types into -platform-level types. E.g. for .NET, SomeInteger() -> Signed -> int32. - -External objects are different: we *already* know the platform-level -types of our objects and we can't modify them. What we need to do is -to specify an annotation that after the high-level -> low-level -> -platform-level transformation will give us the correct types. - -For primitive types it is usually easy to find the correct annotation; -if we have an int32, we know that its ootype is Signed and the -corresponding annotation is SomeInteger(). - -For non-primitive types such as classes, we must use a "bottom-up" -approach: first, we need a description of platform-level interface of -the class; then we construct the corresponding low-level type and -teach the backends how to treat such "external types". Finally, we -wrap the low-level types into special "external annotation". - -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... 
} - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... - -This is not straightforward because to make the flow objspace happy the -object which represent System.OverflowException must be a real Python -class that inherits from Exception. 
- -This means that the Python objects which represent external classes -must be Python classes itself, and that classes representing -exceptions must be special cased and made subclasses of Exception. - - -Inheritance -~~~~~~~~~~~ - -It would be nice to allow programmers to inherit from an external -class. Not sure about the implications, though. - -Callbacks -~~~~~~~~~ - -I know that they are an issue for JS, but I don't know how they are -currently implemented. - -Special methods/properties -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In .NET there are special methods that can be accessed using a special -syntax, for example indexer or properties. It would be nice to have in -RPython the same syntax as C#. - - -Implementation details ----------------------- - -The CLI backend use a similar approach right now, but it could be -necessary to rewrite a part of it. - -To represent low-level types, it uses NativeInstance, a subclass of -ootype.Instance that contains all the information needed by the -backend to reference the class (e.g., the namespace). It also supports -overloading. - -For annotations, it reuses SomeOOInstance, which is also a wrapper -around a low-level type but it has been designed for low-level -helpers. It might be saner to use another annotation not to mix apples -and oranges, maybe factoring out common code. - -I don't know whether and how much code can be reused from the existing -bltregistry. 
diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.rst copy from pypy/doc/discussion/compiled-swamp.txt copy to pypy/doc/discussion/compiled-swamp.rst diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.rst copy from pypy/doc/config/objspace.usemodules.operator.txt copy to pypy/doc/config/objspace.usemodules.operator.rst diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. 
_`stackless transform`: ../stackless.html diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.rst copy from pypy/doc/config/objspace.usemodules.termios.txt copy to pypy/doc/config/objspace.usemodules.termios.rst diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.rst copy from pypy/doc/config/objspace.timing.txt copy to pypy/doc/config/objspace.timing.rst diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.rst copy from pypy/doc/config/objspace.usemodules.signal.txt copy to pypy/doc/config/objspace.usemodules.signal.rst diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. 
This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.rst copy from pypy/doc/configuration.txt copy to pypy/doc/configuration.rst diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.rst copy from pypy/doc/jit/index.txt copy to pypy/doc/jit/index.rst diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? -------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! - -The mostly likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. 
- -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). 
- -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. _`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. 
- -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. _`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. 
- -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. _`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! 
- - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. __: http://llvm.org/ - -What PyPy contains is, on the one hand, an non-soft static type -inferencer for RPython, which is a sublanguage that we defined just so -that it's possible and not too hard to do that; and on the other hand, -for the full Python language, we have an interpreter, and a JIT -generator which can produce a Just-In-Time Compiler from the -interpreter. The resulting JIT works for the full Python language in a -way that doesn't need type inference at all. - -For more motivation and details about our approach see also [D05.1]_, -section 3. - -.. [BRETT] Brett Cannon, - Localized Type Inference of Atomic Types in Python, - http://www.ocf.berkeley.edu/~bac/thesis.pdf - -.. [D05.1] Compiling Dynamic Language Implementations, - Report from the PyPy project to the E.U., - http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - -.. 
_`PyPy's RPython`: - ------------------------------- -What is this RPython language? ------------------------------- - -RPython is a restricted subset of the Python language. It is used for -implementing dynamic language interpreters within the PyPy framework. The -restrictions are to ensure that type inference (and so, ultimately, translation -to other languages) of RPython programs is possible. These restrictions only -apply after the full import happens, so at import time arbitrary Python code can -be executed. - -The property of "being RPython" always applies to a full program, not to single -functions or modules (the translation tool chain does a full program analysis). -"Full program" in the context of "being RPython" is all the code reachable from -an "entry point" function. The translation toolchain follows all calls -recursively and discovers what belongs to the program and what not. - -The restrictions that apply to programs to be RPython mostly limit the ability -of mixing types in arbitrary ways. RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. -To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. 
`Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. Early attempts were dropped because they -conflicted with refactorings that we needed in order to progress on the -rest of PyPy; the currently active developers of PyPy have different -priorities. If someone wants to start working in that direction I -imagine that he might get a (very little) bit of support from us, -though. - -Alternatively, it's possible to write a mixed-module, i.e. an extension -module for PyPy in RPython, which you can then import from your Python -program when it runs on top of PyPy. This is similar to writing a C -extension module for CPython in term of investment of effort (without -all the INCREF/DECREF mess, though). - ------------------------------------------------------- -What's the ``"NOT_RPYTHON"`` I see in some docstrings? ------------------------------------------------------- - -If you put "NOT_RPYTHON" into the docstring of a function and that function is -found while trying to translate an RPython program, the translation process -stops and reports this as an error. 
You can therefore mark functions as -"NOT_RPYTHON" to make sure that they are never analyzed. - - -------------------------------------------------------------------- -Couldn't we simply take a Python syntax tree and turn it into Lisp? -------------------------------------------------------------------- - -It's not necessarily nonsense, but it's not really The PyPy Way. It's -pretty hard, without some kind of type inference, to translate, say this -Python:: - - a + b - -into anything significantly more efficient than this Common Lisp:: - - (py:add a b) - -And making type inference possible is what RPython is all about. - -You could make ``#'py:add`` a generic function and see if a given CLOS -implementation is fast enough to give a useful speed (but I think the -coercion rules would probably drive you insane first). -- mwh - --------------------------------------------- -Do I have to rewrite my programs in RPython? --------------------------------------------- - -No. PyPy always runs your code in its own interpreter, which is a -full and compliant Python 2.5 interpreter. RPython_ is only the -language in which parts of PyPy itself are written and extension -modules for it. The answer to whether something needs to be written as -an extension module, apart from the "gluing to external libraries" reason, will -change over time as speed for normal Python code improves. - -------------------------- -Which backends are there? -------------------------- - -Currently, there are backends for C_, the CLI_, and the JVM_. -All of these can translate the entire PyPy interpreter. -To learn more about backends take a look at the `translation document`_. - -.. _C: translation.html#the-c-back-end -.. _CLI: cli-backend.html -.. _JVM: translation.html#genjvm -.. _`translation document`: translation.html - ----------------------- -How do I compile PyPy? ----------------------- - -See the `getting-started`_ guide. - -.. 
_`how do I compile my own interpreters`: - -------------------------------------- -How do I compile my own interpreters? -------------------------------------- - -Start from the example of -`pypy/translator/goal/targetnopstandalone.py`_, which you compile by -typing:: - - python translate.py targetnopstandalone - -You can have a look at intermediate C source code, which is (at the -moment) put in ``/tmp/usession-*/testing_1/testing_1.c``. Of course, -all the functions and stuff used directly and indirectly by your -``entry_point()`` function has to be RPython_. - - -.. _`RPython`: coding-guide.html#rpython -.. _`getting-started`: getting-started.html - -.. include:: _ref.txt - ----------------------------------------------------------- -Why does PyPy draw a Mandelbrot fractal while translating? ----------------------------------------------------------- - -Because it's fun. diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.txt deleted file mode 100644 --- a/pypy/doc/discussion/gc.txt +++ /dev/null @@ -1,77 +0,0 @@ - -*Note: this things are experimental and are being implemented on the -`io-improvements`_ branch* - -.. _`io-improvements`: http://codespeak.net/svn/pypy/branch/io-improvements - -============= -GC operations -============= - -This document tries to gather gc-related issues which are very recent -or in-development. Also, it tries to document needed gc refactorings -and expected performance of certain gc-related operations. - -Problem area -============ - -Since some of our gcs are moving, we at some point decided to simplify -the issue of having care of it by always copying the contents of -data that goes to C level. This yields a performance penalty, also -because some gcs does not move data around anyway. - -So we decided to introduce new operations which will simplify issues -regarding this. - -Pure gc operations -================== - -(All available from rlib.rgc) - -* can_move(p) - returns a flag telling whether pointer p will move. 
- useful for example when you want to know whether memcopy is safe. - -* malloc_nonmovable(TP, n=None) - tries to allocate non-moving object. - if it succeeds, it return an object, otherwise (for whatever reasons) - returns null pointer. Does not raise! (never) - -Usage patterns -============== - -Usually those functions are used via helpers located in rffi. For things like -os.write - first get_nonmovingbuffer(data) that will give you a pointer -suitable of passing to C and finally free_nonmovingbuffer. - -For os.read like usage - you first call alloc_buffer (that will allocate a -buffer of desired size passable to C) and afterwards create str_from_buffer, -finally calling keep_buffer_alive_until_here. - -String builder -============== - -In Python strings are immutable by design. In RPython this still yields true, -but since we cooperate with lower (C/POSIX) level, which has no notion of -strings, we use buffers. Typical use case is to use list of characters l and -than ''.join(l) in order to get string. This requires a lot of unnecessary -copying, which yields performance penalty for such operations as string -formatting. Hence the idea of string builder. String builder would be an -object to which you can append strings or characters and afterwards build it -to a string. Ideally, this set of operations would not contain any copying -whatsoever. - -Low level gc operations for string builder ------------------------------------------- - -* alloc_buffer(T, size) - allocates Array(nolength=True) with possibility - of later becoming of shape T - -* realloc_buffer(buf, newsize) - tries to shrink or enlarge buffer buf. Returns - new pointer (since it might involve copying) - -* build_buffer(T, buf) - creates a type T (previously passed to alloc_buffer) - from buffer. - -Depending on a gc, those might be implemented dumb (realloc always copies) -or using C-level realloc. Might be implemented also in whatever clever way -comes to mind. 
- diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.rst copy from pypy/doc/config/translation.shared.txt copy to pypy/doc/config/translation.shared.rst diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.rst copy from pypy/doc/config/objspace.usemodules.select.txt copy to pypy/doc/config/objspace.usemodules.select.rst diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: - - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. - - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. 
For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... - >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. 
You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". - -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. 
-You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. 
- -* `pypy/annotation`_ contains the data model for the type annotation that - can be inferred about a graph. The graph "walker" that uses this is in - `pypy/annotation/annrpython.py`_. - -* `pypy/rpython`_ contains the code of the RPython typer. The typer transforms - annotated flow graphs in a way that makes them very similar to C code so - that they can be easy translated. The graph transformations are controlled - by the stuff in `pypy/rpython/rtyper.py`_. The object model that is used can - be found in `pypy/rpython/lltypesystem/lltype.py`_. For each RPython type - there is a file rxxxx.py that contains the low level functions needed for - this type. - -* `pypy/rlib`_ contains the RPython standard library, things that you can - use from rpython. - -.. _optionaltool: - - -Running PyPy's unit tests -------------------------- - -PyPy development always was and is still thorougly test-driven. -We use the flexible `py.test testing tool`_ which you can `install independently -`_ and use indepedently -from PyPy for other projects. - -The PyPy source tree comes with an inlined version of ``py.test`` -which you can invoke by typing:: - - python pytest.py -h - -This is usually equivalent to using an installed version:: - - py.test -h - -If you encounter problems with the installed version -make sure you have the correct version installed which -you can find out with the ``--version`` switch. - -Now on to running some tests. PyPy has many different test directories -and you can use shell completion to point at directories or files:: - - py.test pypy/interpreter/test/test_pyframe.py - - # or for running tests of a whole subdirectory - py.test pypy/interpreter/ - -See `py.test usage and invocations`_ for some more generic info -on how you can run tests. - -Beware trying to run "all" pypy tests by pointing to the root -directory or even the top level subdirectory ``pypy``. It takes -hours and uses huge amounts of RAM and is not recommended. 
- -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. 
_`interpreter-level and app-level`: coding-guide.html#interpreter-level - -.. _`trace example`: - -Tracing bytecode and operations on objects -++++++++++++++++++++++++++++++++++++++++++ - -You can use the trace object space to monitor the interpretation -of bytecodes in connection with object space operations. To enable -it, set ``__pytrace__=1`` on the interactive PyPy console:: - - >>>> __pytrace__ = 1 - Tracing enabled - >>>> a = 1 + 2 - |- <<<< enter a = 1 + 2 @ 1 >>>> - |- 0 LOAD_CONST 0 (W_IntObject(1)) - |- 3 LOAD_CONST 1 (W_IntObject(2)) - |- 6 BINARY_ADD - |- add(W_IntObject(1), W_IntObject(2)) -> W_IntObject(3) - |- 7 STORE_NAME 0 (a) - |- hash(W_StringObject('a')) -> W_IntObject(-468864544) - |- int_w(W_IntObject(-468864544)) -> -468864544 - |-10 LOAD_CONST 2 () - |-13 RETURN_VALUE - |- <<<< leave a = 1 + 2 @ 1 >>>> - -Demos -------- - -The `demo/`_ directory contains examples of various aspects of PyPy, -ranging from running regular Python programs (that we used as compliance goals) -over experimental distribution mechanisms to examples translating -sufficiently static programs into low level code. - -Additional Tools for running (and hacking) PyPy ------------------------------------------------ - -We use some optional tools for developing PyPy. They are not required to run -the basic tests or to get an interactive PyPy prompt but they help to -understand and debug PyPy especially for the translation process. - -graphviz & pygame for flow graph viewing (highly recommended) -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -graphviz and pygame are both necessary if you -want to look at generated flow graphs: - - graphviz: http://www.graphviz.org/Download.php - - pygame: http://www.pygame.org/download.shtml - -CTypes on Python 2.4 -++++++++++++++++++++++++++++ - -`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users needs to -install it if they want to run low-level tests. See -the `download page of ctypes`_. - -.. 
_`download page of ctypes`: http://sourceforge.net/project/showfiles.php?group_id=71702 -.. _`ctypes`: http://starship.python.net/crew/theller/ctypes/ - -.. _`py.test`: - -py.test and the py lib -+++++++++++++++++++++++ - -The `py.test testing tool`_ drives all our testing needs. - -We use the `py library`_ for filesystem path manipulations, terminal -writing, logging and some other support functionality. - -You don't neccessarily need to install these two libraries because -we also ship them inlined in the PyPy source tree. - -Getting involved ------------------ - -PyPy employs an open development process. You are invited to join our -`pypy-dev mailing list`_ or look at the other `contact -possibilities`_. Usually we give out commit rights fairly liberally, so if you -want to do something with PyPy, you can become a committer. We are also doing -coding Sprints which are -separately announced and often happen around Python conferences such -as EuroPython or Pycon. Upcoming events are usually announced on `the blog`_. - -.. _`full Python interpreter`: getting-started-python.html -.. _`the blog`: http://morepypy.blogspot.com -.. _`pypy-dev mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`contact possibilities`: index.html - -.. _`py library`: http://pylib.org - -.. _`Spidermonkey`: http://www.mozilla.org/js/spidermonkey/ - -.. _`.NET Framework SDK 2.0`: http://msdn.microsoft.com/netframework/downloads/updates/default.aspx -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _clr: clr-module.html - -.. _`Dot Graphviz`: http://www.graphviz.org/ -.. _Pygame: http://www.pygame.org/ -.. _pyopcode.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyopcode.py -.. _eval.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/eval.py -.. _pyframe.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyframe.py -.. _function.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/function.py -.. 
_argument.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/argument.py -.. _baseobjspace.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/baseobjspace.py -.. _module.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/module.py -.. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py -.. _typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py -.. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py -.. _mailing lists: index.html -.. _documentation: docindex.html -.. _unit tests: coding-guide.html#test-design - -.. _`directory reference`: docindex.html#directory-reference - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.rst copy from pypy/doc/geninterp.txt copy to pypy/doc/geninterp.rst diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. 
diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. 
- -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.rst copy from pypy/doc/discussion/somepbc-refactoring-plan.txt copy to pypy/doc/discussion/somepbc-refactoring-plan.rst diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.rst copy from pypy/doc/eventhistory.txt copy to pypy/doc/eventhistory.rst diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. 
A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. - -Extensible type system for llexternal -------------------------------------- - -llexternal allows the description of a C function, and conveys the same -information about the arguments as a C header. But this is often not enough. -For example, a parameter of type `int*` is converted to -`rffi.CArrayPtr(rffi.INT)`, but this information is not enough to use the -function. The parameter could be an array of int, a reference to a single value, -for input or output... 
- -A "type system" could hold this additional information, and automatically -generate some conversion code to ease the usage of the function from -rpython. For example:: - - # double frexp(double x, int *exp); - frexp = llexternal("frexp", [rffi.DOUBLE, OutPtr(rffi.int)], rffi.DOUBLE) - -`OutPtr` indicates that the parameter is output-only, which need not to be -initialized, and which *value* is returned to the caller. In rpython the call -becomes:: - - fraction, exponent = frexp(value) - -Also, we could imagine that one item in the llexternal argument list corresponds -to two parameters in C. Here, OutCharBufferN indicates that the caller will pass -a rpython string; the framework will pass buffer and length to the function:: - - # ssize_t write(int fd, const void *buf, size_t count); - write = llexternal("write", [rffi.INT, CharBufferAndSize], rffi.SSIZE_T) - -The rpython code that calls this function is very simple:: - - written = write(fd, data) - -compared with the present:: - - count = len(data) - buf = rffi.get_nonmovingbuffer(data) - try: - written = rffi.cast(lltype.Signed, os_write( - rffi.cast(rffi.INT, fd), - buf, rffi.cast(rffi.SIZE_T, count))) - finally: - rffi.free_nonmovingbuffer(data, buf) - -Typemaps are very useful for large APIs where the same conversions are needed in -many places. XXX example diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.rst copy from pypy/doc/discussion/testing-zope.txt copy to pypy/doc/discussion/testing-zope.rst diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. 
diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.rst copy from pypy/doc/config/objspace.std.withrangelist.txt copy to pypy/doc/config/objspace.std.withrangelist.rst diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. 
diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.rst copy from pypy/doc/config/translation.force_make.txt copy to pypy/doc/config/translation.force_make.rst diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. 
As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.rst copy from pypy/doc/discussion/chained_getattr.txt copy to pypy/doc/discussion/chained_getattr.rst diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. 
_PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.rst copy from pypy/doc/config/objspace.std.newshortcut.txt copy to pypy/doc/config/objspace.std.newshortcut.rst diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.rst copy from pypy/doc/config/objspace.std.optimized_list_getitem.txt copy to pypy/doc/config/objspace.std.optimized_list_getitem.rst diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.rst copy from pypy/doc/config/objspace.usemodules.parser.txt copy to pypy/doc/config/objspace.usemodules.parser.rst diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. 
diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.rst copy from pypy/doc/carbonpython.txt copy to pypy/doc/carbonpython.rst diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.rst copy from pypy/doc/config/translation.jit_profiler.txt copy to pypy/doc/config/translation.jit_profiler.rst diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. 
diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. - -Moreover, all the usual Python features such as bound and unbound -methods are available as well. 
- -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. 
To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. 
diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.rst copy from pypy/doc/config/translation.output.txt copy to pypy/doc/config/translation.output.rst diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.rst copy from pypy/doc/config/objspace.usemodules._weakref.txt copy to pypy/doc/config/objspace.usemodules._weakref.rst diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. 
diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.rst copy from pypy/doc/config/objspace.std.prebuiltintto.txt copy to pypy/doc/config/objspace.std.prebuiltintto.rst diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.rst copy from pypy/doc/discussion/paper-wishlist.txt copy to pypy/doc/discussion/paper-wishlist.rst diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.rst copy from pypy/doc/config/translation.vanilla.txt copy to pypy/doc/config/translation.vanilla.rst diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.rst copy from pypy/doc/interpreter-optimizations.txt copy to pypy/doc/interpreter-optimizations.rst diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.rst copy from pypy/doc/config/objspace.usemodules._multiprocessing.txt copy to pypy/doc/config/objspace.usemodules._multiprocessing.rst diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. 
diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.rst copy from pypy/doc/config/objspace.usemodules.errno.txt copy to pypy/doc/config/objspace.usemodules.errno.rst diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.rst copy from pypy/doc/config/objspace.usemodules.posix.txt copy to pypy/doc/config/objspace.usemodules.posix.rst diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.rst copy from pypy/doc/config/objspace.std.getattributeshortcut.txt copy to pypy/doc/config/objspace.std.getattributeshortcut.rst diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. - - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). 
- -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. - -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. 
A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... - ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) 
- -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. - -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. 
- -The other approach ------------------- - -The general idea to solve handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. - - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes an usual -ootyped flowgraph and split the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). - - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). 
- -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). - -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. 
- - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. 
In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only kind of ``Pair``, whose ``value`` and -``behaviour`` type are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. - -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performances. - - diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.rst copy from pypy/doc/config/translation.platform.txt copy to pypy/doc/config/translation.platform.rst diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.rst copy from pypy/doc/discussion/VM-integration.txt copy to pypy/doc/discussion/VM-integration.rst diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.rst copy from pypy/doc/config/translation.fork_before.txt copy to pypy/doc/config/translation.fork_before.rst diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.rst copy from pypy/doc/dev_method.txt copy to pypy/doc/dev_method.rst diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. 
diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.rst copy from pypy/doc/config/translation.builtins_can_raise_exceptions.txt copy to pypy/doc/config/translation.builtins_can_raise_exceptions.rst diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.rst copy from pypy/doc/config/translation.gcremovetypeptr.txt copy to pypy/doc/config/translation.gcremovetypeptr.rst diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.rst copy from pypy/doc/config/objspace.usemodules._lsprof.txt copy to pypy/doc/config/objspace.usemodules._lsprof.rst diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). 
- -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.rst copy from pypy/doc/config/objspace.usemodules._sha.txt copy to pypy/doc/config/objspace.usemodules._sha.rst diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/jit/overview.txt b/pypy/doc/jit/overview.txt deleted file mode 100644 --- a/pypy/doc/jit/overview.txt +++ /dev/null @@ -1,195 +0,0 @@ ------------------------------------------------------------------------- - Motivating JIT Compiler Generation ------------------------------------------------------------------------- - -.. contents:: -.. sectnum:: - -This is a non-technical introduction and motivation for PyPy's approach -to Just-In-Time compiler generation. - - -Motivation -======================================================================== - -Overview --------- - -Writing an interpreter for a complex dynamic language like Python is not -a small task, especially if, for performance goals, we want to write a -Just-in-Time (JIT) compiler too. - -The good news is that it's not what we did. We indeed wrote an -interpreter for Python, but we never wrote any JIT compiler for Python -in PyPy. Instead, we use the fact that our interpreter for Python is -written in RPython, which is a nice, high-level language -- and we turn -it *automatically* into a JIT compiler for Python. - -This transformation is of course completely transparent to the user, -i.e. 
the programmer writing Python programs. The goal (which we -achieved) is to support *all* Python features -- including, for example, -random frame access and debuggers. But it is also mostly transparent to -the language implementor, i.e. to the source code of the Python -interpreter. It only needs a bit of guidance: we had to put a small -number of hints in the source code of our interpreter. Based on these -hints, the *JIT compiler generator* produces a JIT compiler which has -the same language semantics as the original interpreter by construction. -This JIT compiler itself generates machine code at runtime, aggressively -optimizing the user's program and leading to a big performance boost, -while keeping the semantics unmodified. Of course, the interesting bit -is that our Python language interpreter can evolve over time without -getting out of sync with the JIT compiler. - - -The path we followed --------------------- - -Our previous incarnations of PyPy's JIT generator were based on partial -evaluation. This is a well-known and much-researched topic, considered -to be very promising. There have been many attempts to use it to -automatically transform an interpreter into a compiler. However, none of -them have lead to substantial speedups for real-world languages. We -believe that the missing key insight is to use partial evaluation to -produce just-in-time compilers, rather than classical ahead-of-time -compilers. If this turns out to be correct, the practical speed of -dynamic languages could be vastly improved. - -All these previous JIT compiler generators were producing JIT compilers -similar to the hand-written Psyco. But today, starting from 2009, our -prototype is no longer using partial evaluation -- at least not in a way -that would convince paper reviewers. It is instead based on the notion -of *tracing JIT,* recently studied for Java and JavaScript. 
When -compared to all existing tracing JITs so far, however, partial -evaluation gives us some extra techniques that we already had in our -previous JIT generators, notably how to optimize structures by removing -allocations. - -The closest comparison to our current JIT is Tamarin's TraceMonkey. -However, this JIT compiler is written manually, which is quite some -effort. In PyPy, we write a JIT generator at the level of RPython, -which means that our final JIT does not have to -- indeed, cannot -- be -written to encode all the details of the full Python language. These -details are automatically supplied by the fact that we have an -interpreter for full Python. - - -Practical results ------------------ - -The JIT compilers that we generate use some techniques that are not in -widespread use so far, but they are not exactly new either. The point -we want to make here is not that we are pushing the theoretical limits -of how fast a given dynamic language can be run. Our point is: we are -making it **practical** to have reasonably good Just-In-Time compilers -for all dynamic languages, no matter how complicated or non-widespread -(e.g. Open Source dynamic languages without large industry or academic -support, or internal domain-specific languages). By practical we mean -that this should be: - -* Easy: requires little more efforts than writing the interpreter in the - first place. - -* Maintainable: our generated JIT compilers are not separate projects - (we do not generate separate source code, but only throw-away C code - that is compiled into the generated VM). In other words, the whole - JIT compiler is regenerated anew every time the high-level interpreter - is modified, so that they cannot get out of sync no matter how fast - the language evolves. - -* Fast enough: we can get some rather good performance out of the - generated JIT compilers. That's the whole point, of course. 
- - -Alternative approaches to improve speed -======================================================================== - -+----------------------------------------------------------------------+ -| :NOTE: | -| | -| Please take the following section as just a statement of opinion. | -| In order to be debated over, the summaries should first be | -| expanded into full arguments. We include them here as links; | -| we are aware of them, even if sometimes pessimistic about them | -| ``:-)`` | -+----------------------------------------------------------------------+ - -There are a large number of approaches to improving the execution speed of -dynamic programming languages, most of which only produce small improvements -and none offer the flexibility and customisability provided by our approach. -Over the last 6 years of tweaking, the speed of CPython has only improved by a -factor of 1.3 or 1.4 (depending on benchmarks). Many tweaks are applicable to -PyPy as well. Indeed, some of the CPython tweaks originated as tweaks for PyPy. - -IronPython initially achieved a speed of about 1.8 times that of CPython by -leaving out some details of the language and by leveraging the large investment -that Microsoft has put into making the .NET platform fast; the current, more -complete implementation has roughly the same speed as CPython. In general, the -existing approaches have reached the end of the road, speed-wise. Microsoft's -Dynamic Language Runtime (DLR), often cited in this context, is essentially -only an API to make the techniques pioneered in IronPython official. At best, -it will give another small improvement. - -Another technique regularly mentioned is adding types to the language in order -to speed it up: either explicit optional typing or soft typing (i.e., inferred -"likely" types). For Python, all projects in this area have started with a -simplified subset of the language; no project has scaled up to anything close -to the complete language. 
This would be a major effort and be platform- and -language-specific. Moreover maintenance would be a headache: we believe that -many changes that are trivial to implement in CPython, are likely to invalidate -previous carefully-tuned optimizations. - -For major improvements in speed, JIT techniques are necessary. For Python, -Psyco gives typical speedups of 2 to 4 times - up to 100 times in algorithmic -examples. It has come to a dead end because of the difficulty and huge costs -associated with developing and maintaining it. It has a relatively poor -encoding of language semantics - knowledge about Python behavior needs to be -encoded by hand and kept up-to-date. At least, Psyco works correctly even when -encountering one of the numerous Python constructs it does not support, by -falling back to CPython. The PyPy JIT started out as a metaprogrammatic, -non-language-specific equivalent of Psyco. - -A different kind of prior art are self-hosting JIT compilers such as Jikes. -Jikes is a JIT compiler for Java written in Java. It has a poor encoding of -language semantics; it would take an enormous amount of work to encode all the -details of a Python-like language directly into a JIT compiler. It also has -limited portability, which is an issue for Python; it is likely that large -parts of the JIT compiler would need retargetting in order to run in a -different environment than the intended low-level one. - -Simply reusing an existing well-tuned JIT like that of the JVM does not -really work, because of concept mismatches between the implementor's -language and the host VM language: the former needs to be compiled to -the target environment in such a way that the JIT is able to speed it up -significantly - an approach which essentially has failed in Python so -far: even though CPython is a simple interpreter, its Java and .NET -re-implementations are not significantly faster. - -More recently, several larger projects have started in the JIT area. 
For -instance, Sun Microsystems is investing in JRuby, which aims to use the Java -Hotspot JIT to improve the performance of Ruby. However, this requires a lot of -hand crafting and will only provide speedups for one language on one platform. -Some issues are delicate, e.g., how to remove the overhead of constantly boxing -and unboxing, typical in dynamic languages. An advantage compared to PyPy is -that there are some hand optimizations that can be performed, that do not fit -in the metaprogramming approach. But metaprogramming makes the PyPy JIT -reusable for many different languages on many different execution platforms. -It is also possible to combine the approaches - we can get substantial speedups -using our JIT and then feed the result to Java's Hotspot JIT for further -improvement. One of us is even a member of the `JSR 292`_ Expert Group -to define additions to the JVM to better support dynamic languages, and -is contributing insights from our JIT research, in ways that will also -benefit PyPy. - -Finally, tracing JITs are now emerging for dynamic languages like -JavaScript with TraceMonkey. The code generated by PyPy is very similar -(but not hand-written) to the concepts of tracing JITs. - - -Further reading -======================================================================== - -The description of the current PyPy JIT generator is given in PyJitPl5_ -(draft). - -.. _`JSR 292`: http://jcp.org/en/jsr/detail?id=292 -.. _PyJitPl5: pyjitpl5.html diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. 
diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.rst copy from pypy/doc/config/translation.instrumentctl.txt copy to pypy/doc/config/translation.instrumentctl.rst diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: - - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. _`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. 
- -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. 
- -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. - -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. 
include:: _ref.txt diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.rst copy from pypy/doc/discussion/cli-optimizations.txt copy to pypy/doc/discussion/cli-optimizations.rst diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.rst copy from pypy/doc/config/translation.backendopt.constfold.txt copy to pypy/doc/config/translation.backendopt.constfold.rst diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Upto Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. -The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. 
- -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. 
Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. 
The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. 
-:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern = W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when 
space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.rst copy from pypy/doc/config/objspace.usemodules.pyexpat.txt copy to pypy/doc/config/objspace.usemodules.pyexpat.rst diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.rst copy from pypy/doc/glossary.txt copy to pypy/doc/glossary.rst diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. - -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. 
- -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. _rtyping: ../rtyper.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.rst copy from pypy/doc/config/objspace.usemodules._io.txt copy to pypy/doc/config/objspace.usemodules._io.rst diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. _`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). 
- -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). - -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. - -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). 
The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. _`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.rst copy from pypy/doc/config/translation.backendopt.remove_asserts.txt copy to pypy/doc/config/translation.backendopt.remove_asserts.rst diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.rst copy from pypy/doc/config/objspace.translationmodules.txt copy to pypy/doc/config/objspace.translationmodules.rst diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.rst copy from pypy/doc/config/objspace.usemodules.array.txt copy to pypy/doc/config/objspace.usemodules.array.rst diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.rst copy from pypy/doc/config/translation.backendopt.mallocs.txt copy to pypy/doc/config/translation.backendopt.mallocs.rst diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. - It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. 
_`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. _`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.rst copy from pypy/doc/discussion/finalizer-order.txt copy to pypy/doc/discussion/finalizer-order.rst diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.rst copy from pypy/doc/config/translation.simplifying.txt copy to pypy/doc/config/translation.simplifying.rst diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. 
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. 
Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. _`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. 
_`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. - -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. 
__: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. - -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. 
- -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. _`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. 
_`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. _`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. 
_`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. - -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. 
- -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. _feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. 
_`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.rst copy from pypy/doc/config/objspace.usemodules.cStringIO.txt copy to pypy/doc/config/objspace.usemodules.cStringIO.rst diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.rst copy from pypy/doc/discussion/gc.txt copy to pypy/doc/discussion/gc.rst diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.rst copy from pypy/doc/config/translation.rweakref.txt copy to pypy/doc/config/translation.rweakref.rst diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.rst copy from pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt copy to pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.rst diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.rst copy from pypy/doc/config/translation.withsmallfuncsets.txt copy to pypy/doc/config/translation.withsmallfuncsets.rst diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.rst copy from pypy/doc/docindex.txt copy to pypy/doc/docindex.rst diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. 
The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. 
_`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.rst copy from pypy/doc/config/translation.cli.exception_transformer.txt copy to pypy/doc/config/translation.cli.exception_transformer.rst diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. 
-This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.rst copy from pypy/doc/config/objspace.usemodules._sre.txt copy to pypy/doc/config/objspace.usemodules._sre.rst diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statistics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.rst copy from pypy/doc/config/translation.taggedpointers.txt copy to pypy/doc/config/translation.taggedpointers.rst diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.rst copy from pypy/doc/config/objspace.usemodules.imp.txt copy to pypy/doc/config/objspace.usemodules.imp.rst diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backends are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? 
- -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.rst copy from pypy/doc/config/objspace.usemodules.time.txt copy to pypy/doc/config/objspace.usemodules.time.rst diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. 
diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.rst copy from pypy/doc/discussion/distribution-implementation.txt copy to pypy/doc/discussion/distribution-implementation.rst diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.rst copy from pypy/doc/config/translation.backendopt.print_statistics.txt copy to pypy/doc/config/translation.backendopt.print_statistics.rst diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.rst copy from pypy/doc/clr-module.txt copy to pypy/doc/clr-module.rst diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.rst copy from pypy/doc/config/objspace.usemodules._warnings.txt copy to pypy/doc/config/objspace.usemodules._warnings.rst diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.rst copy from pypy/doc/discussion/GC-performance.txt copy to pypy/doc/discussion/GC-performance.rst diff --git a/pypy/doc/jit/overview.txt b/pypy/doc/jit/overview.rst copy from pypy/doc/jit/overview.txt copy to pypy/doc/jit/overview.rst diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. 
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. - -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. 
- - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. _Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. 
It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. The __del__ methods are -called in "the right" order if they are on objects pointing to each -other, as in CPython, but unlike CPython, if there is a dead cycle of -objects referencing each other, their __del__ methods are called anyway; -CPython would instead put them into the list ``garbage`` of the ``gc`` -module. More information is available on the blog `[1]`__ `[2]`__. - -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html - -Using the default GC called ``minimark``, the built-in function ``id()`` -works like it does in CPython. With other GCs it returns numbers that -are not real addresses (because an object can move around several times) -and calling it a lot can lead to performance problem. 
- -Note that if you have a long chain of objects, each with a reference to -the next one, and each with a __del__, PyPy's GC will perform badly. On -the bright side, in most other cases, benchmarks have shown that PyPy's -GCs perform much better than CPython's. - -Another difference is that if you add a ``__del__`` to an existing class it will -not be called:: - - >>>> class A(object): - .... pass - .... - >>>> A.__del__ = lambda self: None - __main__:1: RuntimeWarning: a __del__ method added to an existing type will not be called - - -Subclasses of built-in types ----------------------------- - -Officially, CPython has no rule at all for when exactly -overridden method of subclasses of built-in types get -implicitly called or not. As an approximation, these methods -are never called by other built-in methods of the same object. -For example, an overridden ``__getitem__()`` in a subclass of -``dict`` will not be called by e.g. the built-in ``get()`` -method. - -The above is true both in CPython and in PyPy. Differences -can occur about whether a built-in function or method will -call an overridden method of *another* object than ``self``. -In PyPy, they are generally always called, whereas not in -CPython. For example, in PyPy, ``dict1.update(dict2)`` -considers that ``dict2`` is just a general mapping object, and -will thus call overridden ``keys()`` and ``__getitem__()`` -methods on it. So the following code prints ``42`` on PyPy -but ``foo`` on CPython:: - - >>>> class D(dict): - .... def __getitem__(self, key): - .... return 42 - .... - >>>> - >>>> d1 = {} - >>>> d2 = D(a='foo') - >>>> d1.update(d2) - >>>> print d1['a'] - 42 - - -Ignored exceptions ------------------------ - -In many corner cases, CPython can silently swallow exceptions. -The precise list of when this occurs is rather long, even -though most cases are very uncommon. 
The most well-known -places are custom rich comparison methods (like \_\_eq\_\_); -dictionary lookup; calls to some built-in functions like -isinstance(). - -Unless this behavior is clearly present by design and -documented as such (as e.g. for hasattr()), in most cases PyPy -lets the exception propagate instead. - - -Miscellaneous -------------- - -* ``sys.setrecursionlimit()`` is ignored (and not needed) on - PyPy. On CPython it would set the maximum number of nested - calls that can occur before a RuntimeError is raised; on PyPy - overflowing the stack also causes RuntimeErrors, but the limit - is checked at a lower level. (The limit is currently hard-coded - at 768 KB, corresponding to roughly 1480 Python calls on - Linux.) - -* assignment to ``__class__`` is limited to the cases where it - works on CPython 2.5. On CPython 2.6 and 2.7 it works in a bit - more cases, which are not supported by PyPy so far. (If needed, - it could be supported, but then it will likely work in many - *more* case on PyPy than on CPython 2.6/2.7.) - - -.. include:: _ref.txt diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.rst copy from pypy/doc/discussion/emptying-the-malloc-zoo.txt copy to pypy/doc/discussion/emptying-the-malloc-zoo.rst diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. 
diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. 
diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. thunk space or - another special space diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.rst copy from pypy/doc/config/translation.profopt.txt copy to pypy/doc/config/translation.profopt.rst diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.rst copy from pypy/doc/config/objspace.usemodules.clr.txt copy to pypy/doc/config/objspace.usemodules.clr.rst diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. 
-The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. Our aim is of course to make sure that the communities way of working -is disturbed as little as possible and that contributing to PyPy still feels -fun and interesting (;-) but also to try to show to the EU as well as other -funded projects that open source ideas, tools and methods are really good ways -of running development projects. So the way PyPy as a project is being run - -distributed and agile - is something we think might be of use to other open -source development projects and commercial projects. - -Main methods for achieving this is: - - * Sprint driven development - * Sync meetings - -Main tools for achieving this is: - - * py.test - automated testing - * Subversion - version control - * Transparent communication and documentation (mailinglists, IRC, tutorials - etc etc) - - -Sprint driven development: --------------------------- - -What is a sprint and why are we sprinting? - -Originally the sprint methodology used in the Python community grew from -practices within Zope3 development. The definition of a sprint is "two-day or -three-day focused development session, in which developers pair off together -in a room and focus on building a particular subsystem". - -Other typical sprint factors: - - * no more than 10 people (although other projects as well as PyPy haven been - noted to have more than that. This is the recommendation and it is - probably based on the idea of having a critical mass of people who can - interact/communicate and work without adding the need for more than just - the absolute necessary coordination time. 
The sprints during 2005 and 2006 have - been having ca 13-14 people per sprint, the highest number of participants - during a PyPy sprint has been 24 developers) - - * a coach (the coach is the "manager" of the sprint, he/she sets the goals, - prepares, leads and coordinate the work and track progress and makes this - visible for the team. Important to note here - PyPy have never had coaches - in our sprints. Instead we hold short status meetings in the whole group, - decisions are made in the same way. So far this have worked well and we - still have been able to achieve tremendous results under stressed - conditions, releases and such like. What we do have is a local organizer, - often a developer living in the area and one more developer who prepares - and organizes sprint. They do not "manage" the sprint when its started - - their role is more of the logistic nature. This doesn't mean that we wont - have use for the coach technique or something similar in the future). - - * only coding (this is a tough one. There have been projects who have used - the sprinting method to just visionalize och gather input. PyPy have had a - similar brainstorming start up sprint. So far though this is the official - line although again, if you visit a PyPy sprint we are doing quite a lot - of other small activities in subgroups as well - planning sprints, - documentation, coordinating our EU deliverables and evaluation etc. But - don't worry - our main focus is programming ;-) - - * using XP techniques (mainly pairprogramming and unit testing - PyPy is - leaning heavily on these aspects). Pairing up core developers with people - with different levels of knowledge of the codebase have had the results - that people can quite quickly get started and join in the development. - Many of our participants (new to the project and the codebase) have - expressed how pairprogramming in combination with working on the automated - tests have been a great way of getting started. 
This is of course also a - dilemma because our core developers might have to pair up to solve some - extra hairy problems which affects the structure and effect of the other - pairs. - -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. - -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. 
Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. 
Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. A permanent Internet connection is a must - - has the venue were the sprint is planned to be weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue is - being met should therefore have very good local connections or, preferably - live there. - -3. Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content is fully decided a sprint - announcement is being made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcements points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and between to track work. After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and updated to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. 
Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. 
- - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. - - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! - -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? 
-+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden - Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo 
- Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.rst copy from pypy/doc/config/objspace.usemodules._ssl.txt copy to pypy/doc/config/objspace.usemodules._ssl.rst diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.rst copy from pypy/doc/config/objspace.usemodules._socket.txt copy to pypy/doc/config/objspace.usemodules._socket.rst diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.rst copy from pypy/doc/config/translation.backendopt.inline.txt copy to pypy/doc/config/translation.backendopt.inline.rst diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. 
- -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.rst copy from pypy/doc/config/objspace.std.withsmalllong.txt copy to pypy/doc/config/objspace.std.withsmalllong.rst diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.rst copy from pypy/doc/config/objspace.opcodes.txt copy to pypy/doc/config/objspace.opcodes.rst diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - - -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. - - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. 
- -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. - -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. - -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. - -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. - -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. 
- - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. - -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. - -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. - -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. - -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. 
_`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. _`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. _`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. _`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. 
_`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) - -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. 
- -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. 
_`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. _`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. 
_Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.rst copy from pypy/doc/config/objspace.std.withrope.txt copy to pypy/doc/config/objspace.std.withrope.rst diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.rst copy from pypy/doc/config/objspace.usemodules.crypt.txt copy to pypy/doc/config/objspace.usemodules.crypt.rst diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.rst copy from pypy/doc/config/objspace.std.logspaceoptypes.txt copy to pypy/doc/config/objspace.std.logspaceoptypes.rst diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.rst copy from pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt copy to pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.rst diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.rst copy from pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt copy to pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.rst diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.rst copy from pypy/doc/config/translation.gcrootfinder.txt copy to pypy/doc/config/translation.gcrootfinder.rst diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? 
- [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? - [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? 
- [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... 
maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - [10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, 
level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.rst copy from pypy/doc/config/objspace.std.withdictmeasurement.txt copy to pypy/doc/config/objspace.std.withdictmeasurement.rst diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. 
This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. 
by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool object. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must follow the ``then`` or ``else`` branch of the ``if``. -So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this with doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulate the secret values need to be -RPython. 
Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. The compiled -version is checked to satisfy the security constrains, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it. 
Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. 
If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are other variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret". 
-Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. - -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. 
- -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. From the object space' point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. (Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,226 +0,0 @@ - -Welcome to PyPy Development -============================================= - -The PyPy project aims at producing a flexible and fast Python_ -implementation. 
The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? `more...`_ - -.. toctree:: - :maxdepth: 2 - - .. STUFF THAT'S BEEN THROUGH 1ST PASS CATEGORIZATION: - - .. The following stuff is high-value and (vaguely) true: - getting-started.txt - getting-started-python.txt - getting-started-dev.txt - faq.txt - architecture.txt - coding-guide.txt - cleanup-todo.txt - cpython_differences.txt - garbage_collection.txt - interpreter.txt - objspace.txt - - dev_method.txt - download.txt - extending.txt - windows.txt - - extradoc.txt - .. ^^ integrate this one level up: dcolish? - - glossary.txt - - contributor.txt - - .. True, high-detail: - interpreter-optimizations.txt - configuration.txt - low-level-encapsulation.txt - parser.txt - rlib.txt - rtyper.txt - translation.txt - jit/_ref.txt - jit/index.txt - jit/overview.txt - jit/pyjitpl5.txt - - ctypes-implementation.txt - .. ^^ needs attention - - how-to-release.txt - .. ^^ needs attention - - index-report.txt - .. ^^ of historic interest, and about EU fundraising - - maemo.txt - .. ^^ obscure corner; not sure of status - - stackless.txt - .. ^^ it still works; needs JIT integration; hasn't been maintained for years - - .. The following stuff is good material relating to unmaintained areas of the project: - .. .Net stuff: - cli-backend.txt - clr-module.txt - carbonpython.txt - - .. Release notes: - release-0.6.txt - release-0.7.0.txt - release-0.8.0.txt - release-0.9.0.txt - release-0.99.0.txt - release-1.0.0.txt - release-1.1.0.txt - release-1.2.0.txt - release-1.3.0.txt - release-1.4.0.txt - release-1.4.0beta.txt - release-1.4.1.txt - - - .. The following stuff is old (and crufty?), and needs further investigation: - buildtool.txt - distribution.txt - eventhistory.txt - .. ^^ Incomplete, superceded elsewhere - - externaltools.txt - .. 
^^ Incomplete and wrong, superceded elsewhere - - geninterp.txt - .. ^^ apparently dead - - objspace-proxies.txt - - old_news.txt - - sprint-reports.txt - - project-ideas.txt - - rffi.txt - - sandbox.txt - .. ^^ it continues to work, but is unmaintained - - statistic/index.txt - - theory.txt - .. ^^ old ideas; we're not doing it this way any more - - translation-aspects.txt - .. ^^ old and needs updating - - .. This needs merging somehow: - docindex.txt - - .. Needs merging/replacing with hg stuff: - svn-help.txt - - .. The following discussions have not yet been categorized: - - discussion/GC-performance.txt - discussion/VM-integration.txt - discussion/chained_getattr.txt - discussion/cli-optimizations.txt - discussion/cmd-prompt-translation.txt - discussion/compiled-swamp.txt - discussion/ctypes_modules.txt - discussion/ctypes_todo.txt - discussion/distribution.txt - discussion/distribution-implementation.txt - discussion/distribution-newattempt.txt - discussion/distribution-roadmap.txt - discussion/emptying-the-malloc-zoo.txt - discussion/finalizer-order.txt - discussion/gc.txt - discussion/howtoimplementpickling.txt - discussion/improve-rpython.txt - discussion/outline-external-ootype.txt - discussion/oz-thread-api.txt - discussion/paper-wishlist.txt - discussion/parsing-ideas.txt - discussion/pypy_metaclasses_in_cl.txt - discussion/removing-stable-compiler.txt - discussion/security-ideas.txt - discussion/somepbc-refactoring-plan.txt - discussion/summer-of-pypy-pytest.txt - discussion/testing-zope.txt - discussion/thoughts_string_interning.txt - discussion/translation-swamp.txt - discussion/use_case_of_logic.txt - - .. STUFF THAT'S DIFFICULT TO CATEGORIZE - video-index.txt - - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. 
- -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. 
_`Release 1.4`: http://pypy.org/download.html - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` -* :ref:`glossary` - diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.rst copy from pypy/doc/config/objspace.usemodules.itertools.txt copy to pypy/doc/config/objspace.usemodules.itertools.rst diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. 
- diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.rst copy from pypy/doc/config/objspace.usemodules.txt copy to pypy/doc/config/objspace.usemodules.rst diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.rst copy from pypy/doc/config/objspace.usemodules._rawffi.txt copy to pypy/doc/config/objspace.usemodules._rawffi.rst diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.rst copy from pypy/doc/architecture.txt copy to pypy/doc/architecture.rst diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.rst copy from pypy/doc/config/objspace.usemodules._demo.txt copy to pypy/doc/config/objspace.usemodules._demo.rst diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. 
diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.rst copy from pypy/doc/cpython_differences.txt copy to pypy/doc/cpython_differences.rst diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.rst copy from pypy/doc/config/translation.ootype.txt copy to pypy/doc/config/translation.ootype.rst diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.rst copy from pypy/doc/config/objspace.usemodules._hashlib.txt copy to pypy/doc/config/objspace.usemodules._hashlib.rst diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.rst copy from pypy/doc/discussion/distribution.txt copy to pypy/doc/discussion/distribution.rst diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. 
_`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.rst copy from pypy/doc/config/translation.cc.txt copy to pypy/doc/config/translation.cc.rst diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.rst copy from pypy/doc/config/objspace.lonepycfiles.txt copy to pypy/doc/config/objspace.lonepycfiles.rst diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.rst copy from pypy/doc/config/objspace.std.withtypeversion.txt copy to pypy/doc/config/objspace.std.withtypeversion.rst diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.rst copy from pypy/doc/config/objspace.usemodules.fcntl.txt copy to pypy/doc/config/objspace.usemodules.fcntl.rst diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.rst copy from pypy/doc/config/objspace.usemodules.marshal.txt copy to pypy/doc/config/objspace.usemodules.marshal.rst diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. - - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. 
- - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? - - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". 
diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.rst copy from pypy/doc/config/objspace.nofaking.txt copy to pypy/doc/config/objspace.nofaking.rst diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.rst copy from pypy/doc/__pypy__-module.txt copy to pypy/doc/__pypy__-module.rst diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.rst copy from pypy/doc/config/opt.txt copy to pypy/doc/config/opt.rst diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.rst copy from pypy/doc/config/objspace.usemodules.exceptions.txt copy to pypy/doc/config/objspace.usemodules.exceptions.rst diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. 
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.rst copy from pypy/doc/config/objspace.std.withstrjoin.txt copy to pypy/doc/config/objspace.std.withstrjoin.rst diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). 
diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.rst copy from pypy/doc/config/objspace.usemodules.gc.txt copy to pypy/doc/config/objspace.usemodules.gc.rst diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.rst copy from pypy/doc/config/objspace.usemodules.micronumpy.txt copy to pypy/doc/config/objspace.usemodules.micronumpy.rst diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.rst copy from pypy/doc/config/translation.log.txt copy to pypy/doc/config/translation.log.rst diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.rst copy from pypy/doc/discussion/oz-thread-api.txt copy to pypy/doc/discussion/oz-thread-api.rst diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.rst copy from pypy/doc/config/objspace.usemodules.rbench.txt copy to pypy/doc/config/objspace.usemodules.rbench.rst diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.rst copy from pypy/doc/config/objspace.std.withstrbuf.txt copy to pypy/doc/config/objspace.std.withstrbuf.rst diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. 
diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. 
diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.rst copy from pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt copy to pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.rst diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.rst copy from pypy/doc/config/objspace.usemodules.symbol.txt copy to pypy/doc/config/objspace.usemodules.symbol.rst diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.rst copy from pypy/doc/cli-backend.txt copy to pypy/doc/cli-backend.rst diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. 
diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.rst copy from pypy/doc/getting-started-dev.txt copy to pypy/doc/getting-started-dev.rst diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.rst copy from pypy/doc/interpreter.txt copy to pypy/doc/interpreter.rst diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.rst copy from pypy/doc/config/translation.cli.txt copy to pypy/doc/config/translation.cli.rst diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.rst copy from pypy/doc/getting-started.txt copy to pypy/doc/getting-started.rst diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.rst copy from pypy/doc/config/translation.backendopt.none.txt copy to pypy/doc/config/translation.backendopt.none.rst diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.rst copy from pypy/doc/config/objspace.std.optimized_comparison_op.txt copy to pypy/doc/config/objspace.std.optimized_comparison_op.rst diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. 
It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, 
unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. - -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. 
The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. - -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.txt deleted file mode 100644 --- a/pypy/doc/jit/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -======================================================================== - JIT documentation -======================================================================== - -:abstract: - - When PyPy is translated into an executable like ``pypy-c``, the - executable contains a full virtual machine that can optionally - include a Just-In-Time compiler. This JIT compiler is **generated - automatically from the interpreter** that we wrote in RPython. - - This JIT Compiler Generator can be applied on interpreters for any - language, as long as the interpreter itself is written in RPython - and contains a few hints to guide the JIT Compiler Generator. 
- - -Content ------------------------------------------------------------- - -- Overview_: motivating our approach - -- Notes_ about the current work in PyPy - - -.. _Overview: overview.html -.. _Notes: pyjitpl5.html diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. 
\ No newline at end of file diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.rst copy from pypy/doc/coding-guide.txt copy to pypy/doc/coding-guide.rst diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.rst copy from pypy/doc/config/objspace.geninterp.txt copy to pypy/doc/config/objspace.geninterp.rst diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.rst copy from pypy/doc/config/objspace.usemodules.zipimport.txt copy to pypy/doc/config/objspace.usemodules.zipimport.rst diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.rst copy from pypy/doc/config/objspace.opcodes.CALL_METHOD.txt copy to pypy/doc/config/objspace.opcodes.CALL_METHOD.rst diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.rst copy from pypy/doc/config/objspace.disable_call_speedhacks.txt copy to pypy/doc/config/objspace.disable_call_speedhacks.rst diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.rst copy from pypy/doc/config/translation.countmallocs.txt copy to pypy/doc/config/translation.countmallocs.rst diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. - -PyPy's ctypes implementation in its current state proves the -feasibility of implementing a module with the same interface and -behavior for PyPy as ctypes for CPython. 
- -PyPy's implementation internally uses `libffi`_ like CPython's ctypes. -In our implementation as much as possible of the code is written in -full Python, not RPython. In CPython's situation, the equivalent would -be to write as little as possible code in C. We essentially favored -rapid experimentation over worrying about speed for this first trial -implementation. This allowed to provide a working implementation with -a large part of ctypes features in 2 months real time. - -We reused the ``ctypes`` package version 1.0.2 as-is from CPython. We -implemented ``_ctypes`` which is a C module in CPython mostly in pure -Python based on a lower-level layer extension module ``_rawffi``. - -.. _`libffi`: http://sources.redhat.com/libffi/ - -Low-level part: ``_rawffi`` -============================ - -This PyPy extension module (``pypy/module/_rawffi``) exposes a simple interface -to create C objects (arrays and structures) and calling functions -in dynamic libraries through libffi. Freeing objects in most cases and making -sure that objects referring to each other are kept alive is responsibility of the higher levels. - -This module uses bindings to libffi which are defined in ``pypy/rlib/libffi.py``. - -We tried to keep this module as small as possible. It is conceivable -that other implementations (e.g. Jython) could use our ctypes -implementation by writing their version of ``_rawffi``. - -High-level parts -================= - -The reused ``ctypes`` package lives in ``lib_pypy/ctypes``. ``_ctypes`` -implementing the same interface as ``_ctypes`` in CPython is in -``lib_pypy/_ctypes``. - -Discussion and limitations -============================= - -Reimplementing ctypes features was in general possible. PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). 
The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. - -If one goes inside the checkout it is possible to run ``_rawffi`` tests with:: - - $ cd pypy - $ python test_all.py module/_rawffi/ - -The ctypes implementation test suite is derived from the tests for -ctypes 1.0.2, we have skipped some tests corresponding to not -implemented features or implementation details, we have also added -some tests. - -To run the test suite a compiled pypy-c is required with the proper configuration. 
To build the required pypy-c one should inside the checkout:: - - $ cd pypy/translator/goal - $ ./translate.py --text --batch --gc=generation targetpypystandalone.py - --withmod-_rawffi --allworkingmodules - -this should produce a pypy-c executable in the ``goal`` directory. - -To run the tests then:: - - $ cd ../../.. # back to pypy-trunk - $ ./pypy/translator/goal/pypy-c pypy/test_all.py lib/pypy1.2/lib_pypy/pypy_test/ctypes_tests - -There should be 36 skipped tests and all other tests should pass. - -Running application examples -============================== - -`pyglet`_ is known to run. We had some success also with pygame-ctypes which is not maintained anymore and with a snapshot of the experimental pysqlite-ctypes. We will only describe how to run the pyglet examples. - -pyglet -------- - -We tried pyglet checking it out from its repository at revision 1984. -For convenience a tarball of the checkout can also be found at: - -http://codespeak.net/~pedronis/pyglet-r1984.tgz - -From pyglet, the following examples are known to work: - - - opengl.py - - multiple_windows.py - - events.py - - html_label.py - - timer.py - - window_platform_event.py - - fixed_resolution.py - -The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.:: - - $ cd pyglet/ - $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py - - -they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave. - -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. 
_`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.rst copy from pypy/doc/config/objspace.usemodules._codecs.txt copy to pypy/doc/config/objspace.usemodules._codecs.rst diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.rst copy from pypy/doc/externaltools.txt copy to pypy/doc/externaltools.rst diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.rst copy from pypy/doc/extending.txt copy to pypy/doc/extending.rst diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.rst copy from pypy/doc/config/translation.backendopt.merge_if_blocks.txt copy to pypy/doc/config/translation.backendopt.merge_if_blocks.rst diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.rst copy from pypy/doc/buildtool.txt copy to pypy/doc/buildtool.rst diff --git a/pypy/doc/jit/_ref.txt b/pypy/doc/jit/_ref.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.rst copy from pypy/doc/config/translation.backendopt.raisingop2direct_call.txt copy to pypy/doc/config/translation.backendopt.raisingop2direct_call.rst diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. 
diff --git a/pypy/doc/jit/_ref.txt b/pypy/doc/jit/_ref.rst copy from pypy/doc/jit/_ref.txt copy to pypy/doc/jit/_ref.rst diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.rst copy from pypy/doc/config/objspace.usemodules._file.txt copy to pypy/doc/config/objspace.usemodules._file.rst diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.rst copy from pypy/doc/config/objspace.usemodules.pypyjit.txt copy to pypy/doc/config/objspace.usemodules.pypyjit.rst diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.rst copy from pypy/doc/config/translation.secondaryentrypoints.txt copy to pypy/doc/config/translation.secondaryentrypoints.rst diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. 
internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. 
- -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. 
- - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. 
-If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... - -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. 
If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. - -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. 
- -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). - -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). 
- -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. 
There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). - -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. 
- - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.rst copy from pypy/doc/config/translation.backendopt.really_remove_asserts.txt copy to pypy/doc/config/translation.backendopt.really_remove_asserts.rst diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. 
internal diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.rst copy from pypy/doc/config/translation.make_jobs.txt copy to pypy/doc/config/translation.make_jobs.rst diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.rst copy from pypy/doc/config/objspace.txt copy to pypy/doc/config/objspace.rst diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.rst copy from pypy/doc/discussion/thoughts_string_interning.txt copy to pypy/doc/discussion/thoughts_string_interning.rst diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.rst copy from pypy/doc/contributor.txt copy to pypy/doc/contributor.rst diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. 
For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.rst copy from pypy/doc/config/translation.backendopt.clever_malloc_removal.txt copy to pypy/doc/config/translation.backendopt.clever_malloc_removal.rst diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.rst copy from pypy/doc/config/objspace.usemodules.sys.txt copy to pypy/doc/config/objspace.usemodules.sys.rst diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.rst copy from pypy/doc/getting-started-python.txt copy to pypy/doc/getting-started-python.rst diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.rst copy from pypy/doc/config/translation.noprofopt.txt copy to pypy/doc/config/translation.noprofopt.rst diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.rst copy from pypy/doc/discussion/ctypes_modules.txt copy to pypy/doc/discussion/ctypes_modules.rst diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.rst copy from pypy/doc/config/objspace.std.prebuiltintfrom.txt copy to pypy/doc/config/objspace.std.prebuiltintfrom.rst diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.rst copy from pypy/doc/config/objspace.std.withtproxy.txt copy to pypy/doc/config/objspace.std.withtproxy.rst diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.rst copy from pypy/doc/config/translation.instrument.txt copy to pypy/doc/config/translation.instrument.rst diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.rst copy from pypy/doc/config/objspace.usemodules._winreg.txt copy to pypy/doc/config/objspace.usemodules._winreg.rst diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). 
- - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. 
- - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. - - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. 
The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). - - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. 
``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. _pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.rst copy from pypy/doc/config/translation.jit_ffi.txt copy to pypy/doc/config/translation.jit_ffi.rst diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.rst copy from pypy/doc/config/translation.verbose.txt copy to pypy/doc/config/translation.verbose.rst diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.txt deleted file mode 100644 --- a/pypy/doc/config/translation.taggedpointers.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable tagged pointers. This option is mostly useful for the Smalltalk and -Prolog interpreters. For the Python interpreter the option -:config:`objspace.std.withsmallint` should be used. 
diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.rst copy from pypy/doc/config/translation.compilerflags.txt copy to pypy/doc/config/translation.compilerflags.rst diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.rst copy from pypy/doc/discussion/removing-stable-compiler.txt copy to pypy/doc/discussion/removing-stable-compiler.rst diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.rst copy from pypy/doc/config/objspace.std.withsmallint.txt copy to pypy/doc/config/objspace.std.withsmallint.rst diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. - -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. 
-- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). - -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.rst copy from pypy/doc/config/objspace.usemodules._locale.txt copy to pypy/doc/config/objspace.usemodules._locale.rst diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. 
diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.rst copy from pypy/doc/config/translation.gctransformer.txt copy to pypy/doc/config/translation.gctransformer.rst diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,247 +0,0 @@ -.. _glossary: - -******** -Glossary -******** - -PyPy, like any large project, has developed a jargon of its own. This -document gives brief definition of some of these terms and provides -links to more information. - -.. if you add new entries, keep the alphabetical sorting! - -.. glossary:: - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. - It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. _`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. 
they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. _llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. _lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. 
You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. _`promotion`: - -**promotion** - JIT_ terminology. *promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. 
_`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. _`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. 
_`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). 
diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.rst copy from pypy/doc/config/objspace.usemodules.binascii.txt copy to pypy/doc/config/objspace.usemodules.binascii.rst diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.rst copy from pypy/doc/ctypes-implementation.txt copy to pypy/doc/ctypes-implementation.rst diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. - -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). 
- -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. - - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. - -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 7246.38 
pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - real 
0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.rst copy from pypy/doc/config/objspace.usemodules.zlib.txt copy to pypy/doc/config/objspace.usemodules.zlib.rst diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.rst copy from pypy/doc/config/commandline.txt copy to pypy/doc/config/commandline.rst diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.rst copy from pypy/doc/config/objspace.std.mutable_builtintypes.txt copy to pypy/doc/config/objspace.std.mutable_builtintypes.rst diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.rst copy from pypy/doc/config/objspace.usemodules.thread.txt copy to pypy/doc/config/objspace.usemodules.thread.rst diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.rst copy from pypy/doc/config/objspace.usemodules.mmap.txt copy to pypy/doc/config/objspace.usemodules.mmap.rst diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. 
- -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.rst copy from pypy/doc/discussion/outline-external-ootype.txt copy to pypy/doc/discussion/outline-external-ootype.rst diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.rst copy from pypy/doc/config/objspace.std.withmethodcachecounter.txt copy to pypy/doc/config/objspace.std.withmethodcachecounter.rst diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.rst copy from pypy/doc/extradoc.txt copy to pypy/doc/extradoc.rst diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.rst copy from pypy/doc/config/objspace.usemodules.rctime.txt copy to pypy/doc/config/objspace.usemodules.rctime.rst diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. 
_`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.rst copy from pypy/doc/config/translation.backendopt.inline_heuristic.txt copy to pypy/doc/config/translation.backendopt.inline_heuristic.rst diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.rst copy from pypy/doc/config/objspace.usemodules._minimal_curses.txt copy to pypy/doc/config/objspace.usemodules._minimal_curses.rst diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.rst copy from pypy/doc/faq.txt copy to pypy/doc/faq.rst diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - * experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. 
- throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.rst copy from pypy/doc/config/objspace.usemodules._pickle_support.txt copy to pypy/doc/config/objspace.usemodules._pickle_support.rst diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. - -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. 
diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.rst copy from pypy/doc/config/objspace.usemodules.oracle.txt copy to pypy/doc/config/objspace.usemodules.oracle.rst diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: - - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. 
- -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. 
PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. - -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. 
For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. 
- -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. _`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. 
It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. _`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. 
- - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. 
include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.rst copy from pypy/doc/config/objspace.usemodules._collections.txt copy to pypy/doc/config/objspace.usemodules._collections.rst diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.rst copy from pypy/doc/config/objspace.usemodules._testing.txt copy to pypy/doc/config/objspace.usemodules._testing.rst diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. 
diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.rst copy from pypy/doc/config/objspace.usemodules.struct.txt copy to pypy/doc/config/objspace.usemodules.struct.rst diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.rst copy from pypy/doc/config/translation.cli.trace_calls.txt copy to pypy/doc/config/translation.cli.trace_calls.rst diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.rst copy from pypy/doc/garbage_collection.txt copy to pypy/doc/garbage_collection.rst diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.rst copy from pypy/doc/config/objspace.std.withstrslice.txt copy to pypy/doc/config/objspace.std.withstrslice.rst diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.rst copy from pypy/doc/config/translation.dump_static_data_info.txt copy to pypy/doc/config/translation.dump_static_data_info.rst diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.rst copy from pypy/doc/config/objspace.allworkingmodules.txt copy to pypy/doc/config/objspace.allworkingmodules.rst diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. _`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.rst copy from pypy/doc/config/translation.sandbox.txt copy to pypy/doc/config/translation.sandbox.rst diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.rst copy from pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt copy to pypy/doc/config/translation.backendopt.profile_based_inline_threshold.rst diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. 
A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.rst copy from pypy/doc/discussion/pypy_metaclasses_in_cl.txt copy to pypy/doc/discussion/pypy_metaclasses_in_cl.rst diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.rst copy from pypy/doc/config/translation.gc.txt copy to pypy/doc/config/translation.gc.rst diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.rst copy from pypy/doc/config/objspace.usemodules.bz2.txt copy to pypy/doc/config/objspace.usemodules.bz2.rst diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. 
diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.rst copy from pypy/doc/discussion/summer-of-pypy-pytest.txt copy to pypy/doc/discussion/summer-of-pypy-pytest.rst diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.rst copy from pypy/doc/config/objspace.usemodules.unicodedata.txt copy to pypy/doc/config/objspace.usemodules.unicodedata.rst diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: - - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. 
At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. 
To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. - -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. 
Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. 
- -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. -That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. 
This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. 
- -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. 
- After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) - -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. 
- - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live in `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<<y). - -Checking RPython code with Pylint --------------------------------- - -Pylint (>=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will check for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint.
- -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will be translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. _astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. 
(Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. - - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. 
The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! The -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly an interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses.
- -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. 
-here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. - -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. 
This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. - -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... 
- -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. - -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. 
-(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. 
- -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. _`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. 
They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while you test code -runs at application level. If you need to use modules -you have to import them within the test function. 
- -Another possibility to pass data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b": 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all -------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. _`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - ..
_`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.rst copy from pypy/doc/config/translation.type_system.txt copy to pypy/doc/config/translation.type_system.rst diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. 
_`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. 
_`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. 
_`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. _`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. 
_`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.rst copy from pypy/doc/cleanup-todo.txt copy to pypy/doc/cleanup-todo.rst diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.rst copy from pypy/doc/config/objspace.logbytecodes.txt copy to pypy/doc/config/objspace.logbytecodes.rst diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.rst copy from pypy/doc/discussion/translation-swamp.txt copy to pypy/doc/discussion/translation-swamp.rst diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.rst copy from pypy/doc/config/objspace.usemodules.__builtin__.txt copy to pypy/doc/config/objspace.usemodules.__builtin__.rst diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.rst copy from pypy/doc/config/objspace.usemodules._bisect.txt copy to pypy/doc/config/objspace.usemodules._bisect.rst diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code 
duplication, etc). - -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.rst copy from pypy/doc/config/translation.insist.txt copy to pypy/doc/config/translation.insist.rst diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.rst copy from pypy/doc/config/objspace.usepycfiles.txt copy to pypy/doc/config/objspace.usepycfiles.rst diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.rst copy from pypy/doc/config/objspace.usemodules.cpyext.txt copy to pypy/doc/config/objspace.usemodules.cpyext.rst diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.rst copy from pypy/doc/_ref.txt copy to pypy/doc/_ref.rst diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.profopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use GCCs profile-guided optimizations. 
This option specifies the the -arguments with which to call pypy-c (and in general the translated -RPython program) to gather profile data. Example for pypy-c: "-c 'from -richards import main;main(); from test import pystone; -pystone.main()'" diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.rst copy from pypy/doc/config/objspace.usemodules._ffi.txt copy to pypy/doc/config/objspace.usemodules._ffi.rst diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.txt deleted file mode 100644 --- a/pypy/doc/discussion/finalizer-order.txt +++ /dev/null @@ -1,166 +0,0 @@ -Ordering finalizers in the SemiSpace GC -======================================= - -Goal ----- - -After a collection, the SemiSpace GC should call the finalizers on -*some* of the objects that have one and that have become unreachable. -Basically, if there is a reference chain from an object a to an object b -then it should not call the finalizer for b immediately, but just keep b -alive and try again to call its finalizer after the next collection. - -This basic idea fails when there are cycles. It's not a good idea to -keep the objects alive forever or to never call any of the finalizers. -The model we came up with is that in this case, we could just call the -finalizer of one of the objects in the cycle -- but only, of course, if -there are no other objects outside the cycle that has a finalizer and a -reference to the cycle. 
- -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. 
First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
- -In the MiniMark GC, the objects don't move (apart from when they are -copied out of the nursery), but we use the flag GCFLAG_VISITED to mark -objects that survive, so we can also have a single extra bit for -finalizers: - - ===== ============== ============================ - state GCFLAG_VISITED GCFLAG_FINALIZATION_ORDERING - ===== ============== ============================ - 0 no no - 1 no yes - 2 yes yes - 3 yes no - ===== ============== ============================ diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.rst copy from pypy/doc/config/translation.backendopt.inline_threshold.txt copy to pypy/doc/config/translation.backendopt.inline_threshold.rst diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.txt deleted file mode 100644 --- a/pypy/doc/how-to-release.txt +++ /dev/null @@ -1,54 +0,0 @@ -Making a PyPy Release -======================= - -Overview ---------- - -As a meta rule setting up issues in the tracker for items here may help not -forgetting things. A set of todo files may also work. - -Check and prioritize all issues for the release, postpone some if necessary, -create new issues also as necessary. A meeting (or meetings) should be -organized to decide what things are priorities, should go in and work for -the release. - -An important thing is to get the documentation into an up-to-date state! - -Release Steps ----------------- - -* at code freeze make a release branch under - http://codepeak.net/svn/pypy/release/x.y(.z). 
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... 
diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.rst copy from pypy/doc/config/objspace.usemodules._stackless.txt copy to pypy/doc/config/objspace.usemodules._stackless.rst diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.rst copy from pypy/doc/discussion/distribution-roadmap.txt copy to pypy/doc/discussion/distribution-roadmap.rst diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.rst copy from pypy/doc/config/translation.backendopt.storesink.txt copy to pypy/doc/config/translation.backendopt.storesink.rst diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.rst copy from pypy/doc/discussion/distribution-newattempt.txt copy to pypy/doc/discussion/distribution-newattempt.rst diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.rst copy from pypy/doc/config/translation.thread.txt copy to pypy/doc/config/translation.thread.rst diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.rst copy from pypy/doc/config/translation.no__thread.txt copy to pypy/doc/config/translation.no__thread.rst diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.rst copy from pypy/doc/config/objspace.usemodules.cmath.txt copy to pypy/doc/config/objspace.usemodules.cmath.rst diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== 
-Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! - -Object Optimizations -==================== - -String Optimizations --------------------- - -String-Join Objects -+++++++++++++++++++ - -String-join objects are a different implementation of the Python ``str`` type, -They represent the lazy addition of several strings without actually performing -the addition (which involves copying etc.). When the actual value of the string -join object is needed, the addition is performed. This makes it possible to -perform repeated string additions in a loop without using the -``"".join(list_of_strings)`` pattern. - -You can enable this feature enable with the :config:`objspace.std.withstrjoin` -option. 
- -String-Slice Objects -++++++++++++++++++++ - -String-slice objects are another implementation of the Python ``str`` type. -They represent the lazy slicing of a string without actually performing the -slicing (which would involve copying). This is only done for slices of step -one. When the actual value of the string slice object is needed, the slicing -is done (although a lot of string methods don't make this necessary). This -makes string slicing a very efficient operation. It also saves memory in some -cases but can also lead to memory leaks, since the string slice retains a -reference to the original string (to make this a bit less likely, we don't -use lazy slicing when the slice would be much shorter than the original -string. There is also a minimum number of characters below which being lazy -is not saving any time over making the copy). - -You can enable this feature with the :config:`objspace.std.withstrslice` option. - -Ropes -+++++ - -Ropes are a general flexible string implementation, following the paper `"Ropes: -An alternative to Strings."`_ by Boehm, Atkinson and Plass. Strings are -represented as balanced concatenation trees, which makes slicing and -concatenation of huge strings efficient. - -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. 
_`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. Multi-dicts are a general way to do that for dictionaries: they -provide generic support for the switching of internal representations for -dicts. - -If you just enable multi-dicts, special representations for empty dictionaries, -for string-keyed dictionaries. In addition there are more specialized dictionary -implementations for various purposes (see below). - -This is now the default implementation of dictionaries in the Python interpreter. -option. - -Sharing Dicts -+++++++++++++ - -Sharing dictionaries are a special representation used together with multidicts. 
-This dict representation is used only for instance dictionaries and tries to -make instance dictionaries use less memory (in fact, in the ideal case the -memory behaviour should be mostly like that of using __slots__). - -The idea is the following: Most instances of the same class have very similar -attributes, and are even adding these keys to the dictionary in the same order -while ``__init__()`` is being executed. That means that all the dictionaries of -these instances look very similar: they have the same set of keys with different -values per instance. What sharing dicts do is store these common keys into a -common structure object and thus save the space in the individual instance -dicts: -the representation of the instance dict contains only a list of values. - -A more advanced version of sharing dicts, called *map dicts,* is available -with the :config:`objspace.std.withmapdict` option. - -Builtin-Shadowing -+++++++++++++++++ - -Usually the calling of builtins in Python requires two dictionary lookups: first -to see whether the current global dictionary contains an object with the same -name, then a lookup in the ``__builtin__`` dictionary. This is somehow -circumvented by storing an often used builtin into a local variable to get -the fast local lookup (which is a rather strange and ugly hack). - -The same problem is solved in a different way by "wary" dictionaries. They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). 
Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). For this a special dict -representation is used together with multidicts. This dict representation is -used only for instance dictionaries. The instance dictionary tracks whether an -instance attribute shadows an attribute of its class. This makes method calls -slightly faster in the following way: When calling a method the first thing that -is checked is the class dictionary to find descriptors. Normally, when a method -is found, the instance dictionary is then checked for instance attributes -shadowing the class attribute. 
If we know that there is no shadowing (since -instance dict tells us that) we can save this lookup on the instance dictionary. - -*This was deprecated and is no longer available.* - - -Method Caching -++++++++++++++ - -Shadow tracking is also an important building block for the method caching -optimization. A method cache is introduced where the result of a method lookup -is stored (which involves potentially many lookups in the base classes of a -class). Entries in the method cache are stored using a hash computed from -the name being looked up, the call site (i.e. the bytecode object and -the current program counter), and a special "version" of the type where the -lookup happens (this version is incremented every time the type or one of its -base classes is changed). On subsequent lookups the cached version can be used, -as long as the instance did not shadow any of its classes attributes. - -You can enable this feature with the :config:`objspace.std.withmethodcache` -option. - -Interpreter Optimizations -========================= - -Special Bytecodes ------------------ - -.. _`lookup method call method`: - -LOOKUP_METHOD & CALL_METHOD -+++++++++++++++++++++++++++ - -An unusual feature of Python's version of object oriented programming is the -concept of a "bound method". While the concept is clean and powerful, the -allocation and initialization of the object is not without its performance cost. -We have implemented a pair of bytecodes that alleviate this cost. 
- -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
An often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the builtins dictionary.
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... From commits-noreply at bitbucket.org Thu Mar 17 19:01:05 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:01:05 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Change "source_suffix" from .txt to .rst to reflect the mass-renaming; use "index" rather than "temp_index" Message-ID: <20110317180105.610A3282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42745:09d7d1837621 Date: 2011-03-14 15:02 -0400 http://bitbucket.org/pypy/pypy/changeset/09d7d1837621/ Log: (dmalcolm, lac): Change "source_suffix" from .txt to .rst to reflect the mass-renaming; use "index" rather than "temp_index" diff --git a/pypy/doc/conf.py b/pypy/doc/conf.py --- a/pypy/doc/conf.py +++ b/pypy/doc/conf.py @@ -28,13 +28,13 @@ templates_path = ['_templates'] # The suffix of source filenames. -source_suffix = '.txt' +source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. -master_doc = 'temp_index' +master_doc = 'index' # General information about the project. 
project = u'PyPy' From commits-noreply at bitbucket.org Thu Mar 17 19:01:05 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:01:05 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Update filenames in the table-of-contents to reflect the mass-renaming Message-ID: <20110317180105.D8618282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42746:c2797ba937f9 Date: 2011-03-14 15:03 -0400 http://bitbucket.org/pypy/pypy/changeset/c2797ba937f9/ Log: (dmalcolm, lac): Update filenames in the table-of-contents to reflect the mass-renaming diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -14,153 +14,153 @@ .. STUFF THAT'S BEEN THROUGH 1ST PASS CATEGORIZATION: .. The following stuff is high-value and (vaguely) true: - getting-started.txt - getting-started-python.txt - getting-started-dev.txt - faq.txt - architecture.txt - coding-guide.txt - cleanup-todo.txt - cpython_differences.txt - garbage_collection.txt - interpreter.txt - objspace.txt + getting-started.rst + getting-started-python.rst + getting-started-dev.rst + faq.rst + architecture.rst + coding-guide.rst + cleanup-todo.rst + cpython_differences.rst + garbage_collection.rst + interpreter.rst + objspace.rst - dev_method.txt - download.txt - extending.txt - windows.txt + dev_method.rst + download.rst + extending.rst + windows.rst - extradoc.txt + extradoc.rst .. ^^ integrate this one level up: dcolish? - glossary.txt + glossary.rst - contributor.txt + contributor.rst .. 
True, high-detail: - interpreter-optimizations.txt - configuration.txt - low-level-encapsulation.txt - parser.txt - rlib.txt - rtyper.txt - translation.txt - jit/_ref.txt - jit/index.txt - jit/overview.txt - jit/pyjitpl5.txt + interpreter-optimizations.rst + configuration.rst + low-level-encapsulation.rst + parser.rst + rlib.rst + rtyper.rst + translation.rst + jit/_ref.rst + jit/index.rst + jit/overview.rst + jit/pyjitpl5.rst - ctypes-implementation.txt + ctypes-implementation.rst .. ^^ needs attention - how-to-release.txt + how-to-release.rst .. ^^ needs attention - index-report.txt + index-report.rst .. ^^ of historic interest, and about EU fundraising - maemo.txt + maemo.rst .. ^^ obscure corner; not sure of status - stackless.txt + stackless.rst .. ^^ it still works; needs JIT integration; hasn't been maintained for years .. The following stuff is good material relating to unmaintained areas of the project: .. .Net stuff: - cli-backend.txt - clr-module.txt - carbonpython.txt + cli-backend.rst + clr-module.rst + carbonpython.rst .. Release notes: - release-0.6.txt - release-0.7.0.txt - release-0.8.0.txt - release-0.9.0.txt - release-0.99.0.txt - release-1.0.0.txt - release-1.1.0.txt - release-1.2.0.txt - release-1.3.0.txt - release-1.4.0.txt - release-1.4.0beta.txt - release-1.4.1.txt + release-0.6.rst + release-0.7.0.rst + release-0.8.0.rst + release-0.9.0.rst + release-0.99.0.rst + release-1.0.0.rst + release-1.1.0.rst + release-1.2.0.rst + release-1.3.0.rst + release-1.4.0.rst + release-1.4.0beta.rst + release-1.4.1.rst .. The following stuff is old (and crufty?), and needs further investigation: - buildtool.txt - distribution.txt - eventhistory.txt + buildtool.rst + distribution.rst + eventhistory.rst .. ^^ Incomplete, superceded elsewhere - externaltools.txt + externaltools.rst .. ^^ Incomplete and wrong, superceded elsewhere - geninterp.txt + geninterp.rst .. 
^^ apparently dead - objspace-proxies.txt + objspace-proxies.rst - old_news.txt + old_news.rst - sprint-reports.txt + sprint-reports.rst - project-ideas.txt + project-ideas.rst - rffi.txt + rffi.rst - sandbox.txt + sandbox.rst .. ^^ it continues to work, but is unmaintained - statistic/index.txt + statistic/index.rst - theory.txt + theory.rst .. ^^ old ideas; we're not doing it this way any more - translation-aspects.txt + translation-aspects.rst .. ^^ old and needs updating .. This needs merging somehow: - docindex.txt + docindex.rst .. Needs merging/replacing with hg stuff: - svn-help.txt + svn-help.rst .. The following discussions have not yet been categorized: - discussion/GC-performance.txt - discussion/VM-integration.txt - discussion/chained_getattr.txt - discussion/cli-optimizations.txt - discussion/cmd-prompt-translation.txt - discussion/compiled-swamp.txt - discussion/ctypes_modules.txt - discussion/ctypes_todo.txt - discussion/distribution.txt - discussion/distribution-implementation.txt - discussion/distribution-newattempt.txt - discussion/distribution-roadmap.txt - discussion/emptying-the-malloc-zoo.txt - discussion/finalizer-order.txt - discussion/gc.txt - discussion/howtoimplementpickling.txt - discussion/improve-rpython.txt - discussion/outline-external-ootype.txt - discussion/oz-thread-api.txt - discussion/paper-wishlist.txt - discussion/parsing-ideas.txt - discussion/pypy_metaclasses_in_cl.txt - discussion/removing-stable-compiler.txt - discussion/security-ideas.txt - discussion/somepbc-refactoring-plan.txt - discussion/summer-of-pypy-pytest.txt - discussion/testing-zope.txt - discussion/thoughts_string_interning.txt - discussion/translation-swamp.txt - discussion/use_case_of_logic.txt + discussion/GC-performance.rst + discussion/VM-integration.rst + discussion/chained_getattr.rst + discussion/cli-optimizations.rst + discussion/cmd-prompt-translation.rst + discussion/compiled-swamp.rst + discussion/ctypes_modules.rst + discussion/ctypes_todo.rst + 
discussion/distribution.rst + discussion/distribution-implementation.rst + discussion/distribution-newattempt.rst + discussion/distribution-roadmap.rst + discussion/emptying-the-malloc-zoo.rst + discussion/finalizer-order.rst + discussion/gc.rst + discussion/howtoimplementpickling.rst + discussion/improve-rpython.rst + discussion/outline-external-ootype.rst + discussion/oz-thread-api.rst + discussion/paper-wishlist.rst + discussion/parsing-ideas.rst + discussion/pypy_metaclasses_in_cl.rst + discussion/removing-stable-compiler.rst + discussion/security-ideas.rst + discussion/somepbc-refactoring-plan.rst + discussion/summer-of-pypy-pytest.rst + discussion/testing-zope.rst + discussion/thoughts_string_interning.rst + discussion/translation-swamp.rst + discussion/use_case_of_logic.rst .. STUFF THAT'S DIFFICULT TO CATEGORIZE - video-index.txt + video-index.rst Getting into PyPy ... From commits-noreply at bitbucket.org Thu Mar 17 19:01:18 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:01:18 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Update references to _ref.txt to _ref.rst, reflecting the mass-renaming Message-ID: <20110317180118.3ABFA282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42747:ff0cf030524b Date: 2011-03-14 15:04 -0400 http://bitbucket.org/pypy/pypy/changeset/ff0cf030524b/ Log: (dmalcolm, lac): Update references to _ref.txt to _ref.rst, reflecting the mass-renaming diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -421,5 +421,5 @@ .. _`directory reference`: docindex.html#directory-reference -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/glossary.rst b/pypy/doc/glossary.rst --- a/pypy/doc/glossary.rst +++ b/pypy/doc/glossary.rst @@ -244,4 +244,4 @@ .. _`subsystem implementing the Python language`: architecture.html#standard-interpreter .. 
_Theory: theory.html -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/cpython_differences.rst b/pypy/doc/cpython_differences.rst --- a/pypy/doc/cpython_differences.rst +++ b/pypy/doc/cpython_differences.rst @@ -222,4 +222,4 @@ *more* case on PyPy than on CPython 2.6/2.7.) -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/garbage_collection.rst b/pypy/doc/garbage_collection.rst --- a/pypy/doc/garbage_collection.rst +++ b/pypy/doc/garbage_collection.rst @@ -124,4 +124,4 @@ More details are available as comments at the start of the source in `rpython/memory/gc/markcompact.py`_. -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/interpreter.rst b/pypy/doc/interpreter.rst --- a/pypy/doc/interpreter.rst +++ b/pypy/doc/interpreter.rst @@ -407,4 +407,4 @@ as a reference for the exact attributes of interpreter classes visible at application level. -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/translation.rst b/pypy/doc/translation.rst --- a/pypy/doc/translation.rst +++ b/pypy/doc/translation.rst @@ -768,4 +768,4 @@ collection of functions (which may refer to each other in a mutually recursive fashion) and annotate and rtype them all at once. -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/architecture.rst b/pypy/doc/architecture.rst --- a/pypy/doc/architecture.rst +++ b/pypy/doc/architecture.rst @@ -260,5 +260,5 @@ .. _`generate Just-In-Time Compilers`: jit/index.html .. _`JIT Generation in PyPy`: jit/index.html -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/docindex.rst b/pypy/doc/docindex.rst --- a/pypy/doc/docindex.rst +++ b/pypy/doc/docindex.rst @@ -310,5 +310,5 @@ .. _`graph viewer`: getting-started-dev.html#try-out-the-translator .. _`compatibility matrix`: image/compat-matrix.png -.. include:: _ref.txt +.. 
include:: _ref.rst diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -299,4 +299,4 @@ .. _clr: clr-module.html .. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/rtyper.rst b/pypy/doc/rtyper.rst --- a/pypy/doc/rtyper.rst +++ b/pypy/doc/rtyper.rst @@ -791,4 +791,4 @@ assert res == ~3 .. _annotator: translation.html#the-annotation-pass -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/objspace-proxies.rst b/pypy/doc/objspace-proxies.rst --- a/pypy/doc/objspace-proxies.rst +++ b/pypy/doc/objspace-proxies.rst @@ -615,4 +615,4 @@ .. [D12.1] `High-Level Backends and Interpreter Feature Prototypes`, PyPy EU-Report, 2007, http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/parser.rst b/pypy/doc/parser.rst --- a/pypy/doc/parser.rst +++ b/pypy/doc/parser.rst @@ -100,4 +100,4 @@ information like the line number table and stack depth are computed. Finally, everything is passed to a brand new ``PyCode`` object. -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst --- a/pypy/doc/coding-guide.rst +++ b/pypy/doc/coding-guide.rst @@ -1085,4 +1085,4 @@ which will check that remote URLs are reachable. -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/objspace.rst b/pypy/doc/objspace.rst --- a/pypy/doc/objspace.rst +++ b/pypy/doc/objspace.rst @@ -650,4 +650,4 @@ .. _`What PyPy can do for your objects`: objspace-proxies.html -.. include:: _ref.txt +.. 
include:: _ref.rst diff --git a/pypy/doc/getting-started.rst b/pypy/doc/getting-started.rst --- a/pypy/doc/getting-started.rst +++ b/pypy/doc/getting-started.rst @@ -117,4 +117,4 @@ .. _bug reports: https://codespeak.net/issue/pypy-dev/ -.. include:: _ref.txt +.. include:: _ref.rst diff --git a/pypy/doc/faq.rst b/pypy/doc/faq.rst --- a/pypy/doc/faq.rst +++ b/pypy/doc/faq.rst @@ -416,7 +416,7 @@ .. _`RPython`: coding-guide.html#rpython .. _`getting-started`: getting-started.html -.. include:: _ref.txt +.. include:: _ref.rst ---------------------------------------------------------- Why does PyPy draw a Mandelbrot fractal while translating? diff --git a/pypy/doc/stackless.rst b/pypy/doc/stackless.rst --- a/pypy/doc/stackless.rst +++ b/pypy/doc/stackless.rst @@ -619,4 +619,4 @@ .. _`documentation of the greenlets`: http://codespeak.net/svn/greenlet/trunk/doc/greenlet.txt .. _`Stackless Transform`: translation.html#the-stackless-transform -.. include:: _ref.txt +.. include:: _ref.rst From commits-noreply at bitbucket.org Thu Mar 17 19:04:12 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Thu, 17 Mar 2011 19:04:12 +0100 (CET) Subject: [pypy-svn] pypy default: merge docs changes Message-ID: <20110317180412.EDC3F282BD6@codespeak.net> Author: Alex Perry Branch: Changeset: r42748:9abd94c2bd68 Date: 2011-03-14 19:11 +0000 http://bitbucket.org/pypy/pypy/changeset/9abd94c2bd68/ Log: merge docs changes diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. 
diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. thunk space or - another special space diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. 
diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. -The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. 
* no more than 10 people (although other projects as well as PyPy have been - noted to have more than that.
There have been projects who have used - the sprinting method to just envision and gather input.
- -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. - -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. 
The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. 
A permanent Internet connection is a must - - does the venue where the sprint is planned have weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue are - being met should therefore have very good local connections or, preferably - live there. - -3. Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content are fully decided a sprint - announcement is made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcement points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and in between to track work. After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and uploaded to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. 
The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. - - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. 
- - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! - -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? 
-+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden - Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo 
- Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. 
Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. 
The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. 
diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. - -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. 
- -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. This belongs -to the harder parts. - -What is not meant by pickling? -.............................. - -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state im memory -is not what I consider a real solution. In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend of we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. - -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. - -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. 
We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. - -For other types with are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. - -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ - -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. 
It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return form the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. -Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. - -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. - -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. 
It can be made non-recursive if the map operation - creates its own frame to keep state. - -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. - -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. - -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. - -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. 
-This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. - -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. -If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. 
- -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. - -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. 
- -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. -The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. - -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... - -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which not it not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. 
- -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable lifeness, it does not -track references which are known to be held by other objects. -Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. -The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the sketched problem of above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - -.. sectnum:: -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. - - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. - -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. - -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. 
- -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. - -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. - -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. - - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. - -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. 
- -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. - -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. - -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. _`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. _`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. 
_`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. _`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. _`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) 
- -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. - -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper 
tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. _`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. 
_`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. _Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. 
There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. 
- - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. 
- -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. 
- -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. 
- -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. - -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. 
For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. - -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... 
- except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. 
- -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. - -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. 
- -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. _`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. 
diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). - - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. 
The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. - - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. 
- - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). 
- - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. ``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. 
_pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. 
This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. - -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). 
- -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. 
The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. 
-This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: -.. sectnum:: - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. 
_`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. _`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. 
Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. 
- - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. _`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. 
- And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. _`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. 
- -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified.2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. 
**This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). -As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. - -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. -- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). 
- -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. 
diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,237 +0,0 @@ -PyPy, like any large project, has developed a jargon of its own. This -document gives brief definition of some of these terms and provides -links to more information. - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. - It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. _`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. 
they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. _llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. _lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. 
You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. _`promotion`: - -**promotion** - JIT_ terminology. *promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. 
_`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. _`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. 
_`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. 
We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. 
In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool object. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must follow the ``then`` or ``else`` branch of the ``if``. -So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this by doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulate the secret values need to be -RPython. 
Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. The compiled -version is checked to satisfy the security constrains, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it. 
Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. 
If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are another variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret". 
-Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. - -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. 
- -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. From the object space' point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. 
(Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,59 +0,0 @@ - -The PyPy project aims at producing a flexible and fast Python_ -implementation. The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? 
`more...`_ - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. 
_`Release 1.4`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.txt deleted file mode 100644 --- a/pypy/doc/jit/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -======================================================================== - JIT documentation -======================================================================== - -:abstract: - - When PyPy is translated into an executable like ``pypy-c``, the - executable contains a full virtual machine that can optionally - include a Just-In-Time compiler. This JIT compiler is **generated - automatically from the interpreter** that we wrote in RPython. - - This JIT Compiler Generator can be applied on interpreters for any - language, as long as the interpreter itself is written in RPython - and contains a few hints to guide the JIT Compiler Generator. - - -Content ------------------------------------------------------------- - -- Overview_: motivating our approach - -- Notes_ about the current work in PyPy - - -.. _Overview: overview.html -.. _Notes: pyjitpl5.html diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. 
- -The other approach ------------------- - -The general idea to handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. - - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes a usual -ootyped flowgraph and splits the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). - - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). 
- -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). - -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. 
- - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. 
In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only kind of ``Pair``, whose ``value`` and -``behaviour`` type are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. - -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performances. - - diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. _`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. 
-Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our hosts was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. _`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were hold at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. 
_`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There was over a hundred -attendants. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy will met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. _report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -.. _`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. 
Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. _`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. _`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. 
-Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. -Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. 
_`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). -Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. _`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. 
-Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. _`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. 
_`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. 
_`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. _page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 7246.38 pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 
4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - real 0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. 
_`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/.hgsubstate b/.hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,3 +1,3 @@ 80037 greenlet -80348 lib_pypy/pyrepl +80409 lib_pypy/pyrepl 80409 testrunner diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. 
diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. - -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,123 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. contents:: -.. sectnum:: - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. 
_`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. _`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. -We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. - -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. 
You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. - - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. _`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. 
See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). - -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. 
_`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. 
_`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. 
_`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/jit/overview.txt b/pypy/doc/jit/overview.txt deleted file mode 100644 --- a/pypy/doc/jit/overview.txt +++ /dev/null @@ -1,195 +0,0 @@ ------------------------------------------------------------------------- - Motivating JIT Compiler Generation ------------------------------------------------------------------------- - -.. contents:: -.. 
sectnum:: - -This is a non-technical introduction and motivation for PyPy's approach -to Just-In-Time compiler generation. - - -Motivation -======================================================================== - -Overview --------- - -Writing an interpreter for a complex dynamic language like Python is not -a small task, especially if, for performance goals, we want to write a -Just-in-Time (JIT) compiler too. - -The good news is that it's not what we did. We indeed wrote an -interpreter for Python, but we never wrote any JIT compiler for Python -in PyPy. Instead, we use the fact that our interpreter for Python is -written in RPython, which is a nice, high-level language -- and we turn -it *automatically* into a JIT compiler for Python. - -This transformation is of course completely transparent to the user, -i.e. the programmer writing Python programs. The goal (which we -achieved) is to support *all* Python features -- including, for example, -random frame access and debuggers. But it is also mostly transparent to -the language implementor, i.e. to the source code of the Python -interpreter. It only needs a bit of guidance: we had to put a small -number of hints in the source code of our interpreter. Based on these -hints, the *JIT compiler generator* produces a JIT compiler which has -the same language semantics as the original interpreter by construction. -This JIT compiler itself generates machine code at runtime, aggressively -optimizing the user's program and leading to a big performance boost, -while keeping the semantics unmodified. Of course, the interesting bit -is that our Python language interpreter can evolve over time without -getting out of sync with the JIT compiler. - - -The path we followed --------------------- - -Our previous incarnations of PyPy's JIT generator were based on partial -evaluation. This is a well-known and much-researched topic, considered -to be very promising. 
There have been many attempts to use it to -automatically transform an interpreter into a compiler. However, none of -them have lead to substantial speedups for real-world languages. We -believe that the missing key insight is to use partial evaluation to -produce just-in-time compilers, rather than classical ahead-of-time -compilers. If this turns out to be correct, the practical speed of -dynamic languages could be vastly improved. - -All these previous JIT compiler generators were producing JIT compilers -similar to the hand-written Psyco. But today, starting from 2009, our -prototype is no longer using partial evaluation -- at least not in a way -that would convince paper reviewers. It is instead based on the notion -of *tracing JIT,* recently studied for Java and JavaScript. When -compared to all existing tracing JITs so far, however, partial -evaluation gives us some extra techniques that we already had in our -previous JIT generators, notably how to optimize structures by removing -allocations. - -The closest comparison to our current JIT is Tamarin's TraceMonkey. -However, this JIT compiler is written manually, which is quite some -effort. In PyPy, we write a JIT generator at the level of RPython, -which means that our final JIT does not have to -- indeed, cannot -- be -written to encode all the details of the full Python language. These -details are automatically supplied by the fact that we have an -interpreter for full Python. - - -Practical results ------------------ - -The JIT compilers that we generate use some techniques that are not in -widespread use so far, but they are not exactly new either. The point -we want to make here is not that we are pushing the theoretical limits -of how fast a given dynamic language can be run. Our point is: we are -making it **practical** to have reasonably good Just-In-Time compilers -for all dynamic languages, no matter how complicated or non-widespread -(e.g. 
Open Source dynamic languages without large industry or academic -support, or internal domain-specific languages). By practical we mean -that this should be: - -* Easy: requires little more efforts than writing the interpreter in the - first place. - -* Maintainable: our generated JIT compilers are not separate projects - (we do not generate separate source code, but only throw-away C code - that is compiled into the generated VM). In other words, the whole - JIT compiler is regenerated anew every time the high-level interpreter - is modified, so that they cannot get out of sync no matter how fast - the language evolves. - -* Fast enough: we can get some rather good performance out of the - generated JIT compilers. That's the whole point, of course. - - -Alternative approaches to improve speed -======================================================================== - -+----------------------------------------------------------------------+ -| :NOTE: | -| | -| Please take the following section as just a statement of opinion. | -| In order to be debated over, the summaries should first be | -| expanded into full arguments. We include them here as links; | -| we are aware of them, even if sometimes pessimistic about them | -| ``:-)`` | -+----------------------------------------------------------------------+ - -There are a large number of approaches to improving the execution speed of -dynamic programming languages, most of which only produce small improvements -and none offer the flexibility and customisability provided by our approach. -Over the last 6 years of tweaking, the speed of CPython has only improved by a -factor of 1.3 or 1.4 (depending on benchmarks). Many tweaks are applicable to -PyPy as well. Indeed, some of the CPython tweaks originated as tweaks for PyPy. 
- -IronPython initially achieved a speed of about 1.8 times that of CPython by -leaving out some details of the language and by leveraging the large investment -that Microsoft has put into making the .NET platform fast; the current, more -complete implementation has roughly the same speed as CPython. In general, the -existing approaches have reached the end of the road, speed-wise. Microsoft's -Dynamic Language Runtime (DLR), often cited in this context, is essentially -only an API to make the techniques pioneered in IronPython official. At best, -it will give another small improvement. - -Another technique regularly mentioned is adding types to the language in order -to speed it up: either explicit optional typing or soft typing (i.e., inferred -"likely" types). For Python, all projects in this area have started with a -simplified subset of the language; no project has scaled up to anything close -to the complete language. This would be a major effort and be platform- and -language-specific. Moreover maintenance would be a headache: we believe that -many changes that are trivial to implement in CPython, are likely to invalidate -previous carefully-tuned optimizations. - -For major improvements in speed, JIT techniques are necessary. For Python, -Psyco gives typical speedups of 2 to 4 times - up to 100 times in algorithmic -examples. It has come to a dead end because of the difficulty and huge costs -associated with developing and maintaining it. It has a relatively poor -encoding of language semantics - knowledge about Python behavior needs to be -encoded by hand and kept up-to-date. At least, Psyco works correctly even when -encountering one of the numerous Python constructs it does not support, by -falling back to CPython. The PyPy JIT started out as a metaprogrammatic, -non-language-specific equivalent of Psyco. - -A different kind of prior art are self-hosting JIT compilers such as Jikes. -Jikes is a JIT compiler for Java written in Java. 
It has a poor encoding of -language semantics; it would take an enormous amount of work to encode all the -details of a Python-like language directly into a JIT compiler. It also has -limited portability, which is an issue for Python; it is likely that large -parts of the JIT compiler would need retargetting in order to run in a -different environment than the intended low-level one. - -Simply reusing an existing well-tuned JIT like that of the JVM does not -really work, because of concept mismatches between the implementor's -language and the host VM language: the former needs to be compiled to -the target environment in such a way that the JIT is able to speed it up -significantly - an approach which essentially has failed in Python so -far: even though CPython is a simple interpreter, its Java and .NET -re-implementations are not significantly faster. - -More recently, several larger projects have started in the JIT area. For -instance, Sun Microsystems is investing in JRuby, which aims to use the Java -Hotspot JIT to improve the performance of Ruby. However, this requires a lot of -hand crafting and will only provide speedups for one language on one platform. -Some issues are delicate, e.g., how to remove the overhead of constantly boxing -and unboxing, typical in dynamic languages. An advantage compared to PyPy is -that there are some hand optimizations that can be performed, that do not fit -in the metaprogramming approach. But metaprogramming makes the PyPy JIT -reusable for many different languages on many different execution platforms. -It is also possible to combine the approaches - we can get substantial speedups -using our JIT and then feed the result to Java's Hotspot JIT for further -improvement. One of us is even a member of the `JSR 292`_ Expert Group -to define additions to the JVM to better support dynamic languages, and -is contributing insights from our JIT research, in ways that will also -benefit PyPy. 
- -Finally, tracing JITs are now emerging for dynamic languages like -JavaScript with TraceMonkey. The code generated by PyPy is very similar -(but not hand-written) to the concepts of tracing JITs. - - -Further reading -======================================================================== - -The description of the current PyPy JIT generator is given in PyJitPl5_ -(draft). - -.. _`JSR 292`: http://jcp.org/en/jsr/detail?id=292 -.. _PyJitPl5: pyjitpl5.html diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. 
This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. - -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). - -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. - - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. 
- -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. - - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. - - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? 
- - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. 
diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Upto Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. -The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. 
- -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. 
Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. 
The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. 
-:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern = W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when 
space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. 
-This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. - -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. 
- -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. - -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. 
_rtyping: ../rtyper.html diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. 
We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. 
_`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). - -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. 
Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). - -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. - -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. 
_`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. 
- -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. 
The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. 
- -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. 
Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. - -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. 
_`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. 
_`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. - - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. 
include:: _ref.txt - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. - - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). - -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. 
So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. - -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... 
- ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) - -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. - -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. 
When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: -.. sectnum:: - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. 
- -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initialize appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows to view -the Virtual's machine bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. -they don't have registers but put object to and pull objects -from a stack. 
The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer a -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to an bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. -See `application level exceptions`_ for some more information -on ``OperationErrors``. - -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code. Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettingers -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). 
- -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. _`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. 
Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls object - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. _Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. 
Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names pass to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the `'create_frame()`` method. With proper parser and compiler support this would -allow to create custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function. 
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). 
diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - * 
experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. - throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. 
_`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. 
- -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. 
- It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. 
_`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? - [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? 
- [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? - [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? 
that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - 
[10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. 
diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. 
Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. 
_`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. 
- -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. 
- -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. - -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. 
_`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. _`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. 
_`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. _`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. 
- -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. - -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. 
_feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. _`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. 
diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. 
You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. 
- -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. 
- -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: -.. sectnum:: - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. 
_`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. - -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). 
The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. - -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. 
- -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. include:: _ref.txt diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. _mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. 
PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. -It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. 
- -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. 
We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. 
- -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. 
It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. 
-That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. 
It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. 
Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) 
- -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. 
Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<<y). - -.. note:: [archive corruption] the original document's text between - ``x<`` and ``>=0.13.0`` was stripped here, as if it were an HTML tag; - the missing sections must be restored from the source document. - -Pylint_ (>=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which checks for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -..
_astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. 
- - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -The rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly an interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level.
In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. 
Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. -here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. 
- -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. 
- -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... - -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. 
- -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. -(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. 
For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. - -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. 
_`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. 
If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while your test code -runs at application level. If you need to use modules -you have to import them within the test function. - -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b": 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -..
_`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. _`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. 
diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. 
for .NET: System.Collections.ArrayList) - - - external instances (e.g. for js: window, window.document) - - - external functions? (they are not needed for .NET and JVM, maybe - for js?) - -External objects should behave as much as possible as "internal -objects". - -Moreover, we want to preserve the possibility of *testing* RPython -programs on top of CPython if possible. For example, it should be -possible to test RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as far as I know. - - -How to represent types ----------------------- - -First, some definitions: - - - high-level types are the types used by the annotator - (SomeInteger() & co.) - - - low-level types are the types used by the rtyper (Signed & co.) - - - platform-level types are the types used by the backends (e.g. int32 for - .NET) - -Usually, RPython types are described "top-down": we start from the -annotation, then the rtyper transforms the high-level types into -low-level types, then the backend transforms low-level types into -platform-level types. E.g. for .NET, SomeInteger() -> Signed -> int32. - -External objects are different: we *already* know the platform-level -types of our objects and we can't modify them. What we need to do is -to specify an annotation that after the high-level -> low-level -> -platform-level transformation will give us the correct types. - -For primitive types it is usually easy to find the correct annotation; -if we have an int32, we know that its ootype is Signed and the -corresponding annotation is SomeInteger(). - -For non-primitive types such as classes, we must use a "bottom-up" -approach: first, we need a description of the platform-level interface of -the class; then we construct the corresponding low-level type and -teach the backends how to treat such "external types". Finally, we -wrap the low-level types into special "external annotation".
- -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... } - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... 
- -This is not straightforward because to make the flow objspace happy the -object which represents System.OverflowException must be a real Python -class that inherits from Exception. - -This means that the Python objects which represent external classes -must be Python classes themselves, and that classes representing -exceptions must be special cased and made subclasses of Exception. - - -Inheritance -~~~~~~~~~~~ - -It would be nice to allow programmers to inherit from an external -class. Not sure about the implications, though. - -Callbacks -~~~~~~~~~ - -I know that they are an issue for JS, but I don't know how they are -currently implemented. - -Special methods/properties -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In .NET there are special methods that can be accessed using a special -syntax, for example indexer or properties. It would be nice to have in -RPython the same syntax as C#. - - -Implementation details ----------------------- - -The CLI backend uses a similar approach right now, but it could be -necessary to rewrite a part of it. - -To represent low-level types, it uses NativeInstance, a subclass of -ootype.Instance that contains all the information needed by the -backend to reference the class (e.g., the namespace). It also supports -overloading. - -For annotations, it reuses SomeOOInstance, which is also a wrapper -around a low-level type but it has been designed for low-level -helpers. It might be saner to use another annotation not to mix apples -and oranges, maybe factoring out common code. - -I don't know whether and how much code can be reused from the existing -bltregistry. diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker.
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. 
_`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. 
_`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. 
_`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. 
It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. 
Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. - -PyPy's ctypes implementation in its current state proves the -feasibility of implementing a module with the same interface and -behavior for PyPy as ctypes for CPython. - -PyPy's implementation internally uses `libffi`_ like CPython's ctypes. -In our implementation as much as possible of the code is written in -full Python, not RPython. 
In CPython's situation, the equivalent would -be to write as little as possible code in C. We essentially favored -rapid experimentation over worrying about speed for this first trial -implementation. This allowed to provide a working implementation with -a large part of ctypes features in 2 months real time. - -We reused the ``ctypes`` package version 1.0.2 as-is from CPython. We -implemented ``_ctypes`` which is a C module in CPython mostly in pure -Python based on a lower-level layer extension module ``_rawffi``. - -.. _`libffi`: http://sources.redhat.com/libffi/ - -Low-level part: ``_rawffi`` -============================ - -This PyPy extension module (``pypy/module/_rawffi``) exposes a simple interface -to create C objects (arrays and structures) and calling functions -in dynamic libraries through libffi. Freeing objects in most cases and making -sure that objects referring to each other are kept alive is responsibility of the higher levels. - -This module uses bindings to libffi which are defined in ``pypy/rlib/libffi.py``. - -We tried to keep this module as small as possible. It is conceivable -that other implementations (e.g. Jython) could use our ctypes -implementation by writing their version of ``_rawffi``. - -High-level parts -================= - -The reused ``ctypes`` package lives in ``lib_pypy/ctypes``. ``_ctypes`` -implementing the same interface as ``_ctypes`` in CPython is in -``lib_pypy/_ctypes``. - -Discussion and limitations -============================= - -Reimplementing ctypes features was in general possible. PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. 
C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. - -If one goes inside the checkout it is possible to run ``_rawffi`` tests with:: - - $ cd pypy - $ python test_all.py module/_rawffi/ - -The ctypes implementation test suite is derived from the tests for -ctypes 1.0.2, we have skipped some tests corresponding to not -implemented features or implementation details, we have also added -some tests. - -To run the test suite a compiled pypy-c is required with the proper configuration. 
To build the required pypy-c one should inside the checkout:: - - $ cd pypy/translator/goal - $ ./translate.py --text --batch --gc=generation targetpypystandalone.py - --withmod-_rawffi --allworkingmodules - -this should produce a pypy-c executable in the ``goal`` directory. - -To run the tests then:: - - $ cd ../../.. # back to pypy-trunk - $ ./pypy/translator/goal/pypy-c pypy/test_all.py lib/pypy1.2/lib_pypy/pypy_test/ctypes_tests - -There should be 36 skipped tests and all other tests should pass. - -Running application examples -============================== - -`pyglet`_ is known to run. We had some success also with pygame-ctypes which is not maintained anymore and with a snapshot of the experimental pysqlite-ctypes. We will only describe how to run the pyglet examples. - -pyglet -------- - -We tried pyglet checking it out from its repository at revision 1984. -For convenience a tarball of the checkout can also be found at: - -http://codespeak.net/~pedronis/pyglet-r1984.tgz - -From pyglet, the following examples are known to work: - - - opengl.py - - multiple_windows.py - - events.py - - html_label.py - - timer.py - - window_platform_event.py - - fixed_resolution.py - -The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.:: - - $ cd pyglet/ - $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py - - -they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave. - -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. 
_`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object space results in a dump of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. _`stackless transform`: ../stackless.html diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`.
diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. 
diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statistics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. 
diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. - -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? 
-------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! - -The most likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. - -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. 
PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). - -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. 
_`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. - -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. 
_`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. - -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. 
_`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! - - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. 
__: http://llvm.org/ - -What PyPy contains is, on the one hand, an non-soft static type -inferencer for RPython, which is a sublanguage that we defined just so -that it's possible and not too hard to do that; and on the other hand, -for the full Python language, we have an interpreter, and a JIT -generator which can produce a Just-In-Time Compiler from the -interpreter. The resulting JIT works for the full Python language in a -way that doesn't need type inference at all. - -For more motivation and details about our approach see also [D05.1]_, -section 3. - -.. [BRETT] Brett Cannon, - Localized Type Inference of Atomic Types in Python, - http://www.ocf.berkeley.edu/~bac/thesis.pdf - -.. [D05.1] Compiling Dynamic Language Implementations, - Report from the PyPy project to the E.U., - http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - -.. _`PyPy's RPython`: - ------------------------------- -What is this RPython language? ------------------------------- - -RPython is a restricted subset of the Python language. It is used for -implementing dynamic language interpreters within the PyPy framework. The -restrictions are to ensure that type inference (and so, ultimately, translation -to other languages) of RPython programs is possible. These restrictions only -apply after the full import happens, so at import time arbitrary Python code can -be executed. - -The property of "being RPython" always applies to a full program, not to single -functions or modules (the translation tool chain does a full program analysis). -"Full program" in the context of "being RPython" is all the code reachable from -an "entry point" function. The translation toolchain follows all calls -recursively and discovers what belongs to the program and what not. - -The restrictions that apply to programs to be RPython mostly limit the ability -of mixing types in arbitrary ways. 
RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. -To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. `Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. 
Early attempts were dropped because they -conflicted with refactorings that we needed in order to progress on the -rest of PyPy; the currently active developers of PyPy have different -priorities. If someone wants to start working in that direction I -imagine that he might get a (very little) bit of support from us, -though. - -Alternatively, it's possible to write a mixed-module, i.e. an extension -module for PyPy in RPython, which you can then import from your Python -program when it runs on top of PyPy. This is similar to writing a C -extension module for CPython in term of investment of effort (without -all the INCREF/DECREF mess, though). - ------------------------------------------------------- -What's the ``"NOT_RPYTHON"`` I see in some docstrings? ------------------------------------------------------- - -If you put "NOT_RPYTHON" into the docstring of a function and that function is -found while trying to translate an RPython program, the translation process -stops and reports this as an error. You can therefore mark functions as -"NOT_RPYTHON" to make sure that they are never analyzed. - - -------------------------------------------------------------------- -Couldn't we simply take a Python syntax tree and turn it into Lisp? -------------------------------------------------------------------- - -It's not necessarily nonsense, but it's not really The PyPy Way. It's -pretty hard, without some kind of type inference, to translate, say this -Python:: - - a + b - -into anything significantly more efficient than this Common Lisp:: - - (py:add a b) - -And making type inference possible is what RPython is all about. - -You could make ``#'py:add`` a generic function and see if a given CLOS -implementation is fast enough to give a useful speed (but I think the -coercion rules would probably drive you insane first). -- mwh - --------------------------------------------- -Do I have to rewrite my programs in RPython? --------------------------------------------- - -No. 
PyPy always runs your code in its own interpreter, which is a -full and compliant Python 2.5 interpreter. RPython_ is only the -language in which parts of PyPy itself are written and extension -modules for it. The answer to whether something needs to be written as -an extension module, apart from the "gluing to external libraries" reason, will -change over time as speed for normal Python code improves. - -------------------------- -Which backends are there? -------------------------- - -Currently, there are backends for C_, the CLI_, and the JVM_. -All of these can translate the entire PyPy interpreter. -To learn more about backends take a look at the `translation document`_. - -.. _C: translation.html#the-c-back-end -.. _CLI: cli-backend.html -.. _JVM: translation.html#genjvm -.. _`translation document`: translation.html - ----------------------- -How do I compile PyPy? ----------------------- - -See the `getting-started`_ guide. - -.. _`how do I compile my own interpreters`: - -------------------------------------- -How do I compile my own interpreters? -------------------------------------- - -Start from the example of -`pypy/translator/goal/targetnopstandalone.py`_, which you compile by -typing:: - - python translate.py targetnopstandalone - -You can have a look at intermediate C source code, which is (at the -moment) put in ``/tmp/usession-*/testing_1/testing_1.c``. Of course, -all the functions and stuff used directly and indirectly by your -``entry_point()`` function has to be RPython_. - - -.. _`RPython`: coding-guide.html#rpython -.. _`getting-started`: getting-started.html - -.. include:: _ref.txt - ----------------------------------------------------------- -Why does PyPy draw a Mandelbrot fractal while translating? ----------------------------------------------------------- - -Because it's fun. 
diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.txt deleted file mode 100644 --- a/pypy/doc/discussion/gc.txt +++ /dev/null @@ -1,77 +0,0 @@ - -*Note: these things are experimental and are being implemented on the -`io-improvements`_ branch* - -.. _`io-improvements`: http://codespeak.net/svn/pypy/branch/io-improvements - -============= -GC operations -============= - -This document tries to gather gc-related issues which are very recent -or in-development. Also, it tries to document needed gc refactorings -and expected performance of certain gc-related operations. - -Problem area -============ - -Since some of our gcs are moving, we at some point decided to simplify -the issue of taking care of it by always copying the contents of -data that goes to C level. This yields a performance penalty, also -because some gcs do not move data around anyway. - -So we decided to introduce new operations which will simplify issues -regarding this. - -Pure gc operations -================== - -(All available from rlib.rgc) - -* can_move(p) - returns a flag telling whether pointer p will move. - useful for example when you want to know whether memcopy is safe. - -* malloc_nonmovable(TP, n=None) - tries to allocate non-moving object. - if it succeeds, it returns an object, otherwise (for whatever reasons) - returns null pointer. Does not raise! (never) - -Usage patterns -============== - -Usually those functions are used via helpers located in rffi. For things like -os.write - first get_nonmovingbuffer(data) that will give you a pointer -suitable for passing to C and finally free_nonmovingbuffer. 
- -For os.read like usage - you first call alloc_buffer (that will allocate a -buffer of desired size passable to C) and afterwards create str_from_buffer, -finally calling keep_buffer_alive_until_here. - -String builder -============== - -In Python strings are immutable by design. In RPython this still holds true, -but since we cooperate with lower (C/POSIX) level, which has no notion of -strings, we use buffers. Typical use case is to use list of characters l and -then ''.join(l) in order to get string. This requires a lot of unnecessary -copying, which yields performance penalty for such operations as string -formatting. Hence the idea of string builder. String builder would be an -object to which you can append strings or characters and afterwards build it -to a string. Ideally, this set of operations would not contain any copying -whatsoever. - -Low level gc operations for string builder ------------------------------------------- - -* alloc_buffer(T, size) - allocates Array(nolength=True) with possibility - of later becoming of shape T - -* realloc_buffer(buf, newsize) - tries to shrink or enlarge buffer buf. Returns - new pointer (since it might involve copying) - -* build_buffer(T, buf) - creates a type T (previously passed to alloc_buffer) - from buffer. - -Depending on a gc, those might be implemented dumb (realloc always copies) -or using C-level realloc. Might be implemented also in whatever clever way -comes to mind. - diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.txt deleted file mode 100644 --- a/pypy/doc/config/translation.taggedpointers.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable tagged pointers. This option is mostly useful for the Smalltalk and -Prolog interpreters. For the Python interpreter the option -:config:`objspace.std.withsmallint` should be used. 
diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/pypy/doc/jit/_ref.txt b/pypy/doc/jit/_ref.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.profopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use GCC's profile-guided optimizations. This option specifies the -arguments with which to call pypy-c (and in general the translated -RPython program) to gather profile data. Example for pypy-c: "-c 'from -richards import main;main(); from test import pystone; -pystone.main()'" diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. 
diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: -.. sectnum:: - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. - - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. 
To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... - >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. 
You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". - -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. 
-You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. 
PyPy development always was and is still thoroughly test-driven. -We use the flexible `py.test testing tool`_ which you can `install independently -`_ and use independently -from PyPy for other projects.
- -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. 
`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users need to -install it if they want to run low-level tests.
You don't necessarily need to install these two libraries because -we also ship them inlined in the PyPy source tree.
but only, of course, if -there are no other objects outside the cycle that have a finalizer and a -reference to the cycle.
- -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. 
First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
-* at code freeze make a release branch under - http://codespeak.net/svn/pypy/release/x.y(.z).
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backend are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? - -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. 
The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. 
The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. - -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). 
This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. - - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. 
- -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. - - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. 
_Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. 
The __del__ methods are -called in "the right" order if they are on objects pointing to each -other, as in CPython, but unlike CPython, if there is a dead cycle of -objects referencing each other, their __del__ methods are called anyway; -CPython would instead put them into the list ``garbage`` of the ``gc`` -module. More information is available on the blog `[1]`__ `[2]`__. - -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html - -Using the default GC called ``minimark``, the built-in function ``id()`` -works like it does in CPython. With other GCs it returns numbers that -are not real addresses (because an object can move around several times) -and calling it a lot can lead to performance problem. - -Note that if you have a long chain of objects, each with a reference to -the next one, and each with a __del__, PyPy's GC will perform badly. On -the bright side, in most other cases, benchmarks have shown that PyPy's -GCs perform much better than CPython's. - -Another difference is that if you add a ``__del__`` to an existing class it will -not be called:: - - >>>> class A(object): - .... pass - .... - >>>> A.__del__ = lambda self: None - __main__:1: RuntimeWarning: a __del__ method added to an existing type will not be called - - -Subclasses of built-in types ----------------------------- - -Officially, CPython has no rule at all for when exactly -overridden method of subclasses of built-in types get -implicitly called or not. As an approximation, these methods -are never called by other built-in methods of the same object. -For example, an overridden ``__getitem__()`` in a subclass of -``dict`` will not be called by e.g. the built-in ``get()`` -method. - -The above is true both in CPython and in PyPy. Differences -can occur about whether a built-in function or method will -call an overridden method of *another* object than ``self``. 
-In PyPy, they are generally always called, whereas not in -CPython. For example, in PyPy, ``dict1.update(dict2)`` -considers that ``dict2`` is just a general mapping object, and -will thus call overridden ``keys()`` and ``__getitem__()`` -methods on it. So the following code prints ``42`` on PyPy -but ``foo`` on CPython:: - - >>>> class D(dict): - .... def __getitem__(self, key): - .... return 42 - .... - >>>> - >>>> d1 = {} - >>>> d2 = D(a='foo') - >>>> d1.update(d2) - >>>> print d1['a'] - 42 - - -Ignored exceptions ------------------------ - -In many corner cases, CPython can silently swallow exceptions. -The precise list of when this occurs is rather long, even -though most cases are very uncommon. The most well-known -places are custom rich comparison methods (like \_\_eq\_\_); -dictionary lookup; calls to some built-in functions like -isinstance(). - -Unless this behavior is clearly present by design and -documented as such (as e.g. for hasattr()), in most cases PyPy -lets the exception propagate instead. - - -Miscellaneous -------------- - -* ``sys.setrecursionlimit()`` is ignored (and not needed) on - PyPy. On CPython it would set the maximum number of nested - calls that can occur before a RuntimeError is raised; on PyPy - overflowing the stack also causes RuntimeErrors, but the limit - is checked at a lower level. (The limit is currently hard-coded - at 768 KB, corresponding to roughly 1480 Python calls on - Linux.) - -* assignment to ``__class__`` is limited to the cases where it - works on CPython 2.5. On CPython 2.6 and 2.7 it works in a bit - more cases, which are not supported by PyPy so far. (If needed, - it could be supported, but then it will likely work in many - *more* case on PyPy than on CPython 2.6/2.7.) - - -.. 
include:: _ref.txt diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... 
- -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. 
- -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). 
- -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... 
A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). 
- -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. 
diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. 
- -Extensible type system for llexternal -------------------------------------- - -llexternal allows the description of a C function, and conveys the same -information about the arguments as a C header. But this is often not enough. -For example, a parameter of type `int*` is converted to -`rffi.CArrayPtr(rffi.INT)`, but this information is not enough to use the -function. The parameter could be an array of int, a reference to a single value, -for input or output... - -A "type system" could hold this additional information, and automatically -generate some conversion code to ease the usage of the function from -rpython. For example:: - - # double frexp(double x, int *exp); - frexp = llexternal("frexp", [rffi.DOUBLE, OutPtr(rffi.int)], rffi.DOUBLE) - -`OutPtr` indicates that the parameter is output-only, which need not to be -initialized, and which *value* is returned to the caller. In rpython the call -becomes:: - - fraction, exponent = frexp(value) - -Also, we could imagine that one item in the llexternal argument list corresponds -to two parameters in C. Here, OutCharBufferN indicates that the caller will pass -a rpython string; the framework will pass buffer and length to the function:: - - # ssize_t write(int fd, const void *buf, size_t count); - write = llexternal("write", [rffi.INT, CharBufferAndSize], rffi.SSIZE_T) - -The rpython code that calls this function is very simple:: - - written = write(fd, data) - -compared with the present:: - - count = len(data) - buf = rffi.get_nonmovingbuffer(data) - try: - written = rffi.cast(lltype.Signed, os_write( - rffi.cast(rffi.INT, fd), - buf, rffi.cast(rffi.SIZE_T, count))) - finally: - rffi.free_nonmovingbuffer(data, buf) - -Typemaps are very useful for large APIs where the same conversions are needed in -many places. 
XXX example diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== -Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! 
- -Object Optimizations -==================== - -String Optimizations --------------------- - -String-Join Objects -+++++++++++++++++++ - -String-join objects are a different implementation of the Python ``str`` type, -They represent the lazy addition of several strings without actually performing -the addition (which involves copying etc.). When the actual value of the string -join object is needed, the addition is performed. This makes it possible to -perform repeated string additions in a loop without using the -``"".join(list_of_strings)`` pattern. - -You can enable this feature enable with the :config:`objspace.std.withstrjoin` -option. - -String-Slice Objects -++++++++++++++++++++ - -String-slice objects are another implementation of the Python ``str`` type. -They represent the lazy slicing of a string without actually performing the -slicing (which would involve copying). This is only done for slices of step -one. When the actual value of the string slice object is needed, the slicing -is done (although a lot of string methods don't make this necessary). This -makes string slicing a very efficient operation. It also saves memory in some -cases but can also lead to memory leaks, since the string slice retains a -reference to the original string (to make this a bit less likely, we don't -use lazy slicing when the slice would be much shorter than the original -string. There is also a minimum number of characters below which being lazy -is not saving any time over making the copy). - -You can enable this feature with the :config:`objspace.std.withstrslice` option. - -Ropes -+++++ - -Ropes are a general flexible string implementation, following the paper `"Ropes: -An alternative to Strings."`_ by Boehm, Atkinson and Plass. Strings are -represented as balanced concatenation trees, which makes slicing and -concatenation of huge strings efficient. 
- -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. _`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. 
Multi-dicts are a general way to do that for dictionaries: they -provide generic support for the switching of internal representations for -dicts. - -If you just enable multi-dicts, special representations are used for empty dictionaries and -for string-keyed dictionaries. In addition there are more specialized dictionary -implementations for various purposes (see below). - -This is now the default implementation of dictionaries in the Python interpreter. - -Sharing Dicts -+++++++++++++ - -Sharing dictionaries are a special representation used together with multidicts. -This dict representation is used only for instance dictionaries and tries to -make instance dictionaries use less memory (in fact, in the ideal case the -memory behaviour should be mostly like that of using __slots__). - -The idea is the following: Most instances of the same class have very similar -attributes, and are even adding these keys to the dictionary in the same order -while ``__init__()`` is being executed. That means that all the dictionaries of -these instances look very similar: they have the same set of keys with different -values per instance. What sharing dicts do is store these common keys into a -common structure object and thus save the space in the individual instance -dicts: -the representation of the instance dict contains only a list of values. - -A more advanced version of sharing dicts, called *map dicts,* is available -with the :config:`objspace.std.withmapdict` option. - -Builtin-Shadowing -+++++++++++++++++ - -Usually the calling of builtins in Python requires two dictionary lookups: first -to see whether the current global dictionary contains an object with the same -name, then a lookup in the ``__builtin__`` dictionary. This is somehow -circumvented by storing an often used builtin into a local variable to get -the fast local lookup (which is a rather strange and ugly hack). - -The same problem is solved in a different way by "wary" dictionaries. 
They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). 
For this a special dict -representation is used together with multidicts. This dict representation is -used only for instance dictionaries. The instance dictionary tracks whether an -instance attribute shadows an attribute of its class. This makes method calls -slightly faster in the following way: When calling a method the first thing that -is checked is the class dictionary to find descriptors. Normally, when a method -is found, the instance dictionary is then checked for instance attributes -shadowing the class attribute. If we know that there is no shadowing (since -instance dict tells us that) we can save this lookup on the instance dictionary. - -*This was deprecated and is no longer available.* - - -Method Caching -++++++++++++++ - -Shadow tracking is also an important building block for the method caching -optimization. A method cache is introduced where the result of a method lookup -is stored (which involves potentially many lookups in the base classes of a -class). Entries in the method cache are stored using a hash computed from -the name being looked up, the call site (i.e. the bytecode object and -the current program counter), and a special "version" of the type where the -lookup happens (this version is incremented every time the type or one of its -base classes is changed). On subsequent lookups the cached version can be used, -as long as the instance did not shadow any of its classes attributes. - -You can enable this feature with the :config:`objspace.std.withmethodcache` -option. - -Interpreter Optimizations -========================= - -Special Bytecodes ------------------ - -.. _`lookup method call method`: - -LOOKUP_METHOD & CALL_METHOD -+++++++++++++++++++++++++++ - -An unusual feature of Python's version of object oriented programming is the -concept of a "bound method". While the concept is clean and powerful, the -allocation and initialization of the object is not without its performance cost. 
-We have implemented a pair of bytecodes that alleviate this cost. - -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
- -After pushing the arguments, the layout of the stack in the above -example is as follows (the stack grows upwards): - -+---------------------------------+ -| ``y`` *(2nd arg)* | -+---------------------------------+ -| ``x`` *(1st arg)* | -+---------------------------------+ -| ``obj`` *(im_self)* | -+---------------------------------+ -| ``function object`` *(im_func)* | -+---------------------------------+ - -The ``CALL_METHOD N`` bytecode emulates a bound method call by -inspecting the *im_self* entry in the stack below the ``N`` arguments: -if it is not None, then it is considered to be an additional first -argument in the call to the *im_func* object from the stack. - -You can enable this feature with the :config:`objspace.opcodes.CALL_METHOD` -option. - -.. _`call likely builtin`: - -CALL_LIKELY_BUILTIN -+++++++++++++++++++ - -An often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the builtins dictionary. PyPy approaches this problem at the -implementation level, with the introduction of the new ``CALL_LIKELY_BUILTIN`` -bytecode. This bytecode is produced by the compiler for a call whose target is -the name of a builtin. Since such a syntactic construct is very often actually -invoking the expected builtin at run-time, this information can be used to make -the call to the builtin directly, without going through any dictionary lookup. - -However, it can occur that the name is shadowed by a global name from the -current module. To catch this case, a special dictionary implementation for -multidicts is introduced, which is used for the dictionaries of modules. This -implementation keeps track of which builtin name is shadowed by it. 
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. 
From commits-noreply at bitbucket.org Thu Mar 17 19:04:13 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Thu, 17 Mar 2011 19:04:13 +0100 (CET) Subject: [pypy-svn] pypy default: merge code changes Message-ID: <20110317180413.5369A282BD7@codespeak.net> Author: Alex Perry Branch: Changeset: r42749:764360b919fe Date: 2011-03-14 19:12 +0000 http://bitbucket.org/pypy/pypy/changeset/764360b919fe/ Log: merge code changes From commits-noreply at bitbucket.org Thu Mar 17 19:04:13 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Thu, 17 Mar 2011 19:04:13 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110317180413.ABBF0282BD6@codespeak.net> Author: Alex Perry Branch: Changeset: r42750:03818a6ec9dd Date: 2011-03-14 19:17 +0000 http://bitbucket.org/pypy/pypy/changeset/03818a6ec9dd/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 17 19:04:13 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Thu, 17 Mar 2011 19:04:13 +0100 (CET) Subject: [pypy-svn] pypy default: merge Message-ID: <20110317180413.EB23A282BD7@codespeak.net> Author: Alex Perry Branch: Changeset: r42751:a293f1412c5b Date: 2011-03-14 19:21 +0000 http://bitbucket.org/pypy/pypy/changeset/a293f1412c5b/ Log: merge From commits-noreply at bitbucket.org Thu Mar 17 19:04:15 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Thu, 17 Mar 2011 19:04:15 +0100 (CET) Subject: [pypy-svn] pypy default: minor clarifications to getting started instructions Message-ID: <20110317180415.34E90282BD6@codespeak.net> Author: Alex Perry Branch: Changeset: r42752:e82074493f34 Date: 2011-03-14 19:33 +0000 http://bitbucket.org/pypy/pypy/changeset/e82074493f34/ Log: minor clarifications to getting started instructions diff --git a/pypy/doc/getting-started.rst b/pypy/doc/getting-started.rst --- a/pypy/doc/getting-started.rst +++ b/pypy/doc/getting-started.rst @@ -33,7 +33,9 @@ .. 
_`downloading them from the download page`: download.html -If you choose to use mercurial, you must issue the following command on your +If you choose to use mercurial, +first make sure you have ``subversion`` installed. +You must issue the following command on your command line, DOS box, or terminal:: hg clone http://bitbucket.org/pypy/pypy pypy diff --git a/pypy/doc/externaltools.rst b/pypy/doc/externaltools.rst --- a/pypy/doc/externaltools.rst +++ b/pypy/doc/externaltools.rst @@ -16,6 +16,8 @@ - gcc + - make + - Some libraries (these are Debian package names, adapt as needed): * ``python-dev`` diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -39,6 +39,7 @@ adapt as needed): * ``gcc`` + * ``make`` * ``python-dev`` * ``python-ctypes`` if you are still using Python2.4 * ``libffi-dev`` @@ -49,6 +50,7 @@ * ``libexpat1-dev`` (for the optional ``pyexpat`` module) * ``libssl-dev`` (for the optional ``_ssl`` module) * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) + * ``python-sphinx`` (for the optional documentation build) 2. Translation is somewhat time-consuming (30 min to over one hour) and RAM-hungry. If you have less than 1.5 GB of @@ -68,8 +70,7 @@ possibly replacing ``--opt=jit`` with another `optimization level`_ of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) + compiler. As of March 2011, Intel **32-bit** environment needs ``4GB``. .. 
_`optimization level`: config/opt.html From commits-noreply at bitbucket.org Thu Mar 17 19:04:15 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:15 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, jacob): add handy commands for installing build-time deps on Debian and Fedora boxes Message-ID: <20110317180415.F2AB4282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42753:49ee2df89cf3 Date: 2011-03-14 17:07 -0400 http://bitbucket.org/pypy/pypy/changeset/49ee2df89cf3/ Log: (dmalcolm, jacob): add handy commands for installing build-time deps on Debian and Fedora boxes Also, removed references to python-ctypes, as translation under 2.4 no longer works diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -35,21 +35,27 @@ You can translate the whole of PyPy's Python interpreter to low level C code, `CLI code`_, or `JVM code`_. -1. Install dependencies. You need (these are Debian package names, - adapt as needed): +1. Install build-time dependencies. On a Debian box these are:: - * ``gcc`` - * ``make`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` + [user at debian-box ~]$ sudo apt-get install \ + gcc make python-dev libffi-dev pkg-config \ + libz-dev libbz2-dev libncurses-dev libexpat1-dev libssl-dev libgc-dev python-sphinx + + On a Fedora box these are:: + + [user at fedora-or-rh-box ~]$ sudo yum install \ + gcc make python-devel libffi-devel pkg-config \ + zlib-devel bzip2-devel ncurses-devel expat-devel openssl-devel gc-devel python-sphinx + + The above command lines are split with continuation characters, giving the necessary dependencies first, then the optional ones. 
+ * ``pkg-config`` (to help us locate libffi files) * ``libz-dev`` (for the optional ``zlib`` module) * ``libbz2-dev`` (for the optional ``bz2`` module) * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) * ``libexpat1-dev`` (for the optional ``pyexpat`` module) * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) + * ``libgc-dev`` (for the Boehm garbage collector: only needed when translating with `--opt=0, 1` or `size`) * ``python-sphinx`` (for the optional documentation build) 2. Translation is somewhat time-consuming (30 min to From commits-noreply at bitbucket.org Thu Mar 17 19:04:22 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:22 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac, jacob): Move various old content into new holding areas to try to shorten the main index Message-ID: <20110317180422.D0E2E282BD7@codespeak.net> Author: David Malcolm Branch: Changeset: r42754:16c40350d347 Date: 2011-03-14 17:36 -0400 http://bitbucket.org/pypy/pypy/changeset/16c40350d347/ Log: (dmalcolm, lac, jacob): Move various old content into new holding areas to try to shorten the main index diff --git a/pypy/doc/dot-net.rst b/pypy/doc/dot-net.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/dot-net.rst @@ -0,0 +1,12 @@ +.NET support +============ + + .. warning:: + + The .NET backend within PyPy is unmaintained. This documentation may be out-of-date. We welcome contributors who are interested in doing the work to get this into shape. + +.. toctree:: + + cli-backend.rst + clr-module.rst + carbonpython.rst diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -8,11 +8,12 @@ Rumors have it that the secret goal is being faster-than-C which is nonsense, isn't it? `more...`_ + +.. The following documentation is important and reasonably up-to-date: + .. toctree:: :maxdepth: 2 - .. 
STUFF THAT'S BEEN THROUGH 1ST PASS CATEGORIZATION: - .. The following stuff is high-value and (vaguely) true: getting-started.rst getting-started-python.rst @@ -51,6 +52,8 @@ jit/overview.rst jit/pyjitpl5.rst + index-of-release-notes.rst + ctypes-implementation.rst .. ^^ needs attention @@ -66,102 +69,6 @@ stackless.rst .. ^^ it still works; needs JIT integration; hasn't been maintained for years - .. The following stuff is good material relating to unmaintained areas of the project: - .. .Net stuff: - cli-backend.rst - clr-module.rst - carbonpython.rst - - .. Release notes: - release-0.6.rst - release-0.7.0.rst - release-0.8.0.rst - release-0.9.0.rst - release-0.99.0.rst - release-1.0.0.rst - release-1.1.0.rst - release-1.2.0.rst - release-1.3.0.rst - release-1.4.0.rst - release-1.4.0beta.rst - release-1.4.1.rst - - - .. The following stuff is old (and crufty?), and needs further investigation: - buildtool.rst - distribution.rst - eventhistory.rst - .. ^^ Incomplete, superceded elsewhere - - externaltools.rst - .. ^^ Incomplete and wrong, superceded elsewhere - - geninterp.rst - .. ^^ apparently dead - - objspace-proxies.rst - - old_news.rst - - sprint-reports.rst - - project-ideas.rst - - rffi.rst - - sandbox.rst - .. ^^ it continues to work, but is unmaintained - - statistic/index.rst - - theory.rst - .. ^^ old ideas; we're not doing it this way any more - - translation-aspects.rst - .. ^^ old and needs updating - - .. This needs merging somehow: - docindex.rst - - .. Needs merging/replacing with hg stuff: - svn-help.rst - - .. 
The following discussions have not yet been categorized: - - discussion/GC-performance.rst - discussion/VM-integration.rst - discussion/chained_getattr.rst - discussion/cli-optimizations.rst - discussion/cmd-prompt-translation.rst - discussion/compiled-swamp.rst - discussion/ctypes_modules.rst - discussion/ctypes_todo.rst - discussion/distribution.rst - discussion/distribution-implementation.rst - discussion/distribution-newattempt.rst - discussion/distribution-roadmap.rst - discussion/emptying-the-malloc-zoo.rst - discussion/finalizer-order.rst - discussion/gc.rst - discussion/howtoimplementpickling.rst - discussion/improve-rpython.rst - discussion/outline-external-ootype.rst - discussion/oz-thread-api.rst - discussion/paper-wishlist.rst - discussion/parsing-ideas.rst - discussion/pypy_metaclasses_in_cl.rst - discussion/removing-stable-compiler.rst - discussion/security-ideas.rst - discussion/somepbc-refactoring-plan.rst - discussion/summer-of-pypy-pytest.rst - discussion/testing-zope.rst - discussion/thoughts_string_interning.rst - discussion/translation-swamp.rst - discussion/use_case_of_logic.rst - - .. STUFF THAT'S DIFFICULT TO CATEGORIZE - video-index.rst - Getting into PyPy ... ============================================= @@ -216,6 +123,7 @@ .. _papers: extradoc.html .. _`Release 1.4`: http://pypy.org/download.html + Indices and tables ================== diff --git a/pypy/doc/index-of-release-notes.rst b/pypy/doc/index-of-release-notes.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/index-of-release-notes.rst @@ -0,0 +1,17 @@ +Historical release notes +------------------------ + +.. 
toctree:: + + release-0.6 + release-0.7.0.rst + release-0.8.0.rst + release-0.9.0.rst + release-0.99.0.rst + release-1.0.0.rst + release-1.1.0.rst + release-1.2.0.rst + release-1.3.0.rst + release-1.4.0.rst + release-1.4.0beta.rst + release-1.4.1.rst diff --git a/pypy/doc/cleanup.rst b/pypy/doc/cleanup.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/cleanup.rst @@ -0,0 +1,87 @@ +Old documentation that needs review +----------------------------------- +.. toctree:: + + .. The following stuff is old (and crufty?), and needs further investigation: + buildtool.rst + distribution.rst + eventhistory.rst + .. ^^ Incomplete, superceded elsewhere + + externaltools.rst + .. ^^ Incomplete and wrong, superceded elsewhere + + geninterp.rst + .. ^^ apparently dead + + objspace-proxies.rst + + old_news.rst + + sprint-reports.rst + + project-ideas.rst + + rffi.rst + + sandbox.rst + .. ^^ it continues to work, but is unmaintained + + statistic/index.rst + + theory.rst + .. ^^ old ideas; we're not doing it this way any more + + translation-aspects.rst + .. ^^ old and needs updating + + .. This needs merging somehow: + docindex.rst + + .. Needs merging/replacing with hg stuff: + svn-help.rst + + dot-net.rst + +Old discussion notes needing categorization +------------------------------------------- +.. toctree:: + + .. 
The following discussions have not yet been categorized: + + discussion/GC-performance.rst + discussion/VM-integration.rst + discussion/chained_getattr.rst + discussion/cli-optimizations.rst + discussion/cmd-prompt-translation.rst + discussion/compiled-swamp.rst + discussion/ctypes_modules.rst + discussion/ctypes_todo.rst + discussion/distribution.rst + discussion/distribution-implementation.rst + discussion/distribution-newattempt.rst + discussion/distribution-roadmap.rst + discussion/emptying-the-malloc-zoo.rst + discussion/finalizer-order.rst + discussion/gc.rst + discussion/howtoimplementpickling.rst + discussion/improve-rpython.rst + discussion/outline-external-ootype.rst + discussion/oz-thread-api.rst + discussion/paper-wishlist.rst + discussion/parsing-ideas.rst + discussion/pypy_metaclasses_in_cl.rst + discussion/removing-stable-compiler.rst + discussion/security-ideas.rst + discussion/somepbc-refactoring-plan.rst + discussion/summer-of-pypy-pytest.rst + discussion/testing-zope.rst + discussion/thoughts_string_interning.rst + discussion/translation-swamp.rst + discussion/use_case_of_logic.rst + + .. 
STUFF THAT'S DIFFICULT TO CATEGORIZE + video-index.rst + + + From commits-noreply at bitbucket.org Thu Mar 17 19:04:28 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:28 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Various minor cleanups (removal of stray "PyPy -" from titles, reorderings of pages) Message-ID: <20110317180428.9E500282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42755:47ed8f9d826b Date: 2011-03-16 14:14 -0400 http://bitbucket.org/pypy/pypy/changeset/47ed8f9d826b/ Log: (dmalcolm, lac): Various minor cleanups (removal of stray "PyPy -" from titles, reorderings of pages) diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -1,5 +1,5 @@ =============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process +Getting Started with the Translation Toolchain and Development Process =============================================================================== .. contents:: diff --git a/pypy/doc/buildtool.rst b/pypy/doc/buildtool.rst --- a/pypy/doc/buildtool.rst +++ b/pypy/doc/buildtool.rst @@ -2,6 +2,8 @@ PyPyBuilder ============ +.. include:: crufty.rst + What is this? ============= diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -12,17 +12,18 @@ .. The following documentation is important and reasonably up-to-date: .. toctree:: - :maxdepth: 2 + :maxdepth: 1 .. 
The following stuff is high-value and (vaguely) true: getting-started.rst getting-started-python.rst getting-started-dev.rst + windows.rst faq.rst architecture.rst coding-guide.rst + cpython_differences.rst cleanup-todo.rst - cpython_differences.rst garbage_collection.rst interpreter.rst objspace.rst @@ -30,7 +31,6 @@ dev_method.rst download.rst extending.rst - windows.rst extradoc.rst .. ^^ integrate this one level up: dcolish? diff --git a/pypy/doc/interpreter.rst b/pypy/doc/interpreter.rst --- a/pypy/doc/interpreter.rst +++ b/pypy/doc/interpreter.rst @@ -1,5 +1,5 @@ =================================== -PyPy - Bytecode Interpreter +Bytecode Interpreter =================================== .. contents:: diff --git a/pypy/doc/architecture.rst b/pypy/doc/architecture.rst --- a/pypy/doc/architecture.rst +++ b/pypy/doc/architecture.rst @@ -1,5 +1,5 @@ ================================================== -PyPy - Goals and Architecture Overview +Goals and Architecture Overview ================================================== .. contents:: diff --git a/pypy/doc/extradoc.rst b/pypy/doc/extradoc.rst --- a/pypy/doc/extradoc.rst +++ b/pypy/doc/extradoc.rst @@ -1,5 +1,5 @@ ================================================= -PyPy - papers, talks and related projects +Papers, talks and related projects ================================================= Papers diff --git a/pypy/doc/getting-started.rst b/pypy/doc/getting-started.rst --- a/pypy/doc/getting-started.rst +++ b/pypy/doc/getting-started.rst @@ -1,7 +1,9 @@ ================================== -PyPy - Getting Started +Getting Started ================================== +.. contents:: + .. _howtopypy: What is PyPy ? diff --git a/pypy/doc/windows.rst b/pypy/doc/windows.rst --- a/pypy/doc/windows.rst +++ b/pypy/doc/windows.rst @@ -1,6 +1,6 @@ -============= -Windows Hints -============= +=============== +PyPy on Windows +=============== Pypy is supported on Windows platforms, starting with Windows 2000. 
The following text gives some hints about how to translate the PyPy diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst --- a/pypy/doc/coding-guide.rst +++ b/pypy/doc/coding-guide.rst @@ -1,5 +1,5 @@ ===================================== -PyPy - Coding Guide +Coding Guide ===================================== .. contents:: diff --git a/pypy/doc/objspace.rst b/pypy/doc/objspace.rst --- a/pypy/doc/objspace.rst +++ b/pypy/doc/objspace.rst @@ -1,5 +1,5 @@ ====================== -PyPy - Object Spaces +Object Spaces ====================== .. contents:: diff --git a/pypy/doc/jit/overview.rst b/pypy/doc/jit/overview.rst --- a/pypy/doc/jit/overview.rst +++ b/pypy/doc/jit/overview.rst @@ -3,7 +3,6 @@ ------------------------------------------------------------------------ .. contents:: -.. sectnum:: This is a non-technical introduction and motivation for PyPy's approach to Just-In-Time compiler generation. From commits-noreply at bitbucket.org Thu Mar 17 19:04:29 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:29 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Add crufty.rst, for marking pages in need of love Message-ID: <20110317180429.8A74C282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42756:1f0bd4422e24 Date: 2011-03-16 14:14 -0400 http://bitbucket.org/pypy/pypy/changeset/1f0bd4422e24/ Log: (dmalcolm, lac): Add crufty.rst, for marking pages in need of love diff --git a/pypy/doc/crufty.rst b/pypy/doc/crufty.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/crufty.rst @@ -0,0 +1,3 @@ +.. 
warning:: + + This documentation may be out-of-date or obsolete (identified on 2011-03-14 at the PyCon US sprint) From commits-noreply at bitbucket.org Thu Mar 17 19:04:32 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:32 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): attempt to fix links to source code within sphinx-generated HTML Message-ID: <20110317180432.A74A5282BDE@codespeak.net> Author: David Malcolm Branch: Changeset: r42757:c6f7ecf2dc01 Date: 2011-03-16 14:41 -0400 http://bitbucket.org/pypy/pypy/changeset/c6f7ecf2dc01/ Log: (dmalcolm, lac): attempt to fix links to source code within sphinx- generated HTML Sphinx generates the html in _build/html, which means that all the links relative to pypy/doc break within the html files. The real fix appears to be to write a plugin to sphinx to handle this more gracefully, perhaps based on http://sphinx.pocoo.org/ext/viewcode.html For now, this changeset merely tries to consolidate all links to source code files to be of the form ../../../../pypy This was achieved using these sed commands: sed -i -e"s|: \.\./objspace/|: ../../../../pypy/objspace/|" *.rst sed -i -e"s|: \.\./\.\./pypy/|: ../../../../pypy/|" *.rst sed -i -e"s|\.\./\.\./pypy/|../../../../pypy/|" getting-started-dev.rst to avoid accidentally modifying paths in other places (e.g. usage examples) diff --git a/pypy/doc/geninterp.rst b/pypy/doc/geninterp.rst --- a/pypy/doc/geninterp.rst +++ b/pypy/doc/geninterp.rst @@ -42,7 +42,7 @@ Example +++++++ -.. _implementation: ../../pypy/translator/geninterplevel.py +.. _implementation: ../../../../pypy/translator/geninterplevel.py Let's try a little example. You might want to look at the flowgraph that it produces. 
Here, we directly run the Python translation and look at the diff --git a/pypy/doc/objspace-proxies.rst b/pypy/doc/objspace-proxies.rst --- a/pypy/doc/objspace-proxies.rst +++ b/pypy/doc/objspace-proxies.rst @@ -607,9 +607,9 @@ lists, dicts, exceptions, tracebacks and frames. .. _`standard object space`: objspace.html#the-standard-object-space -.. _`proxy_helpers.py`: ../../pypy/objspace/std/proxy_helpers.py -.. _`proxyobject.py`: ../../pypy/objspace/std/proxyobject.py -.. _`transparent.py`: ../../pypy/objspace/std/transparent.py +.. _`proxy_helpers.py`: ../../../../pypy/objspace/std/proxy_helpers.py +.. _`proxyobject.py`: ../../../../pypy/objspace/std/proxyobject.py +.. _`transparent.py`: ../../../../pypy/objspace/std/transparent.py .. _`tputil.py`: ../../lib_pypy/tputil.py .. [D12.1] `High-Level Backends and Interpreter Feature Prototypes`, PyPy diff --git a/pypy/doc/translation.rst b/pypy/doc/translation.rst --- a/pypy/doc/translation.rst +++ b/pypy/doc/translation.rst @@ -107,7 +107,7 @@ .. _`abstract interpretation`: theory.html#abstract-interpretation .. _`Flow Object Space`: objspace.html#the-flow-object-space .. _`interactive interface`: getting-started-dev.html#try-out-the-translator -.. _`translatorshell.py`: ../../pypy/bin/translatorshell.py +.. _`translatorshell.py`: ../../../../pypy/bin/translatorshell.py .. _`flow model`: .. _`control flow graphs`: @@ -274,7 +274,7 @@ should not attempt to actually mutate such Constants. .. _`document describing object spaces`: objspace.html -.. _`pypy.objspace.flow.model`: ../objspace/flow/model.py +.. _`pypy.objspace.flow.model`: ../../../../pypy/objspace/flow/model.py .. _Annotator: diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -410,11 +410,11 @@ .. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py .. 
_typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py .. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py +.. _objspace.py: ../../../../pypy/objspace/std/objspace.py +.. _thunk: ../../../../pypy/objspace/thunk.py +.. _trace: ../../../../pypy/objspace/trace.py +.. _flow: ../../../../pypy/objspace/flow/ +.. _translator.py: ../../../../pypy/translator/translator.py .. _mailing lists: index.html .. _documentation: docindex.html .. _unit tests: coding-guide.html#test-design diff --git a/pypy/doc/_ref.rst b/pypy/doc/_ref.rst --- a/pypy/doc/_ref.rst +++ b/pypy/doc/_ref.rst @@ -3,48 +3,48 @@ .. _`lib-python/`: ../../lib-python .. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py .. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion +.. _`pypy/annotation`: ../../../../pypy/annotation +.. _`pypy/annotation/annrpython.py`: ../../../../pypy/annotation/annrpython.py +.. _`annotation/binaryop.py`: ../../../../pypy/annotation/binaryop.py +.. _`pypy/annotation/builtin.py`: ../../../../pypy/annotation/builtin.py +.. _`pypy/annotation/model.py`: ../../../../pypy/annotation/model.py +.. _`bin/`: ../../../../pypy/bin +.. _`config/`: ../../../../pypy/config +.. 
_`pypy/config/pypyoption.py`: ../../../../pypy/config/pypyoption.py +.. _`doc/`: ../../../../pypy/doc +.. _`doc/config/`: ../../../../pypy/doc/config +.. _`doc/discussion/`: ../../../../pypy/doc/discussion .. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py +.. _`pypy/interpreter`: ../../../../pypy/interpreter +.. _`pypy/interpreter/argument.py`: ../../../../pypy/interpreter/argument.py .. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py +.. _`pypy/interpreter/astcompiler`: ../../../../pypy/interpreter/astcompiler +.. _`pypy/interpreter/executioncontext.py`: ../../../../pypy/interpreter/executioncontext.py +.. _`pypy/interpreter/function.py`: ../../../../pypy/interpreter/function.py .. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py +.. _`pypy/interpreter/gateway.py`: ../../../../pypy/interpreter/gateway.py +.. _`pypy/interpreter/generator.py`: ../../../../pypy/interpreter/generator.py +.. _`pypy/interpreter/mixedmodule.py`: ../../../../pypy/interpreter/mixedmodule.py +.. _`pypy/interpreter/module.py`: ../../../../pypy/interpreter/module.py +.. _`pypy/interpreter/nestedscope.py`: ../../../../pypy/interpreter/nestedscope.py +.. _`pypy/interpreter/pyopcode.py`: ../../../../pypy/interpreter/pyopcode.py .. _`interpreter/pyparser/`: -.. 
_`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py +.. _`pypy/interpreter/pyparser`: ../../../../pypy/interpreter/pyparser +.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../../../pypy/interpreter/pyparser/pytokenizer.py +.. _`pypy/interpreter/pyparser/parser.py`: ../../../../pypy/interpreter/pyparser/parser.py +.. _`pypy/interpreter/pyparser/pyparse.py`: ../../../../pypy/interpreter/pyparser/pyparse.py +.. _`pypy/interpreter/pyparser/future.py`: ../../../../pypy/interpreter/pyparser/future.py +.. 
_`pypy/interpreter/pyparser/metaparser.py`: ../../../../pypy/interpreter/pyparser/metaparser.py +.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../../../pypy/interpreter/astcompiler/astbuilder.py +.. _`pypy/interpreter/astcompiler/optimize.py`: ../../../../pypy/interpreter/astcompiler/optimize.py +.. _`pypy/interpreter/astcompiler/codegen.py`: ../../../../pypy/interpreter/astcompiler/codegen.py +.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../../../pypy/interpreter/astcompiler/tools/asdl_py.py +.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../../../pypy/interpreter/astcompiler/tools/Python.asdl +.. _`pypy/interpreter/astcompiler/assemble.py`: ../../../../pypy/interpreter/astcompiler/assemble.py +.. _`pypy/interpreter/astcompiler/symtable.py`: ../../../../pypy/interpreter/astcompiler/symtable.py +.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../../../pypy/interpreter/astcompiler/asthelpers.py +.. _`pypy/interpreter/astcompiler/ast.py`: ../../../../pypy/interpreter/astcompiler/ast.py +.. _`pypy/interpreter/typedef.py`: ../../../../pypy/interpreter/typedef.py .. _`lib/`: .. _`lib_pypy/`: ../../lib_pypy .. _`lib/distributed/`: ../../lib_pypy/distributed @@ -52,56 +52,56 @@ .. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test .. _`module/`: .. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py +.. _`pypy/module/`: ../../../../pypy/module +.. _`pypy/module/__builtin__/__init__.py`: ../../../../pypy/module/__builtin__/__init__.py +.. _`pypy/module/_stackless/test/test_clonable.py`: ../../../../pypy/module/_stackless/test/test_clonable.py +.. 
_`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../../../pypy/module/_stackless/test/test_composable_coroutine.py .. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow +.. _`pypy/objspace`: ../../../../pypy/objspace +.. _`objspace/dump.py`: ../../../../pypy/objspace/dump.py +.. _`objspace/flow/`: ../../../../pypy/objspace/flow .. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py +.. _`pypy/objspace/std`: ../../../../pypy/objspace/std +.. _`objspace/taint.py`: ../../../../pypy/objspace/taint.py .. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py +.. _`pypy/objspace/thunk.py`: ../../../../pypy/objspace/thunk.py .. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py +.. _`pypy/objspace/trace.py`: ../../../../pypy/objspace/trace.py .. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test +.. _`rlib/`: ../../../../pypy/rlib +.. _`pypy/rlib/rarithmetic.py`: ../../../../pypy/rlib/rarithmetic.py +.. _`pypy/rlib/test`: ../../../../pypy/rlib/test .. _`pypy/rpython`: .. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem +.. _`rpython/`: ../../../../pypy/rpython +.. _`rpython/lltypesystem/`: ../../../../pypy/rpython/lltypesystem .. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. 
_`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest +.. _`rpython/lltypesystem/lltype.py`: ../../../../pypy/rpython/lltypesystem/lltype.py +.. _`rpython/memory/`: ../../../../pypy/rpython/memory +.. _`rpython/memory/gc/generation.py`: ../../../../pypy/rpython/memory/gc/generation.py +.. _`rpython/memory/gc/hybrid.py`: ../../../../pypy/rpython/memory/gc/hybrid.py +.. _`rpython/memory/gc/markcompact.py`: ../../../../pypy/rpython/memory/gc/markcompact.py +.. _`rpython/memory/gc/marksweep.py`: ../../../../pypy/rpython/memory/gc/marksweep.py +.. _`rpython/memory/gc/semispace.py`: ../../../../pypy/rpython/memory/gc/semispace.py +.. _`rpython/ootypesystem/`: ../../../../pypy/rpython/ootypesystem +.. _`rpython/ootypesystem/ootype.py`: ../../../../pypy/rpython/ootypesystem/ootype.py +.. _`rpython/rint.py`: ../../../../pypy/rpython/rint.py +.. _`rpython/rlist.py`: ../../../../pypy/rpython/rlist.py +.. _`rpython/rmodel.py`: ../../../../pypy/rpython/rmodel.py +.. _`pypy/rpython/rtyper.py`: ../../../../pypy/rpython/rtyper.py +.. _`pypy/rpython/test/test_llinterp.py`: ../../../../pypy/rpython/test/test_llinterp.py +.. _`pypy/test_all.py`: ../../../../pypy/test_all.py +.. _`tool/`: ../../../../pypy/tool +.. _`tool/algo/`: ../../../../pypy/tool/algo +.. 
_`tool/pytest/`: ../../../../pypy/tool/pytest .. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool +.. _`translator/`: ../../../../pypy/translator +.. _`translator/backendopt/`: ../../../../pypy/translator/backendopt +.. _`translator/c/`: ../../../../pypy/translator/c +.. _`translator/cli/`: ../../../../pypy/translator/cli +.. _`translator/goal/`: ../../../../pypy/translator/goal +.. _`pypy/translator/goal/targetnopstandalone.py`: ../../../../pypy/translator/goal/targetnopstandalone.py +.. _`translator/jvm/`: ../../../../pypy/translator/jvm +.. _`translator/stackless/`: ../../../../pypy/translator/stackless +.. _`translator/tool/`: ../../../../pypy/translator/tool .. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/rlib.rst b/pypy/doc/rlib.rst --- a/pypy/doc/rlib.rst +++ b/pypy/doc/rlib.rst @@ -14,8 +14,8 @@ to change at some point. Usually it is useful to look at the tests in `pypy/rlib/test`_ to get an impression of how to use a module. -.. _`pypy/rlib`: ../../pypy/rlib -.. _`pypy/rlib/test`: ../../pypy/rlib/test +.. _`pypy/rlib`: ../../../../pypy/rlib +.. _`pypy/rlib/test`: ../../../../pypy/rlib/test ``listsort`` ============ @@ -29,7 +29,7 @@ be sorted using the ``listsort`` module in one program, otherwise the annotator will be confused. -.. _listsort: ../../pypy/rlib/listsort.py +.. 
_listsort: ../../../../pypy/rlib/listsort.py ``nonconst`` ============ @@ -41,7 +41,7 @@ ``NonConst`` will behave during annotation like that value, but no constant folding will happen. -.. _nonconst: ../../pypy/rlib/nonconst.py +.. _nonconst: ../../../../pypy/rlib/nonconst.py .. _`flow object space`: objspace.html#the-flow-object-space .. _`annotator`: translation.html#the-annotation-pass @@ -95,7 +95,7 @@ won't be allocated but represented by *tagged pointers**, that is pointers that have the lowest bit set. -.. _objectmodel: ../../pypy/rlib/objectmodel.py +.. _objectmodel: ../../../../pypy/rlib/objectmodel.py ``rarithmetic`` @@ -105,7 +105,7 @@ in the behaviour of arithmetic code in regular Python and RPython code. Most of them are already described in the `coding guide`_ -.. _rarithmetic: ../../pypy/rlib/rarithmetic.py +.. _rarithmetic: ../../../../pypy/rlib/rarithmetic.py .. _`coding guide`: coding-guide.html @@ -122,7 +122,7 @@ these underscores left out for better readability (so ``a.add(b)`` can be used to add two rbigint instances). -.. _rbigint: ../../pypy/rlib/rbigint.py +.. _rbigint: ../../../../pypy/rlib/rbigint.py ``rrandom`` @@ -133,7 +133,7 @@ ``random`` method which returns a pseudo-random floating point number between 0.0 and 1.0. -.. _rrandom: ../../pypy/rlib/rrandom.py +.. _rrandom: ../../../../pypy/rlib/rrandom.py ``rsocket`` =========== @@ -145,7 +145,7 @@ so on, which is not suitable for RPython. Instead, ``rsocket`` contains a hierarchy of Address classes, in a typical static-OO-programming style. -.. _rsocket: ../../pypy/rlib/rsocket.py +.. _rsocket: ../../../../pypy/rlib/rsocket.py ``rstack`` @@ -210,7 +210,7 @@ f() -.. _rstack: ../../pypy/rlib/rstack.py +.. _rstack: ../../../../pypy/rlib/rstack.py ``streamio`` @@ -220,7 +220,7 @@ by Guido van Rossum as `sio.py`_ in the CPython sandbox as a prototype for the upcoming new file implementation in Python 3000). -.. _streamio: ../../pypy/rlib/streamio.py +.. 
_streamio: ../../../../pypy/rlib/streamio.py .. _`sio.py`: http://svn.python.org/view/sandbox/trunk/sio/sio.py ``unroll`` @@ -230,7 +230,7 @@ which wraps an iterator. Looping over the iterator in RPython code will not produce a loop in the resulting flow graph but will unroll the loop instead. -.. _unroll: ../../pypy/rlib/unroll.py +.. _unroll: ../../../../pypy/rlib/unroll.py ``parsing`` =========== @@ -359,7 +359,7 @@ of the nonterminal and ``children`` which is a list of the children attributes. -.. _`pypy.rlib.parsing.tree`: ../../pypy/rlib/parsing/tree.py +.. _`pypy.rlib.parsing.tree`: ../../../../pypy/rlib/parsing/tree.py Visitors ++++++++ @@ -531,5 +531,5 @@ .. _`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _parsing: ../../pypy/rlib/parsing/ +.. _parsing: ../../../../pypy/rlib/parsing/ .. _`json format`: http://www.json.org diff --git a/pypy/doc/coding-guide.rst b/pypy/doc/coding-guide.rst --- a/pypy/doc/coding-guide.rst +++ b/pypy/doc/coding-guide.rst @@ -354,7 +354,7 @@ silent wrap-around. Whenever we need more control, we use the following helpers (which live the `pypy/rlib/rarithmetic.py`_): -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py +.. _`pypy/rlib/rarithmetic.py`: ../../../../pypy/rlib/rarithmetic.py **ovfcheck()** diff --git a/pypy/doc/objspace.rst b/pypy/doc/objspace.rst --- a/pypy/doc/objspace.rst +++ b/pypy/doc/objspace.rst @@ -341,7 +341,7 @@ using plain integers instead is the complex path, not the other way around. -.. _StdObjSpace: ../objspace/std/ +.. _StdObjSpace: ../../../../pypy/objspace/std/ Object types @@ -394,10 +394,10 @@ For other examples of multiple implementations of the same Python type, see `Standard Interpreter Optimizations`_. -.. _`listtype.py`: ../objspace/std/listtype.py -.. _`stringtype.py`: ../objspace/std/stringtype.py -.. _`tupletype.py`: ../objspace/std/tupletype.py -.. _`tupleobject.py`: ../objspace/std/tupleobject.py +.. 
_`listtype.py`: ../../../../pypy/objspace/std/listtype.py +.. _`stringtype.py`: ../../../../pypy/objspace/std/stringtype.py +.. _`tupletype.py`: ../../../../pypy/objspace/std/tupletype.py +.. _`tupleobject.py`: ../../../../pypy/objspace/std/tupleobject.py .. _`Standard Interpreter Optimizations`: interpreter-optimizations.html @@ -412,7 +412,7 @@ alone are not enough for the Standard Object Space: the complete picture spans several levels in order to emulate the exact Python semantics. -.. __: ../objspace/std/multimethod.py +.. __: ../../../../pypy/objspace/std/multimethod.py Consider the example of the ``space.getitem(w_a, w_b)`` operation, corresponding to the application-level syntax ``a[b]``. The Standard @@ -600,7 +600,7 @@ v3 = add(v2, Constant(2)) -.. _FlowObjSpace: ../objspace/flow/ +.. _FlowObjSpace: ../../../../pypy/objspace/flow/ The Flow model From commits-noreply at bitbucket.org Thu Mar 17 19:04:36 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:36 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Remove redundant download.rst page, instead using the main site's download page Message-ID: <20110317180436.71B43282BDB@codespeak.net> Author: David Malcolm Branch: Changeset: r42758:fc0e6b399ee7 Date: 2011-03-16 14:48 -0400 http://bitbucket.org/pypy/pypy/changeset/fc0e6b399ee7/ Log: (dmalcolm, lac): Remove redundant download.rst page, instead using the main site's download page diff --git a/pypy/doc/getting-started.rst b/pypy/doc/getting-started.rst --- a/pypy/doc/getting-started.rst +++ b/pypy/doc/getting-started.rst @@ -33,7 +33,7 @@ repository using mercurial. We suggest using mercurial if one wants to access the current development. -.. _`downloading them from the download page`: download.html +.. _`downloading them from the download page`: http://pypy.org/download.html If you choose to use mercurial, first make sure you have ``subversion`` installed. 
diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -29,7 +29,6 @@ objspace.rst dev_method.rst - download.rst extending.rst extradoc.rst diff --git a/pypy/doc/download.rst b/pypy/doc/download.rst deleted file mode 100644 --- a/pypy/doc/download.rst +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. _`pypy.org`: http://pypy.org/download.html From commits-noreply at bitbucket.org Thu Mar 17 19:04:37 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:37 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Move maemo.rst from index.rst to cleanup.rst Message-ID: <20110317180437.DBE87282BD7@codespeak.net> Author: David Malcolm Branch: Changeset: r42759:734564b77f53 Date: 2011-03-16 14:53 -0400 http://bitbucket.org/pypy/pypy/changeset/734564b77f53/ Log: (dmalcolm, lac): Move maemo.rst from index.rst to cleanup.rst diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -62,9 +62,6 @@ index-report.rst .. ^^ of historic interest, and about EU fundraising - maemo.rst - .. ^^ obscure corner; not sure of status - stackless.rst .. ^^ it still works; needs JIT integration; hasn't been maintained for years diff --git a/pypy/doc/cleanup.rst b/pypy/doc/cleanup.rst --- a/pypy/doc/cleanup.rst +++ b/pypy/doc/cleanup.rst @@ -43,6 +43,9 @@ dot-net.rst + maemo.rst + .. ^^ obscure corner; not sure of status + Old discussion notes needing categorization ------------------------------------------- .. 
toctree:: From commits-noreply at bitbucket.org Thu Mar 17 19:04:38 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:38 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Reorder the index page, placing the high-level information at the top Message-ID: <20110317180438.C05B8282BD7@codespeak.net> Author: David Malcolm Branch: Changeset: r42760:246d7e8c0c2d Date: 2011-03-16 14:55 -0400 http://bitbucket.org/pypy/pypy/changeset/246d7e8c0c2d/ Log: (dmalcolm, lac): Reorder the index page, placing the high-level information at the top diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -8,6 +8,62 @@ Rumors have it that the secret goal is being faster-than-C which is nonsense, isn't it? `more...`_ +Getting into PyPy ... +============================================= + +* `Release 1.4`_: the latest official release + +* `PyPy Blog`_: news and status info about PyPy + +* `Documentation`_: extensive documentation and papers_ about PyPy. + +* `Getting Started`_: Getting started and playing with PyPy. + +Mailing lists, bug tracker, IRC channel +============================================= + +* `Development mailing list`_: development and conceptual + discussions. + +* `Subversion commit mailing list`_: updates to code and + documentation. + +* `Development bug/feature tracker`_: filing bugs and feature requests. + +* `Sprint mailing list`_: mailing list for organizing upcoming sprints. + +* **IRC channel #pypy on freenode**: Many of the core developers are hanging out + at #pypy on irc.freenode.net. You are welcome to join and ask questions + (if they are not already developed in the FAQ_). + You can find logs of the channel here_. + +.. XXX play1? + +Meeting PyPy developers +======================= + +The PyPy developers are organizing sprints and presenting results at +conferences all year round. They will be happy to meet in person with +anyone interested in the project. 
Watch out for sprint announcements +on the `development mailing list`_. + +.. _Python: http://docs.python.org/index.html +.. _`more...`: architecture.html#mission-statement +.. _`PyPy blog`: http://morepypy.blogspot.com/ +.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ +.. _here: http://tismerysoft.de/pypy/irc-logs/pypy +.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint +.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn +.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev +.. _`FAQ`: faq.html +.. _`Documentation`: docindex.html +.. _`Getting Started`: getting-started.html +.. _papers: extradoc.html +.. _`Release 1.4`: http://pypy.org/download.html + + +Detailed Documentation +====================== .. The following documentation is important and reasonably up-to-date: @@ -66,60 +122,6 @@ .. ^^ it still works; needs JIT integration; hasn't been maintained for years -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? 
- -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. _`Release 1.4`: http://pypy.org/download.html - - Indices and tables ================== From commits-noreply at bitbucket.org Thu Mar 17 19:04:40 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:40 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Add discussions page, adding to the bottom of the main index page Message-ID: <20110317180440.A7566282BD7@codespeak.net> Author: David Malcolm Branch: Changeset: r42761:a9bf9909470c Date: 2011-03-16 15:01 -0400 http://bitbucket.org/pypy/pypy/changeset/a9bf9909470c/ Log: (dmalcolm, lac): Add discussions page, adding to the bottom of the main index page diff --git a/pypy/doc/discussions.rst b/pypy/doc/discussions.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/discussions.rst @@ -0,0 +1,41 @@ +Old discussion notes needing categorization +------------------------------------------- + +The following are old discussion notes which may or may not reflect the current reality. 
+ +Help from domain experts would be welcome, since some of these documents probably ought to be moved to a more prominent location, some should be deleted, and some left here. + + +.. toctree:: + + discussion/GC-performance.rst + discussion/VM-integration.rst + discussion/chained_getattr.rst + discussion/cli-optimizations.rst + discussion/cmd-prompt-translation.rst + discussion/compiled-swamp.rst + discussion/ctypes_modules.rst + discussion/ctypes_todo.rst + discussion/distribution.rst + discussion/distribution-implementation.rst + discussion/distribution-newattempt.rst + discussion/distribution-roadmap.rst + discussion/emptying-the-malloc-zoo.rst + discussion/finalizer-order.rst + discussion/gc.rst + discussion/howtoimplementpickling.rst + discussion/improve-rpython.rst + discussion/outline-external-ootype.rst + discussion/oz-thread-api.rst + discussion/paper-wishlist.rst + discussion/parsing-ideas.rst + discussion/pypy_metaclasses_in_cl.rst + discussion/removing-stable-compiler.rst + discussion/security-ideas.rst + discussion/somepbc-refactoring-plan.rst + discussion/summer-of-pypy-pytest.rst + discussion/testing-zope.rst + discussion/thoughts_string_interning.rst + discussion/translation-swamp.rst + discussion/use_case_of_logic.rst + diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -121,6 +121,8 @@ stackless.rst .. ^^ it still works; needs JIT integration; hasn't been maintained for years + discussions.rst + Indices and tables ================== diff --git a/pypy/doc/cleanup.rst b/pypy/doc/cleanup.rst --- a/pypy/doc/cleanup.rst +++ b/pypy/doc/cleanup.rst @@ -46,43 +46,6 @@ maemo.rst .. ^^ obscure corner; not sure of status -Old discussion notes needing categorization -------------------------------------------- -.. toctree:: - - .. 
The following discussions have not yet been categorized: - - discussion/GC-performance.rst - discussion/VM-integration.rst - discussion/chained_getattr.rst - discussion/cli-optimizations.rst - discussion/cmd-prompt-translation.rst - discussion/compiled-swamp.rst - discussion/ctypes_modules.rst - discussion/ctypes_todo.rst - discussion/distribution.rst - discussion/distribution-implementation.rst - discussion/distribution-newattempt.rst - discussion/distribution-roadmap.rst - discussion/emptying-the-malloc-zoo.rst - discussion/finalizer-order.rst - discussion/gc.rst - discussion/howtoimplementpickling.rst - discussion/improve-rpython.rst - discussion/outline-external-ootype.rst - discussion/oz-thread-api.rst - discussion/paper-wishlist.rst - discussion/parsing-ideas.rst - discussion/pypy_metaclasses_in_cl.rst - discussion/removing-stable-compiler.rst - discussion/security-ideas.rst - discussion/somepbc-refactoring-plan.rst - discussion/summer-of-pypy-pytest.rst - discussion/testing-zope.rst - discussion/thoughts_string_interning.rst - discussion/translation-swamp.rst - discussion/use_case_of_logic.rst - .. STUFF THAT'S DIFFICULT TO CATEGORIZE video-index.rst From commits-noreply at bitbucket.org Thu Mar 17 19:04:45 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:45 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Add links to sprint reports to bottom of front page Message-ID: <20110317180445.05F8F282C1B@codespeak.net> Author: David Malcolm Branch: Changeset: r42762:b2e52db1401b Date: 2011-03-16 15:23 -0400 http://bitbucket.org/pypy/pypy/changeset/b2e52db1401b/ Log: (dmalcolm, lac): Add links to sprint reports to bottom of front page diff --git a/pypy/doc/sprint-reports.rst b/pypy/doc/sprint-reports.rst --- a/pypy/doc/sprint-reports.rst +++ b/pypy/doc/sprint-reports.rst @@ -78,3 +78,6 @@ .. _`CERN (July 2010)`: http://morepypy.blogspot.com/2010/07/cern-sprint-report-wrapping-c-libraries.html .. 
_`Düsseldorf (October 2010)`: http://morepypy.blogspot.com/2010/10/dusseldorf-sprint-report-2010.html +Further event notes: + +* :ref:`eventhistory.rst` diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -123,6 +123,11 @@ discussions.rst + cleanup.rst + + sprint-reports.rst + + eventhistory.rst Indices and tables ================== diff --git a/pypy/doc/eventhistory.rst b/pypy/doc/eventhistory.rst --- a/pypy/doc/eventhistory.rst +++ b/pypy/doc/eventhistory.rst @@ -1,4 +1,6 @@ - +------------ +More sprints +------------ The PyPy project is a worldwide collaborative effort and its members are organizing sprints and presenting results at conferences diff --git a/pypy/doc/cleanup.rst b/pypy/doc/cleanup.rst --- a/pypy/doc/cleanup.rst +++ b/pypy/doc/cleanup.rst @@ -5,7 +5,6 @@ .. The following stuff is old (and crufty?), and needs further investigation: buildtool.rst distribution.rst - eventhistory.rst .. ^^ Incomplete, superceded elsewhere externaltools.rst @@ -18,8 +17,6 @@ old_news.rst - sprint-reports.rst - project-ideas.rst rffi.rst From commits-noreply at bitbucket.org Thu Mar 17 19:04:46 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:46 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Add links to papers and to videos near the top of the index page Message-ID: <20110317180446.0BA48282BDB@codespeak.net> Author: David Malcolm Branch: Changeset: r42763:fd5be84f2d3d Date: 2011-03-16 15:32 -0400 http://bitbucket.org/pypy/pypy/changeset/fd5be84f2d3d/ Log: (dmalcolm, lac): Add links to papers and to videos near the top of the index page diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -19,6 +19,11 @@ * `Getting Started`_: Getting started and playing with PyPy. 
+* `Papers`_: Academic papers, talks, and related projects + +* `Videos`_: Videos of PyPy talks and presentations + + Mailing lists, bug tracker, IRC channel ============================================= @@ -58,10 +63,10 @@ .. _`FAQ`: faq.html .. _`Documentation`: docindex.html .. _`Getting Started`: getting-started.html -.. _papers: extradoc.html +.. _`Papers`: extradoc.html +.. _`Videos`: video-index.html .. _`Release 1.4`: http://pypy.org/download.html - Detailed Documentation ====================== From commits-noreply at bitbucket.org Thu Mar 17 19:04:46 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:46 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Add a link to speed.pypy.org Message-ID: <20110317180446.E6606282BDB@codespeak.net> Author: David Malcolm Branch: Changeset: r42764:11a3f52b3c39 Date: 2011-03-16 15:43 -0400 http://bitbucket.org/pypy/pypy/changeset/11a3f52b3c39/ Log: (dmalcolm, lac): Add a link to speed.pypy.org diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -15,7 +15,7 @@ * `PyPy Blog`_: news and status info about PyPy -* `Documentation`_: extensive documentation and papers_ about PyPy. +* `Documentation`_: extensive documentation about PyPy. * `Getting Started`_: Getting started and playing with PyPy. @@ -23,6 +23,8 @@ * `Videos`_: Videos of PyPy talks and presentations +* `speed.pypy.org`_: Daily benchmarks of how fast PyPy is + Mailing lists, bug tracker, IRC channel ============================================= @@ -66,6 +68,7 @@ .. _`Papers`: extradoc.html .. _`Videos`: video-index.html .. _`Release 1.4`: http://pypy.org/download.html +.. 
_`speed.pypy.org`: http://speed.pypy.org Detailed Documentation ====================== From commits-noreply at bitbucket.org Thu Mar 17 19:04:52 2011 From: commits-noreply at bitbucket.org (dmalcolm) Date: Thu, 17 Mar 2011 19:04:52 +0100 (CET) Subject: [pypy-svn] pypy default: (dmalcolm, lac): Move various comments from inside toctree directives to other locations, as it causes warnings Message-ID: <20110317180452.BF970282BD6@codespeak.net> Author: David Malcolm Branch: Changeset: r42765:177cd3582f16 Date: 2011-03-16 16:04 -0400 http://bitbucket.org/pypy/pypy/changeset/177cd3582f16/ Log: (dmalcolm, lac): Move various comments from inside toctree directives to other locations, as it causes warnings diff --git a/pypy/doc/geninterp.rst b/pypy/doc/geninterp.rst --- a/pypy/doc/geninterp.rst +++ b/pypy/doc/geninterp.rst @@ -1,3 +1,7 @@ +.. include:: crufty.rst + + .. ^^ apparently dead + The Interpreter-Level backend ----------------------------- diff --git a/pypy/doc/sandbox.rst b/pypy/doc/sandbox.rst --- a/pypy/doc/sandbox.rst +++ b/pypy/doc/sandbox.rst @@ -1,3 +1,7 @@ +.. include:: crufty.rst + + .. ^^ it continues to work, but is unmaintained + PyPy's sandboxing features ========================== diff --git a/pypy/doc/index.rst b/pypy/doc/index.rst --- a/pypy/doc/index.rst +++ b/pypy/doc/index.rst @@ -75,10 +75,12 @@ .. The following documentation is important and reasonably up-to-date: +.. extradoc: should this be integrated one level up: dcolish? + + .. toctree:: :maxdepth: 1 - .. The following stuff is high-value and (vaguely) true: getting-started.rst getting-started-python.rst getting-started-dev.rst @@ -96,13 +98,11 @@ extending.rst extradoc.rst - .. ^^ integrate this one level up: dcolish? glossary.rst contributor.rst - .. True, high-detail: interpreter-optimizations.rst configuration.rst low-level-encapsulation.rst @@ -118,16 +118,12 @@ index-of-release-notes.rst ctypes-implementation.rst - .. ^^ needs attention how-to-release.rst - .. 
^^ needs attention index-report.rst - .. ^^ of historic interest, and about EU fundraising stackless.rst - .. ^^ it still works; needs JIT integration; hasn't been maintained for years discussions.rst diff --git a/pypy/doc/how-to-release.rst b/pypy/doc/how-to-release.rst --- a/pypy/doc/how-to-release.rst +++ b/pypy/doc/how-to-release.rst @@ -1,3 +1,5 @@ +.. include:: crufty.rst + Making a PyPy Release ======================= diff --git a/pypy/doc/externaltools.rst b/pypy/doc/externaltools.rst --- a/pypy/doc/externaltools.rst +++ b/pypy/doc/externaltools.rst @@ -1,3 +1,7 @@ +.. include:: crufty.rst + + .. ^^ Incomplete and wrong, superceded elsewhere + External tools&programs needed by PyPy ====================================== diff --git a/pypy/doc/ctypes-implementation.rst b/pypy/doc/ctypes-implementation.rst --- a/pypy/doc/ctypes-implementation.rst +++ b/pypy/doc/ctypes-implementation.rst @@ -1,3 +1,4 @@ +.. include:: crufty.rst ============================= PyPy's ctypes implementation diff --git a/pypy/doc/distribution.rst b/pypy/doc/distribution.rst --- a/pypy/doc/distribution.rst +++ b/pypy/doc/distribution.rst @@ -1,3 +1,6 @@ +.. include:: crufty.rst + + .. ^^ Incomplete, superceded elsewhere ======================== lib/distributed features diff --git a/pypy/doc/index-report.rst b/pypy/doc/index-report.rst --- a/pypy/doc/index-report.rst +++ b/pypy/doc/index-report.rst @@ -1,3 +1,5 @@ +.. include:: crufty.rst + ============================================ PyPy - Overview over the EU-reports ============================================ diff --git a/pypy/doc/stackless.rst b/pypy/doc/stackless.rst --- a/pypy/doc/stackless.rst +++ b/pypy/doc/stackless.rst @@ -2,9 +2,15 @@ Application-level Stackless features ========================================================== + + Introduction ================ +.. include:: crufty.rst + + .. 
apparently this still works; needs JIT integration; hasn't been maintained for years + PyPy can expose to its user language features similar to the ones present in `Stackless Python`_: **no recursion depth limit**, and the ability to write code in a **massively concurrent style**. It actually diff --git a/pypy/doc/cleanup.rst b/pypy/doc/cleanup.rst --- a/pypy/doc/cleanup.rst +++ b/pypy/doc/cleanup.rst @@ -1,17 +1,21 @@ Old documentation that needs review ----------------------------------- + +.. The following stuff is old (and crufty?), and needs further investigation: + +.. doc-index: This needs merging somehow + +.. svn-help.rst: Needs merging/replacing with hg stuff: + + .. toctree:: - .. The following stuff is old (and crufty?), and needs further investigation: buildtool.rst distribution.rst - .. ^^ Incomplete, superceded elsewhere externaltools.rst - .. ^^ Incomplete and wrong, superceded elsewhere geninterp.rst - .. ^^ apparently dead objspace-proxies.rst @@ -22,29 +26,21 @@ rffi.rst sandbox.rst - .. ^^ it continues to work, but is unmaintained statistic/index.rst theory.rst - .. ^^ old ideas; we're not doing it this way any more translation-aspects.rst - .. ^^ old and needs updating - .. This needs merging somehow: docindex.rst - .. Needs merging/replacing with hg stuff: svn-help.rst dot-net.rst maemo.rst - .. ^^ obscure corner; not sure of status - .. STUFF THAT'S DIFFICULT TO CATEGORIZE - video-index.rst diff --git a/pypy/doc/translation-aspects.rst b/pypy/doc/translation-aspects.rst --- a/pypy/doc/translation-aspects.rst +++ b/pypy/doc/translation-aspects.rst @@ -1,3 +1,6 @@ +.. include:: crufty.rst +.. 
^^ old and needs updating + ========================================================================================== Memory management and threading models as translation aspects -- solutions and challenges ========================================================================================== diff --git a/pypy/doc/theory.rst b/pypy/doc/theory.rst --- a/pypy/doc/theory.rst +++ b/pypy/doc/theory.rst @@ -1,3 +1,7 @@ +.. include:: crufty.rst + + .. ^^ old ideas; we're not doing it this way any more + =================================== Techniques used in PyPy =================================== From commits-noreply at bitbucket.org Thu Mar 17 19:04:54 2011 From: commits-noreply at bitbucket.org (lac) Date: Thu, 17 Mar 2011 19:04:54 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110317180454.CF454282BE8@codespeak.net> Author: Laura Creighton Branch: Changeset: r42766:b1ebea4e80bd Date: 2011-03-17 18:54 +0100 http://bitbucket.org/pypy/pypy/changeset/b1ebea4e80bd/ Log: merge heads diff --git a/pypy/jit/metainterp/simple_optimize.py b/pypy/jit/metainterp/simple_optimize.py deleted file mode 100644 --- a/pypy/jit/metainterp/simple_optimize.py +++ /dev/null @@ -1,56 +0,0 @@ - -""" Simplified optimize.py -""" - -from pypy.jit.metainterp.resoperation import rop, ResOperation -from pypy.jit.metainterp import resume, compile - -EMPTY_VALUES = {} - -def transform(op): - from pypy.jit.metainterp.history import AbstractDescr - # Rename CALL_PURE and CALL_LOOPINVARIANT to CALL. - # Simplify the VIRTUAL_REF_* so that they don't show up in the backend. 
- if op.getopnum() == rop.CALL_PURE: - op = ResOperation(rop.CALL, op.getarglist()[1:], op.result, - op.getdescr()) - elif op.getopnum() == rop.CALL_LOOPINVARIANT: - op = op.copy_and_change(rop.CALL) - elif op.getopnum() == rop.VIRTUAL_REF: - op = ResOperation(rop.SAME_AS, [op.getarg(0)], op.result) - elif op.getopnum() == rop.VIRTUAL_REF_FINISH: - return [] - return [op] - -def optimize_loop(metainterp_sd, old_loops, loop): - if old_loops: - assert len(old_loops) == 1 - return old_loops[0] - else: - # copy loop operations here - # we need it since the backend can modify those lists, which make - # get_guard_op in compile.py invalid - # in fact, x86 modifies this list for moving GCs - memo = resume.ResumeDataLoopMemo(metainterp_sd) - newoperations = [] - for op in loop.operations: - if op.is_guard(): - descr = op.getdescr() - assert isinstance(descr, compile.ResumeGuardDescr) - modifier = resume.ResumeDataVirtualAdder(descr, memo) - newboxes = modifier.finish(EMPTY_VALUES) - descr.store_final_boxes(op, newboxes) - newoperations.extend(transform(op)) - loop.operations = newoperations - jumpop = newoperations[-1] - if jumpop.getopnum() == rop.JUMP: - jumpop.setdescr(loop.token) - return None - -def optimize_bridge(metainterp_sd, old_loops, loop, inline_short_preamble, - retraced): - optimize_loop(metainterp_sd, [], loop) - jumpop = loop.operations[-1] - if jumpop.getopnum() == rop.JUMP: - jumpop.setdescr(old_loops[0]) - return old_loops[0] From commits-noreply at bitbucket.org Thu Mar 17 19:10:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 19:10:17 +0100 (CET) Subject: [pypy-svn] pypy default: Port this failing test from lib-python. Message-ID: <20110317181017.49378282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42767:859e416f3233 Date: 2011-03-17 14:00 -0400 http://bitbucket.org/pypy/pypy/changeset/859e416f3233/ Log: Port this failing test from lib-python. 
diff --git a/pypy/module/_ast/test/test_ast.py b/pypy/module/_ast/test/test_ast.py --- a/pypy/module/_ast/test/test_ast.py +++ b/pypy/module/_ast/test/test_ast.py @@ -232,3 +232,13 @@ raises(TypeError, ast.Num, 1, 2) raises(TypeError, ast.Num, 1, 2, lineno=0) + + def test_node_identity(self): + import _ast as ast + n1 = ast.Num(1) + n3 = ast.Num(3) + addop = ast.Add() + x = ast.BinOp(n1, addop, n3) + assert x.left == n1 + assert x.op == addop + assert x.right == n3 From commits-noreply at bitbucket.org Thu Mar 17 19:10:18 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 19:10:18 +0100 (CET) Subject: [pypy-svn] pypy default: Force saving in the __dict__ the original object whenever we do a Message-ID: <20110317181018.2FF3C282BD6@codespeak.net> Author: Armin Rigo Branch: Changeset: r42768:eb7adda789aa Date: 2011-03-17 14:09 -0400 http://bitbucket.org/pypy/pypy/changeset/eb7adda789aa/ Log: Force saving in the __dict__ the original object whenever we do a Python-level assignment to a field which, like 'op', gets internally turned into a to_simple_int(). 
diff --git a/pypy/interpreter/astcompiler/tools/asdl_py.py b/pypy/interpreter/astcompiler/tools/asdl_py.py --- a/pypy/interpreter/astcompiler/tools/asdl_py.py +++ b/pypy/interpreter/astcompiler/tools/asdl_py.py @@ -454,6 +454,9 @@ (field.type,), 2) self.emit("w_self.%s = obj.to_simple_int(space)" % (field.name,), 2) + self.emit("# need to save the original object too", 2) + self.emit("w_self.setdictvalue(space, '%s', w_new_value)" + % (field.name,), 2) else: config = (field.name, field.type, repr(field.opt)) self.emit("w_self.%s = space.interp_w(%s, w_new_value, %s)" % diff --git a/pypy/interpreter/astcompiler/ast.py b/pypy/interpreter/astcompiler/ast.py --- a/pypy/interpreter/astcompiler/ast.py +++ b/pypy/interpreter/astcompiler/ast.py @@ -3590,6 +3590,8 @@ try: obj = space.interp_w(operator, w_new_value) w_self.op = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'op', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -4824,6 +4826,8 @@ try: obj = space.interp_w(boolop, w_new_value) w_self.op = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'op', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -4911,6 +4915,8 @@ try: obj = space.interp_w(operator, w_new_value) w_self.op = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'op', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -4980,6 +4986,8 @@ try: obj = space.interp_w(unaryop, w_new_value) w_self.op = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'op', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -6028,6 +6036,8 @@ try: obj = space.interp_w(expr_context, w_new_value) w_self.ctx = obj.to_simple_int(space) + # need to save the original object too + 
w_self.setdictvalue(space, 'ctx', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -6118,6 +6128,8 @@ try: obj = space.interp_w(expr_context, w_new_value) w_self.ctx = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'ctx', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -6187,6 +6199,8 @@ try: obj = space.interp_w(expr_context, w_new_value) w_self.ctx = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'ctx', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -6252,6 +6266,8 @@ try: obj = space.interp_w(expr_context, w_new_value) w_self.ctx = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'ctx', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise @@ -6318,6 +6334,8 @@ try: obj = space.interp_w(expr_context, w_new_value) w_self.ctx = obj.to_simple_int(space) + # need to save the original object too + w_self.setdictvalue(space, 'ctx', w_new_value) except OperationError, e: if not e.match(space, space.w_TypeError): raise From commits-noreply at bitbucket.org Thu Mar 17 19:45:18 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 19:45:18 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: Write a test. Message-ID: <20110317184518.2A208282BD6@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42769:a068a8ba6d68 Date: 2011-03-17 14:44 -0400 http://bitbucket.org/pypy/pypy/changeset/a068a8ba6d68/ Log: Write a test. diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -5,7 +5,7 @@ soon as possible (at least in a simple case). 
""" -import weakref, random +import weakref, random, sys import py from pypy.annotation import policy as annpolicy from pypy.rlib import rgc @@ -19,7 +19,6 @@ from pypy.tool.udir import udir from pypy.jit.backend.x86.arch import IS_X86_64 from pypy.config.translationoption import DEFL_GC -import py.test class X(object): def __init__(self, x=0): @@ -129,6 +128,8 @@ class TestCompileFramework(object): # Test suite using (so far) the minimark GC. + EXTRA_PARAMS = {} + def setup_class(cls): funcs = [] name_to_func = {} @@ -178,7 +179,8 @@ try: GcLLDescr_framework.DEBUG = True cls.cbuilder = compile(get_entry(allfuncs), DEFL_GC, - gcrootfinder="asmgcc", jit=True) + gcrootfinder="asmgcc", jit=True, + **cls.EXTRA_PARAMS) finally: GcLLDescr_framework.DEBUG = OLD_DEBUG @@ -576,3 +578,12 @@ def test_compile_framework_minimal_size_in_nursery(self): self.run('compile_framework_minimal_size_in_nursery') + + +class TestCompressPtr(TestCompileFramework): + EXTRA_PARAMS = {'compressptr': True} + + def setup_class(cls): + if sys.maxint == 2147483647: + py.test.skip("for 64-bit only") + TestCompileFramework.setup_class.im_func(cls) From commits-noreply at bitbucket.org Thu Mar 17 19:45:28 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 17 Mar 2011 19:45:28 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: Merge default. Message-ID: <20110317184528.EA182282BD9@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42770:f456809fb7bd Date: 2011-03-17 14:44 -0400 http://bitbucket.org/pypy/pypy/changeset/f456809fb7bd/ Log: Merge default. diff --git a/pypy/module/readline/test/test_c_readline.py b/pypy/module/readline/test/test_c_readline.py deleted file mode 100644 --- a/pypy/module/readline/test/test_c_readline.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -Directly test the basic ctypes wrappers. 
-""" - -import py -from pypy import conftest; conftest.translation_test_so_skip_if_appdirect() -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -def test_basic_import(): - c_readline.c_rl_initialize() diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py --- a/pypy/rpython/llinterp.py +++ b/pypy/rpython/llinterp.py @@ -840,9 +840,18 @@ def op_gc_thread_run(self): self.heap.thread_run() + def op_gc_thread_start(self): + self.heap.thread_start() + def op_gc_thread_die(self): self.heap.thread_die() + def op_gc_thread_before_fork(self): + raise NotImplementedError + + def op_gc_thread_after_fork(self): + raise NotImplementedError + def op_gc_free(self, addr): # what can you do? pass @@ -1065,20 +1074,6 @@ except OverflowError: self.make_llexception() - def op_llong_neg_ovf(self, x): - assert type(x) is r_longlong - try: - return ovfcheck(-x) - except OverflowError: - self.make_llexception() - - def op_llong_abs_ovf(self, x): - assert type(x) is r_longlong - try: - return ovfcheck(abs(x)) - except OverflowError: - self.make_llexception() - def op_int_lshift_ovf(self, x, y): assert isinstance(x, int) assert isinstance(y, int) diff --git a/pypy/module/__builtin__/app_file_stub.py b/pypy/module/__builtin__/app_file_stub.py deleted file mode 100644 --- a/pypy/module/__builtin__/app_file_stub.py +++ /dev/null @@ -1,20 +0,0 @@ -# NOT_RPYTHON - -class file(object): - """file(name[, mode[, buffering]]) -> file object - -Open a file. The mode can be 'r', 'w' or 'a' for reading (default), -writing or appending. The file will be created if it doesn't exist -when opened for writing or appending; it will be truncated when -opened for writing. Add a 'b' to the mode for binary files. -Add a '+' to the mode to allow simultaneous reading and writing. 
-If the buffering argument is given, 0 means unbuffered, 1 means line -buffered, and larger numbers specify the buffer size. -Add a 'U' to mode to open the file for input with universal newline -support. Any line ending in the input file will be seen as a '\n' -in Python. Also, a file so opened gains the attribute 'newlines'; -the value for this attribute is one of None (no newline read yet), -'\r', '\n', '\r\n' or a tuple containing all the newline types seen. - -Note: open() is an alias for file(). -""" diff --git a/pypy/translator/c/src/stack.h b/pypy/translator/c/src/stack.h --- a/pypy/translator/c/src/stack.h +++ b/pypy/translator/c/src/stack.h @@ -21,7 +21,6 @@ char LL_stack_too_big_slowpath(long); /* returns 0 (ok) or 1 (too big) */ /* some macros referenced from pypy.rlib.rstack */ -#define OP_STACK_CURRENT(r) r = (long)&r #define LL_stack_get_start() ((long)_LLstacktoobig_stack_start) #define LL_stack_get_length() MAX_STACK_SIZE #define LL_stack_get_start_adr() ((long)&_LLstacktoobig_stack_start) /* JIT */ diff --git a/pypy/module/readline/c_readline.py b/pypy/module/readline/c_readline.py deleted file mode 100644 --- a/pypy/module/readline/c_readline.py +++ /dev/null @@ -1,77 +0,0 @@ -from pypy.rpython.tool import rffi_platform as platform -from pypy.rpython.lltypesystem import lltype, rffi -from pypy.interpreter.error import OperationError -from pypy.interpreter.gateway import ObjSpace, interp2app -from pypy.translator.tool.cbuild import ExternalCompilationInfo - -# On various platforms, linking only with libreadline is not enough; -# we also need to link with some variant of curses or libtermcap. -# We follow the logic of CPython below. 
-def try_with_lib(extralibs, **kwds): - global most_recent_error - # at least on Gentoo Linux, readline.h doesn't compile if stdio.h is not - # included before - eci = ExternalCompilationInfo( - includes = ["stdio.h", "readline/readline.h", "readline/history.h"], - libraries = extralibs + ['readline'], - ) - try: - platform.verify_eci(eci) - return eci - except platform.CompilationError, e: - most_recent_error = e - return None - -eci = (try_with_lib([]) or - try_with_lib(['ncursesw']) or - try_with_lib(['ncurses']) or - try_with_lib(['curses']) or - try_with_lib(['termcap'], library_dirs=['/usr/lib/termcap'])) -if eci is None: - raise most_recent_error - -# ____________________________________________________________ - -def external(name, args, result): - return rffi.llexternal(name, args, result, compilation_info=eci) - -# get a binding to c library functions and define their args and return types -# char *readline(char *) -c_readline = external('readline', [rffi.CCHARP], rffi.CCHARP) - -# void rl_initiliaze(void) -c_rl_initialize = external('rl_initialize', [], lltype.Void) - -# void using_history(void) -c_using_history = external('using_history', [], lltype.Void) - -# void add_history(const char *) -c_add_history = external('add_history', [rffi.CCHARP], lltype.Void) - -#------------------------------------------------------------ -# special initialization of readline - -class ReadlineState(object): - lastline = "" # XXX possibly temporary hack -readlinestate = ReadlineState() - -def setup_readline(space, w_module): - c_using_history() - # XXX CPython initializes more stuff here - c_rl_initialize() - # install sys.__raw_input__, a hook that will be used by raw_input() - space.setitem(space.sys.w_dict, space.wrap('__raw_input__'), - space.wrap(app_readline_func)) - -def readline_func(space, prompt): - ll_res = c_readline(prompt) - if not ll_res: - raise OperationError(space.w_EOFError, space.w_None) - res = rffi.charp2str(ll_res) - if res and res != 
readlinestate.lastline: - readlinestate.lastline = res - c_add_history(res) - return space.wrap(res) - -readline_func.unwrap_spec = [ObjSpace, str] -app_readline_func = interp2app(readline_func) diff --git a/pypy/module/readline/app_stub.py b/pypy/module/readline/app_stub.py deleted file mode 100644 --- a/pypy/module/readline/app_stub.py +++ /dev/null @@ -1,13 +0,0 @@ -# NOT_RPYTHON - -def stub(*args, **kwds): - import warnings - warnings.warn("the 'readline' module is only a stub so far") - -def stub_str(*args, **kwds): - stub() - return '' - -def stub_int(*args, **kwds): - stub() - return 0 diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -963,11 +963,32 @@ if hasattr(self.root_walker, 'thread_run_ptr'): hop.genop("direct_call", [self.root_walker.thread_run_ptr]) + def gct_gc_thread_start(self, hop): + assert self.translator.config.translation.thread + if hasattr(self.root_walker, 'thread_start_ptr'): + hop.genop("direct_call", [self.root_walker.thread_start_ptr]) + def gct_gc_thread_die(self, hop): assert self.translator.config.translation.thread if hasattr(self.root_walker, 'thread_die_ptr'): hop.genop("direct_call", [self.root_walker.thread_die_ptr]) + def gct_gc_thread_before_fork(self, hop): + if (self.translator.config.translation.thread + and hasattr(self.root_walker, 'thread_before_fork_ptr')): + hop.genop("direct_call", [self.root_walker.thread_before_fork_ptr], + resultvar=hop.spaceop.result) + else: + c_null = rmodel.inputconst(llmemory.Address, llmemory.NULL) + hop.genop("same_as", [c_null], + resultvar=hop.spaceop.result) + + def gct_gc_thread_after_fork(self, hop): + if (self.translator.config.translation.thread + and hasattr(self.root_walker, 'thread_after_fork_ptr')): + hop.genop("direct_call", [self.root_walker.thread_after_fork_ptr] + + hop.spaceop.args) + def 
gct_gc_get_type_info_group(self, hop): return hop.cast_result(self.c_type_info_group) @@ -1278,6 +1299,7 @@ class BaseRootWalker(object): need_root_stack = False + thread_setup = None def __init__(self, gctransformer): self.gcdata = gctransformer.gcdata @@ -1287,7 +1309,8 @@ return True def setup_root_walker(self): - pass + if self.thread_setup is not None: + self.thread_setup() def walk_roots(self, collect_stack_root, collect_static_in_prebuilt_nongc, @@ -1320,7 +1343,6 @@ class ShadowStackRootWalker(BaseRootWalker): need_root_stack = True - thread_setup = None collect_stacks_from_other_threads = None def __init__(self, gctransformer): @@ -1360,8 +1382,7 @@ ll_assert(bool(stackbase), "could not allocate root stack") self.gcdata.root_stack_top = stackbase self.gcdata.root_stack_base = stackbase - if self.thread_setup is not None: - self.thread_setup() + BaseRootWalker.setup_root_walker(self) def walk_stack_roots(self, collect_stack_root): gcdata = self.gcdata @@ -1426,6 +1447,9 @@ occur in this thread. """ aid = get_aid() + if aid == gcdata.main_thread: + return # ignore calls to thread_die() in the main thread + # (which can occur after a fork()). 
gcdata.thread_stacks.setitem(aid, llmemory.NULL) old = gcdata.root_stack_base if gcdata._fresh_rootstack == llmemory.NULL: @@ -1471,7 +1495,7 @@ gcdata.active_thread = new_aid def collect_stack(aid, stacktop, callback): - if stacktop != llmemory.NULL and aid != get_aid(): + if stacktop != llmemory.NULL and aid != gcdata.active_thread: # collect all valid stacks from the dict (the entry # corresponding to the current thread is not valid) gc = self.gc @@ -1483,11 +1507,45 @@ addr += sizeofaddr def collect_more_stacks(callback): + ll_assert(get_aid() == gcdata.active_thread, + "collect_more_stacks(): invalid active_thread") gcdata.thread_stacks.foreach(collect_stack, callback) + def _free_if_not_current(aid, stacktop, _): + if stacktop != llmemory.NULL and aid != gcdata.active_thread: + end = stacktop - sizeofaddr + base = end.address[0] + llmemory.raw_free(base) + + def thread_after_fork(result_of_fork, opaqueaddr): + # we don't need a thread_before_fork in this case, so + # opaqueaddr == NULL. This is called after fork(). + if result_of_fork == 0: + # We are in the child process. Assumes that only the + # current thread survived, so frees the shadow stacks + # of all the other ones. + gcdata.thread_stacks.foreach(_free_if_not_current, None) + # Clears the dict (including the current thread, which + # was an invalid entry anyway and will be recreated by + # the next call to save_away_current_stack()). + gcdata.thread_stacks.clear() + # Finally, reset the stored thread IDs, in case it + # changed because of fork(). Also change the main + # thread to the current one (because there is not any + # other left). 
+ aid = get_aid() + gcdata.main_thread = aid + gcdata.active_thread = aid + self.thread_setup = thread_setup self.thread_prepare_ptr = getfn(thread_prepare, [], annmodel.s_None) self.thread_run_ptr = getfn(thread_run, [], annmodel.s_None, inline=True) + # no thread_start_ptr here self.thread_die_ptr = getfn(thread_die, [], annmodel.s_None) + # no thread_before_fork_ptr here + self.thread_after_fork_ptr = getfn(thread_after_fork, + [annmodel.SomeInteger(), + annmodel.SomeAddress()], + annmodel.s_None) self.collect_stacks_from_other_threads = collect_more_stacks diff --git a/pypy/jit/metainterp/simple_optimize.py b/pypy/jit/metainterp/simple_optimize.py --- a/pypy/jit/metainterp/simple_optimize.py +++ b/pypy/jit/metainterp/simple_optimize.py @@ -47,7 +47,8 @@ jumpop.setdescr(loop.token) return None -def optimize_bridge(metainterp_sd, old_loops, loop, inline_short_preamble): +def optimize_bridge(metainterp_sd, old_loops, loop, inline_short_preamble, + retraced): optimize_loop(metainterp_sd, [], loop) jumpop = loop.operations[-1] if jumpop.getopnum() == rop.JUMP: diff --git a/pypy/module/pyexpat/interp_pyexpat.py b/pypy/module/pyexpat/interp_pyexpat.py --- a/pypy/module/pyexpat/interp_pyexpat.py +++ b/pypy/module/pyexpat/interp_pyexpat.py @@ -20,10 +20,6 @@ eci = ExternalCompilationInfo( libraries=[libname], includes=['expat.h'], - pre_include_bits=[ - '#define XML_COMBINED_VERSION' + - ' (10000*XML_MAJOR_VERSION+100*XML_MINOR_VERSION+XML_MICRO_VERSION)', - ], ) eci = rffi_platform.configure_external_library( @@ -54,13 +50,17 @@ 'XML_PARAM_ENTITY_PARSING_UNLESS_STANDALONE', 'XML_PARAM_ENTITY_PARSING_ALWAYS']: locals()[name] = rffi_platform.ConstantInteger(name) - XML_COMBINED_VERSION = rffi_platform.ConstantInteger('XML_COMBINED_VERSION') + XML_MAJOR_VERSION = rffi_platform.ConstantInteger('XML_MAJOR_VERSION') + XML_MINOR_VERSION = rffi_platform.ConstantInteger('XML_MINOR_VERSION') + XML_MICRO_VERSION = rffi_platform.ConstantInteger('XML_MICRO_VERSION') XML_FALSE = 
rffi_platform.ConstantInteger('XML_FALSE') XML_TRUE = rffi_platform.ConstantInteger('XML_TRUE') for k, v in rffi_platform.configure(CConfigure).items(): globals()[k] = v +XML_COMBINED_VERSION = 10000*XML_MAJOR_VERSION+100*XML_MINOR_VERSION+XML_MICRO_VERSION + XML_Content_Ptr.TO.become(rffi.CArray(XML_Content)) XML_Encoding_Ptr = lltype.Ptr(XML_Encoding) @@ -181,8 +181,8 @@ if name in ['ExternalEntityRefHandler', 'NotStandaloneHandler']: result_type = rffi.INT - result_converter = "space.int_w(w_result)" - result_error = "0" + result_converter = "rffi.cast(rffi.INT, space.int_w(w_result))" + result_error = "rffi.cast(rffi.INT, 0)" else: result_type = lltype.Void result_converter = "None" @@ -313,6 +313,18 @@ 'XML_ExternalEntityParserCreate', [XML_Parser, rffi.CCHARP, rffi.CCHARP], XML_Parser) +XML_ExpatVersion = expat_external( + 'XML_ExpatVersion', [], rffi.CCHARP) + +def get_expat_version(space): + return space.wrap(rffi.charp2str(XML_ExpatVersion())) + +def get_expat_version_info(space): + return space.newtuple([ + space.wrap(XML_MAJOR_VERSION), + space.wrap(XML_MINOR_VERSION), + space.wrap(XML_MICRO_VERSION)]) + class W_XMLParserType(Wrappable): def __init__(self, space, parser, w_intern): @@ -583,10 +595,11 @@ msg = "%s: line %d, column %d" % (err, lineno, colno) w_module = space.getbuiltinmodule('pyexpat') w_errorcls = space.getattr(w_module, space.wrap('error')) - w_error = space.call_function( - w_errorcls, - space.wrap(msg), space.wrap(code), - space.wrap(colno), space.wrap(lineno)) + w_error = space.call_function(w_errorcls, space.wrap(msg)) + space.setattr(w_error, space.wrap("code"), space.wrap(code)) + space.setattr(w_error, space.wrap("offset"), space.wrap(colno)) + space.setattr(w_error, space.wrap("lineno"), space.wrap(lineno)) + self.w_error = w_error return OperationError(w_errorcls, w_error) diff --git a/pypy/module/readline/test/test_with_pypy.py b/pypy/module/readline/test/test_with_pypy.py deleted file mode 100644 --- 
a/pypy/module/readline/test/test_with_pypy.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Test the readline library on top of PyPy. The following tests run -in the PyPy interpreter, itself running on top of CPython -""" - -import py -from pypy.conftest import gettestobjspace -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -class AppTestReadline: - - def setup_class(cls): - # enable usage of the readline mixedmodule - space = gettestobjspace(usemodules=('readline',)) - cls.space = space - - def test_basic_import(self): - # this is interpreted by PyPy - import readline - readline.readline - # XXX test more diff --git a/pypy/rlib/rmmap.py b/pypy/rlib/rmmap.py --- a/pypy/rlib/rmmap.py +++ b/pypy/rlib/rmmap.py @@ -21,7 +21,11 @@ class RTypeError(Exception): def __init__(self, message): - self.message = message + self.message = message + +class ROverflowError(Exception): + def __init__(self, message): + self.message = message includes = ["sys/types.h"] if _POSIX: @@ -39,8 +43,6 @@ ) size_t = rffi_platform.SimpleType("size_t", rffi.LONG) off_t = rffi_platform.SimpleType("off_t", rffi.LONG) - if _MS_WINDOWS: - LPSECURITY_ATTRIBUTES = rffi_platform.SimpleType("LPSECURITY_ATTRIBUTES", rffi.CCHARP) constants = {} if _POSIX: @@ -71,6 +73,8 @@ for name in constant_names: setattr(CConfig, name, rffi_platform.ConstantInteger(name)) + from pypy.rlib import rwin32 + from pypy.rlib.rwin32 import HANDLE, LPHANDLE from pypy.rlib.rwin32 import NULL_HANDLE, INVALID_HANDLE_VALUE from pypy.rlib.rwin32 import DWORD, WORD, DWORD_PTR, LPDWORD @@ -128,9 +132,6 @@ _, _get_page_size = external('getpagesize', [], rffi.INT) _get_allocation_granularity = _get_page_size - def _get_error_no(): - return rposix.get_errno() - elif _MS_WINDOWS: class ComplexCConfig: @@ -167,13 +168,6 @@ ("wProcessorRevision", WORD), ]) - SECURITY_ATTRIBUTES = rffi_platform.Struct( - 'SECURITY_ATTRIBUTES', 
[ - ("nLength", DWORD), - ("lpSecurityDescriptor", LPVOID), - ("bInheritHandle", BOOL), - ]) - config = rffi_platform.configure(ComplexCConfig) SYSTEM_INFO = config['SYSTEM_INFO'] SYSTEM_INFO_P = lltype.Ptr(SYSTEM_INFO) @@ -182,18 +176,12 @@ GetFileSize = winexternal('GetFileSize', [HANDLE, LPDWORD], DWORD) GetCurrentProcess = winexternal('GetCurrentProcess', [], HANDLE) DuplicateHandle = winexternal('DuplicateHandle', [HANDLE, HANDLE, HANDLE, LPHANDLE, DWORD, BOOL, DWORD], BOOL) - CreateFileMapping = winexternal('CreateFileMappingA', [HANDLE, LPSECURITY_ATTRIBUTES, DWORD, DWORD, DWORD, LPCSTR], HANDLE) + CreateFileMapping = winexternal('CreateFileMappingA', [HANDLE, rwin32.LPSECURITY_ATTRIBUTES, DWORD, DWORD, DWORD, LPCSTR], HANDLE) MapViewOfFile = winexternal('MapViewOfFile', [HANDLE, DWORD, DWORD, DWORD, SIZE_T], LPCSTR)##!!LPVOID) - CloseHandle = winexternal('CloseHandle', [HANDLE], BOOL) UnmapViewOfFile = winexternal('UnmapViewOfFile', [LPCVOID], BOOL) FlushViewOfFile = winexternal('FlushViewOfFile', [LPCVOID, SIZE_T], BOOL) SetFilePointer = winexternal('SetFilePointer', [HANDLE, LONG, PLONG, DWORD], DWORD) SetEndOfFile = winexternal('SetEndOfFile', [HANDLE], BOOL) - ##_get_osfhandle = winexternal('_get_osfhandle', [INT], LONG) - # casting from int to handle did not work, so I changed this - # but it should not be so! 
- _get_osfhandle = winexternal('_get_osfhandle', [INT], rffi.INTPTR_T) - GetLastError = winexternal('GetLastError', [], DWORD) VirtualAlloc = winexternal('VirtualAlloc', [rffi.VOIDP, rffi.SIZE_T, DWORD, DWORD], rffi.VOIDP) @@ -240,19 +228,14 @@ # low might just happen to have the value INVALID_FILE_SIZE # so we need to check the last error also INVALID_FILE_SIZE = -1 - NO_ERROR = 0 - dwErr = GetLastError() - err = rffi.cast(lltype.Signed, dwErr) - if low == INVALID_FILE_SIZE and err != NO_ERROR: - msg = os.strerror(err) - raise OSError(err, msg) + if low == INVALID_FILE_SIZE: + err = rwin32.GetLastError() + if err: + raise WindowsError(err, "mmap") return low, high finally: lltype.free(high_ref, flavor='raw') - def _get_error_no(): - return rffi.cast(lltype.Signed, GetLastError()) - INVALID_HANDLE = INVALID_HANDLE_VALUE PAGESIZE = _get_page_size() @@ -261,10 +244,11 @@ NODATA = lltype.nullptr(PTR.TO) class MMap(object): - def __init__(self, access): + def __init__(self, access, offset): self.size = 0 self.pos = 0 self.access = access + self.offset = offset if _MS_WINDOWS: self.map_handle = NULL_HANDLE @@ -303,10 +287,10 @@ self.unmapview() self.setdata(NODATA, 0) if self.map_handle != INVALID_HANDLE: - CloseHandle(self.map_handle) + rwin32.CloseHandle(self.map_handle) self.map_handle = INVALID_HANDLE if self.file_handle != INVALID_HANDLE: - CloseHandle(self.file_handle) + rwin32.CloseHandle(self.file_handle) self.file_handle = INVALID_HANDLE elif _POSIX: self.closed = True @@ -365,7 +349,7 @@ self.pos += len(res) return res - def find(self, tofind, start=0): + def find(self, tofind, start, end, reverse=False): self.check_valid() # XXX naive! how can we reuse the rstr algorithm? 
@@ -373,16 +357,39 @@ start += self.size if start < 0: start = 0 + if end < 0: + end += self.size + if end < 0: + end = 0 + elif end > self.size: + end = self.size + # + upto = end - len(tofind) + if not reverse: + step = 1 + p = start + if p > upto: + return -1 # failure (empty range to search) + else: + step = -1 + p = upto + upto = start + if p < upto: + return -1 # failure (empty range to search) + # data = self.data - for p in xrange(start, self.size - len(tofind) + 1): + while True: + assert p >= 0 for q in range(len(tofind)): if data[p+q] != tofind[q]: break # position 'p' is not a match else: # full match return p - # failure - return -1 + # + if p == upto: + return -1 # failure + p += step def seek(self, pos, whence=0): self.check_valid() @@ -482,7 +489,7 @@ ## new_size = size + value & (PAGESIZE - 1) res = c_msync(start, size, MS_SYNC) if res == -1: - errno = _get_error_no() + errno = rposix.get_errno() raise OSError(errno, os.strerror(errno)) return 0 @@ -511,7 +518,7 @@ raise OSError(-11111, "No mremap available") # resize the underlying file first - os.ftruncate(self.fd, newsize) + os.ftruncate(self.fd, self.offset + newsize) # now resize the mmap newdata = c_mremap(self.getptr(0), self.size, newsize, @@ -520,15 +527,19 @@ elif _MS_WINDOWS: # disconnect the mapping self.unmapview() - CloseHandle(self.map_handle) + rwin32.CloseHandle(self.map_handle) # move to the desired EOF position if _64BIT: - newsize_high = newsize >> 32 - newsize_low = newsize & 0xFFFFFFFF + newsize_high = (self.offset + newsize) >> 32 + newsize_low = (self.offset + newsize) & 0xFFFFFFFF + offset_high = self.offset >> 32 + offset_low = self.offset & 0xFFFFFFFF else: newsize_high = 0 - newsize_low = newsize + newsize_low = self.offset + newsize + offset_high = 0 + offset_low = self.offset FILE_BEGIN = 0 high_ref = lltype.malloc(PLONG.TO, 1, flavor='raw') @@ -548,19 +559,14 @@ dwErrCode = 0 if self.map_handle: data = MapViewOfFile(self.map_handle, FILE_MAP_WRITE, - 0, 0, 0) + 
offset_high, offset_low, newsize) if data: # XXX we should have a real LPVOID which must always be casted charp = rffi.cast(LPCSTR, data) self.setdata(charp, newsize) return - else: - dwErrCode = GetLastError() - else: - dwErrCode = GetLastError() - err = rffi.cast(lltype.Signed, dwErrCode) - raise OSError(err, os.strerror(err)) - + raise rwin32.lastWindowsError() + def len(self): self.check_valid() @@ -588,23 +594,25 @@ if size < 0: raise RTypeError("memory mapped size must be positive") if rffi.cast(size_t, size) != size: - raise OverflowError("memory mapped size is too large (limited by C int)") + raise ROverflowError("memory mapped size is too large (limited by C int)") if _POSIX: def mmap(fileno, length, flags=MAP_SHARED, - prot=PROT_WRITE | PROT_READ, access=_ACCESS_DEFAULT): + prot=PROT_WRITE | PROT_READ, access=_ACCESS_DEFAULT, offset=0): fd = fileno - # check size boundaries - _check_map_size(length) - map_size = length - # check access is not there when flags and prot are there if access != _ACCESS_DEFAULT and ((flags != MAP_SHARED) or\ (prot != (PROT_WRITE | PROT_READ))): raise RValueError("mmap can't specify both access and flags, prot.") + # check size boundaries + _check_map_size(length) + map_size = length + if offset < 0: + raise RValueError("negative offset") + if access == ACCESS_READ: flags = MAP_SHARED prot = PROT_READ @@ -630,6 +638,7 @@ else: mode = st[stat.ST_MODE] size = st[stat.ST_SIZE] + size -= offset if size > sys.maxint: size = sys.maxint else: @@ -640,7 +649,7 @@ elif map_size > size: raise RValueError("mmap length is greater than file size") - m = MMap(access) + m = MMap(access, offset) if fd == -1: # Assume the caller wants to map anonymous memory. # This is the same behaviour as Windows. mmap.mmap(-1, size) @@ -655,9 +664,9 @@ # XXX if we use hintp below in alloc, the NonConstant # is necessary since we want a general version of c_mmap # to be annotated with a non-constant pointer. 
- res = c_mmap(NonConstant(NULL), map_size, prot, flags, fd, 0) + res = c_mmap(NonConstant(NULL), map_size, prot, flags, fd, offset) if res == rffi.cast(PTR, -1): - errno = _get_error_no() + errno = rposix.get_errno() raise OSError(errno, os.strerror(errno)) m.setdata(res, map_size) @@ -692,10 +701,12 @@ free = c_munmap_safe elif _MS_WINDOWS: - def mmap(fileno, length, tagname="", access=_ACCESS_DEFAULT): + def mmap(fileno, length, tagname="", access=_ACCESS_DEFAULT, offset=0): # check size boundaries _check_map_size(length) map_size = length + if offset < 0: + raise RValueError("negative offset") flProtect = 0 dwDesiredAccess = 0 @@ -716,16 +727,15 @@ # assume -1 and 0 both mean invalid file descriptor # to 'anonymously' map memory. if fileno != -1 and fileno != 0: - res = _get_osfhandle(fileno) - if res == rffi.cast(rffi.SSIZE_T, INVALID_HANDLE): - errno = _get_error_no() + fh = rwin32._get_osfhandle(fileno) + if fh == INVALID_HANDLE: + errno = rposix.get_errno() raise OSError(errno, os.strerror(errno)) - fh = rffi.cast(HANDLE, res) # Win9x appears to need us seeked to zero # SEEK_SET = 0 # libc._lseek(fileno, 0, SEEK_SET) - m = MMap(access) + m = MMap(access, offset) m.file_handle = INVALID_HANDLE m.map_handle = INVALID_HANDLE if fh: @@ -742,8 +752,7 @@ False, # inherited by child procs? DUPLICATE_SAME_ACCESS) # options if not res: - errno = _get_error_no() - raise OSError(errno, os.strerror(errno)) + raise rwin32.lastWindowsError() m.file_handle = handle_ref[0] finally: lltype.free(handle_ref, flavor='raw') @@ -764,31 +773,29 @@ # DWORD is a 4-byte int. 
If int > 4-byte it must be divided if _64BIT: - size_hi = map_size >> 32 - size_lo = map_size & 0xFFFFFFFF + size_hi = (map_size + offset) >> 32 + size_lo = (map_size + offset) & 0xFFFFFFFF + offset_hi = offset >> 32 + offset_lo = offset & 0xFFFFFFFF else: size_hi = 0 - size_lo = map_size + size_lo = map_size + offset + offset_hi = 0 + offset_lo = offset m.map_handle = CreateFileMapping(m.file_handle, NULL, flProtect, size_hi, size_lo, m.tagname) if m.map_handle: - res = MapViewOfFile(m.map_handle, dwDesiredAccess, - 0, 0, 0) - if res: + data = MapViewOfFile(m.map_handle, dwDesiredAccess, + offset_hi, offset_lo, 0) + if data: # XXX we should have a real LPVOID which must always be casted - charp = rffi.cast(LPCSTR, res) + charp = rffi.cast(LPCSTR, data) m.setdata(charp, map_size) return m - else: - dwErr = GetLastError() - else: - dwErr = GetLastError() - err = rffi.cast(lltype.Signed, dwErr) - raise OSError(err, os.strerror(err)) + raise rwin32.lastWindowsError() - def alloc(map_size): """Allocate memory. This is intended to be used by the JIT, so the memory has the executable bit set. 
diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ b/pypy/rpython/lltypesystem/opimpl.py @@ -271,6 +271,36 @@ r -= y return r +def op_uint_lshift(x, y): + assert isinstance(x, r_uint) + assert isinstance(y, int) + return r_uint(x << y) + +def op_uint_rshift(x, y): + assert isinstance(x, r_uint) + assert isinstance(y, int) + return r_uint(x >> y) + +def op_llong_lshift(x, y): + assert isinstance(x, r_longlong_arg) + assert isinstance(y, int) + return r_longlong_result(x << y) + +def op_llong_rshift(x, y): + assert isinstance(x, r_longlong_arg) + assert isinstance(y, int) + return r_longlong_result(x >> y) + +def op_ullong_lshift(x, y): + assert isinstance(x, r_ulonglong) + assert isinstance(y, int) + return r_ulonglong(x << y) + +def op_ullong_rshift(x, y): + assert isinstance(x, r_ulonglong) + assert isinstance(y, int) + return r_ulonglong(x >> y) + def op_same_as(x): return x diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -265,8 +265,8 @@ 'uint_ge': LLOp(canfold=True), 'uint_and': LLOp(canfold=True), 'uint_or': LLOp(canfold=True), - 'uint_lshift': LLOp(canfold=True), - 'uint_rshift': LLOp(canfold=True), + 'uint_lshift': LLOp(canfold=True), # args (r_uint, int) + 'uint_rshift': LLOp(canfold=True), # args (r_uint, int) 'uint_xor': LLOp(canfold=True), 'float_is_true': LLOp(canfold=True), # it really means "x != 0.0" @@ -288,9 +288,7 @@ 'llong_is_true': LLOp(canfold=True), 'llong_neg': LLOp(canfold=True), - 'llong_neg_ovf': LLOp(canraise=(OverflowError,), tryfold=True), 'llong_abs': LLOp(canfold=True), - 'llong_abs_ovf': LLOp(canraise=(OverflowError,), tryfold=True), 'llong_invert': LLOp(canfold=True), 'llong_add': LLOp(canfold=True), @@ -308,8 +306,8 @@ 'llong_ge': LLOp(canfold=True), 'llong_and': LLOp(canfold=True), 'llong_or': 
LLOp(canfold=True), - 'llong_lshift': LLOp(canfold=True), - 'llong_rshift': LLOp(canfold=True), + 'llong_lshift': LLOp(canfold=True), # args (r_longlong, int) + 'llong_rshift': LLOp(canfold=True), # args (r_longlong, int) 'llong_xor': LLOp(canfold=True), 'ullong_is_true': LLOp(canfold=True), @@ -330,8 +328,8 @@ 'ullong_ge': LLOp(canfold=True), 'ullong_and': LLOp(canfold=True), 'ullong_or': LLOp(canfold=True), - 'ullong_lshift': LLOp(canfold=True), - 'ullong_rshift': LLOp(canfold=True), + 'ullong_lshift': LLOp(canfold=True), # args (r_ulonglong, int) + 'ullong_rshift': LLOp(canfold=True), # args (r_ulonglong, int) 'ullong_xor': LLOp(canfold=True), 'cast_primitive': LLOp(canfold=True), @@ -470,7 +468,10 @@ # ^^^ but canunwindgc=False, as it is # allocating non-GC structures only 'gc_thread_run' : LLOp(), + 'gc_thread_start' : LLOp(), 'gc_thread_die' : LLOp(), + 'gc_thread_before_fork':LLOp(), # returns an opaque address + 'gc_thread_after_fork': LLOp(), # arguments: (result_of_fork, opaqueaddr) 'gc_assume_young_pointers': LLOp(canrun=True), 'gc_writebarrier_before_copy': LLOp(canrun=True), 'gc_heap_stats' : LLOp(canunwindgc=True), diff --git a/pypy/module/readline/test/__init__.py b/pypy/module/readline/test/__init__.py deleted file mode 100644 --- a/pypy/module/readline/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/objspace/std/test/helper.py b/pypy/objspace/std/test/helper.py deleted file mode 100644 --- a/pypy/objspace/std/test/helper.py +++ /dev/null @@ -1,69 +0,0 @@ -def raises(excp, func, *args): - try: - func(*args) - assert 1 == 0 - except excp:pass - -def assertEqual(a, b): - assert a == b - -def assertNotEqual(a, b): - assert a != b - -def assertIs(a, b): - assert a is b - -# complex specific tests - -EPS = 1e-9 - -def assertAlmostEqual(a, b): - if isinstance(a, complex): - if isinstance(b, complex): - assert a.real - b.real < EPS - assert a.imag - b.imag < EPS - else: - assert a.real - b < EPS - assert a.imag < EPS - else: - if 
isinstance(b, complex): - assert a - b.real < EPS - assert b.imag < EPS - else: - assert a - b < EPS - -def assertCloseAbs(x, y, eps=1e-9): - """Return true iff floats x and y "are close\"""" - # put the one with larger magnitude second - if abs(x) > abs(y): - x, y = y, x - if y == 0: - return abs(x) < eps - if x == 0: - return abs(y) < eps - # check that relative difference < eps - assert abs((x-y)/y) < eps - -def assertClose(x, y, eps=1e-9): - """Return true iff complexes x and y "are close\"""" - assertCloseAbs(x.real, y.real, eps) - assertCloseAbs(x.imag, y.imag, eps) - - -def check_div(x, y): - """Compute complex z=x*y, and check that z/x==y and z/y==x.""" - z = x * y - if x != 0: - q = z / x - assertClose(q, y) - q = z.__div__(x) - assertClose(q, y) - q = z.__truediv__(x) - assertClose(q, y) - if y != 0: - q = z / y - assertClose(q, x) - q = z.__div__(y) - assertClose(q, x) - q = z.__truediv__(y) - assertClose(q, x) diff --git a/pypy/module/readline/__init__.py b/pypy/module/readline/__init__.py deleted file mode 100644 --- a/pypy/module/readline/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.mixedmodule import MixedModule - -# XXX raw_input needs to check for space.readline_func and use -# it if its there - -class Module(MixedModule): - """Importing this module enables command line editing using GNU readline.""" - # the above line is the doc string of the translated module - - def setup_after_space_initialization(self): - from pypy.module.readline import c_readline - c_readline.setup_readline(self.space, self) - - interpleveldefs = { - 'readline' : 'interp_readline.readline', - } - - appleveldefs = { - 'parse_and_bind': 'app_stub.stub', - 'get_line_buffer': 'app_stub.stub_str', - 'insert_text': 'app_stub.stub', - 'read_init_file': 'app_stub.stub', - 'read_history_file': 'app_stub.stub', - 
'write_history_file': 'app_stub.stub', - 'clear_history': 'app_stub.stub', - 'get_history_length': 'app_stub.stub_int', - 'set_history_length': 'app_stub.stub', - 'get_current_history_length': 'app_stub.stub_int', - 'get_history_item': 'app_stub.stub_str', - 'remove_history_item': 'app_stub.stub', - 'replace_history_item': 'app_stub.stub', - 'redisplay': 'app_stub.stub', - 'set_startup_hook': 'app_stub.stub', - 'set_pre_input_hook': 'app_stub.stub', - 'set_completer': 'app_stub.stub', - 'get_completer': 'app_stub.stub', - 'get_begidx': 'app_stub.stub_int', - 'get_endidx': 'app_stub.stub_int', - 'set_completer_delims': 'app_stub.stub', - 'get_completer_delims': 'app_stub.stub_str', - 'add_history': 'app_stub.stub', - } diff --git a/pypy/objspace/std/objspace.py b/pypy/objspace/std/objspace.py --- a/pypy/objspace/std/objspace.py +++ b/pypy/objspace/std/objspace.py @@ -22,7 +22,7 @@ from pypy.objspace.std.floatobject import W_FloatObject from pypy.objspace.std.intobject import W_IntObject from pypy.objspace.std.listobject import W_ListObject -from pypy.objspace.std.longobject import W_LongObject +from pypy.objspace.std.longobject import W_LongObject, newlong from pypy.objspace.std.noneobject import W_NoneObject from pypy.objspace.std.objectobject import W_ObjectObject from pypy.objspace.std.ropeobject import W_RopeObject @@ -177,6 +177,13 @@ #print 'wrapping', x, '->', w_result return w_result if isinstance(x, base_int): + if self.config.objspace.std.withsmalllong: + from pypy.objspace.std.smalllongobject import W_SmallLongObject + from pypy.rlib.rarithmetic import r_longlong, r_ulonglong + from pypy.rlib.rarithmetic import longlongmax + if (not isinstance(x, r_ulonglong) + or x <= r_ulonglong(longlongmax)): + return W_SmallLongObject(r_longlong(x)) x = widen(x) if isinstance(x, int): return self.newint(x) @@ -207,6 +214,16 @@ # The following cases are even stranger. # Really really only for tests. 
if type(x) is long: + if self.config.objspace.std.withsmalllong: + from pypy.rlib.rarithmetic import r_longlong + try: + rx = r_longlong(x) + except OverflowError: + pass + else: + from pypy.objspace.std.smalllongobject import \ + W_SmallLongObject + return W_SmallLongObject(rx) return W_LongObject.fromlong(x) if isinstance(x, slice): return W_SliceObject(self.wrap(x.start), @@ -269,10 +286,13 @@ return unpackcomplex(self, w_complex) def newlong(self, val): # val is an int + if self.config.objspace.std.withsmalllong: + from pypy.objspace.std.smalllongobject import W_SmallLongObject + return W_SmallLongObject.fromint(val) return W_LongObject.fromint(self, val) def newlong_from_rbigint(self, val): - return W_LongObject(val) + return newlong(self, val) def newtuple(self, list_w): assert isinstance(list_w, list) diff --git a/pypy/doc/config/objspace.usemodules.readline.txt b/pypy/doc/config/objspace.usemodules.readline.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.readline.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'readline' module. diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -1007,17 +1007,16 @@ return len(numb.nums) index = len(numb.nums) - 1 virtualizable = self.decode_ref(numb.nums[index]) - virtualizable = vinfo.cast_gcref_to_vtype(virtualizable) if self.resume_after_guard_not_forced == 1: # in the middle of handle_async_forcing() - assert virtualizable.vable_token - virtualizable.vable_token = vinfo.TOKEN_NONE + assert vinfo.gettoken(virtualizable) + vinfo.settoken(virtualizable, vinfo.TOKEN_NONE) else: # just jumped away from assembler (case 4 in the comment in # virtualizable.py) into tracing (case 2); check that vable_token # is and stays 0. Note the call to reset_vable_token() in # warmstate.py. 
- assert not virtualizable.vable_token + assert not vinfo.gettoken(virtualizable) return vinfo.write_from_resume_data_partial(virtualizable, self, numb) def load_value_of_type(self, TYPE, tagged): @@ -1158,7 +1157,7 @@ def decode_float(self, tagged): num, tag = untag(tagged) if tag == TAGCONST: - return self.consts[num].getfloat() + return self.consts[num].getfloatstorage() else: assert tag == TAGBOX if num < 0: diff --git a/pypy/module/readline/interp_readline.py b/pypy/module/readline/interp_readline.py deleted file mode 100644 --- a/pypy/module/readline/interp_readline.py +++ /dev/null @@ -1,23 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.baseobjspace import ObjSpace - -from pypy.module.readline import c_readline -from pypy.rpython.lltypesystem import rffi - -#------------------------------------------------------------ -# exported API (see interpleveldefs in __init__.py) -# -def readline(space, prompt): - return space.wrap(rffi.charp2str(c_readline.c_readline(prompt))) -readline.unwrap_spec = [ObjSpace, str] - -def setcompleter(space, w_callback): - """Set or remove the completer function. - The function is called as function(text, state), - for state in 0, 1, 2, ..., until it returns a non-string. - It should return the next possible completion starting with 'text'. 
- """ - # XXX set internal completion function - diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -4,6 +4,7 @@ from pypy.config.config import OptionDescription, BoolOption, IntOption, ArbitraryOption from pypy.config.config import ChoiceOption, StrOption, to_optparse, Config from pypy.config.config import ConflictConfigError +from pypy.config.translationoption import IS_64_BITS modulepath = py.path.local(__file__).dirpath().dirpath().join("module") all_modules = [p.basename for p in modulepath.listdir() @@ -26,7 +27,7 @@ # --allworkingmodules working_modules = default_modules.copy() working_modules.update(dict.fromkeys( - ["_socket", "unicodedata", "mmap", "fcntl", + ["_socket", "unicodedata", "mmap", "fcntl", "_locale", "rctime" , "select", "zipimport", "_lsprof", "crypt", "signal", "_rawffi", "termios", "zlib", "bz2", "struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO", @@ -91,6 +92,7 @@ "bz2" : ["pypy.module.bz2.interp_bz2"], "pyexpat" : ["pypy.module.pyexpat.interp_pyexpat"], "_ssl" : ["pypy.module._ssl.interp_ssl"], + "_hashlib" : ["pypy.module._ssl.interp_ssl"], "_minimal_curses": ["pypy.module._minimal_curses.fficurses"], } @@ -164,7 +166,7 @@ suggests=[("objspace.allworkingmodules", False)]), BoolOption("geninterp", "specify whether geninterp should be used", - default=True), + default=False), BoolOption("logbytecodes", "keep track of bytecode usage", @@ -212,6 +214,11 @@ IntOption("prebuiltintto", "highest integer which is prebuilt", default=100, cmdline="--prebuiltintto"), + BoolOption("withsmalllong", "use a version of 'long' in a C long long", + default=False, + requires=[("objspace.std.withsmallint", False)]), + # ^^^ because of missing delegate_xx2yy + BoolOption("withstrjoin", "use strings optimized for addition", default=False), @@ -345,6 +352,8 @@ config.objspace.std.suggest(optimized_list_getitem=True) 
config.objspace.std.suggest(getattributeshortcut=True) config.objspace.std.suggest(newshortcut=True) + if not IS_64_BITS: + config.objspace.std.suggest(withsmalllong=True) # extra costly optimizations only go in level 3 if level == '3': @@ -360,6 +369,8 @@ config.objspace.std.suggest(withmapdict=True) config.objspace.std.suggest(withstrslice=True) config.objspace.std.suggest(withstrjoin=True) + if not IS_64_BITS: + config.objspace.std.suggest(withsmalllong=True) # xxx other options? ropes maybe? # completely disable geninterp in a level 0 translation From commits-noreply at bitbucket.org Thu Mar 17 19:55:48 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 17 Mar 2011 19:55:48 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the test: it's normal for moving gc's to allocate an additional nonmoving buffer, Message-ID: <20110317185548.991F0282BD6@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42771:9ba5088910ee Date: 2011-03-17 18:05 +0100 http://bitbucket.org/pypy/pypy/changeset/9ba5088910ee/ Log: Fix the test: it's normal for moving gc's to allocate an additional nonmoving buffer, and keep it alive along with the c_char_p object. 
diff --git a/lib-python/modified-2.7.0/ctypes/test/test_internals.py b/lib-python/modified-2.7.0/ctypes/test/test_internals.py --- a/lib-python/modified-2.7.0/ctypes/test/test_internals.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_internals.py @@ -29,13 +29,18 @@ self.assertEqual(refcnt, grc(i)) self.assertEqual(ci._objects, None) - @xfail def test_c_char_p(self): s = "Hello, World" refcnt = grc(s) cs = c_char_p(s) self.assertEqual(refcnt + 1, grc(s)) - self.assertSame(cs._objects, s) + try: + # Moving gcs need to allocate a nonmoving buffer + cs._objects._obj + except AttributeError: + self.assertSame(cs._objects, s) + else: + self.assertSame(cs._objects._obj, s) def test_simple_struct(self): class X(Structure): From commits-noreply at bitbucket.org Thu Mar 17 19:55:49 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 17 Mar 2011 19:55:49 +0100 (CET) Subject: [pypy-svn] pypy default: This import is no more needed Message-ID: <20110317185549.22004282BD6@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42772:13158023c6c7 Date: 2011-03-17 18:05 +0100 http://bitbucket.org/pypy/pypy/changeset/13158023c6c7/ Log: This import is no more needed diff --git a/lib-python/modified-2.7.0/ctypes/test/test_internals.py b/lib-python/modified-2.7.0/ctypes/test/test_internals.py --- a/lib-python/modified-2.7.0/ctypes/test/test_internals.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_internals.py @@ -2,7 +2,6 @@ import unittest from ctypes import * from sys import getrefcount as grc -from ctypes.test import xfail # XXX This test must be reviewed for correctness!!! 
From commits-noreply at bitbucket.org Thu Mar 17 19:55:49 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 17 Mar 2011 19:55:49 +0100 (CET) Subject: [pypy-svn] pypy default: Skip failures on win32 Message-ID: <20110317185549.E4E23282BD6@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42773:61fefec7abc6 Date: 2011-03-17 18:08 +0100 http://bitbucket.org/pypy/pypy/changeset/61fefec7abc6/ Log: Skip failures on win32 diff --git a/lib-python/modified-2.7.0/ctypes/test/test_loading.py b/lib-python/modified-2.7.0/ctypes/test/test_loading.py --- a/lib-python/modified-2.7.0/ctypes/test/test_loading.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_loading.py @@ -2,7 +2,7 @@ import sys, unittest import os from ctypes.util import find_library -from ctypes.test import is_resource_enabled +from ctypes.test import is_resource_enabled, xfail libc_name = None if os.name == "nt": @@ -75,6 +75,7 @@ self.assertRaises(AttributeError, dll.__getitem__, 1234) if os.name == "nt": + @xfail def test_1703286_A(self): from _ctypes import LoadLibrary, FreeLibrary # On winXP 64-bit, advapi32 loads at an address that does @@ -85,6 +86,7 @@ handle = LoadLibrary("advapi32") FreeLibrary(handle) + @xfail def test_1703286_B(self): # Since on winXP 64-bit advapi32 loads like described # above, the (arbitrarily selected) CloseEventLog function diff --git a/lib-python/modified-2.7.0/ctypes/test/test_parameters.py b/lib-python/modified-2.7.0/ctypes/test/test_parameters.py --- a/lib-python/modified-2.7.0/ctypes/test/test_parameters.py +++ b/lib-python/modified-2.7.0/ctypes/test/test_parameters.py @@ -89,6 +89,8 @@ pa = c_wchar_p.from_param(c_wchar_p(u"123")) self.assertEqual(type(pa), c_wchar_p) + if sys.platform == "win32": + test_cw_strings = xfail(test_cw_strings) @xfail def test_int_pointers(self): From commits-noreply at bitbucket.org Fri Mar 18 01:18:57 2011 From: commits-noreply at bitbucket.org (ademan) Date: Fri, 18 Mar 2011 01:18:57 +0100 (CET) Subject: 
[pypy-svn] pypy micronumpy: Branch suffered from bad svn merge, micronumpy as it is here is mostly deprecated anyways. Message-ID: <20110318001857.0CF2C282BD7@codespeak.net> Author: Daniel Roberts Branch: micronumpy Changeset: r42774:fbb12ddc3b65 Date: 2011-03-17 17:18 -0700 http://bitbucket.org/pypy/pypy/changeset/fbb12ddc3b65/ Log: Branch suffered from bad svn merge, micronumpy as it is here is mostly deprecated anyways. From commits-noreply at bitbucket.org Fri Mar 18 10:59:21 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 10:59:21 +0100 (CET) Subject: [pypy-svn] pypy default: use communicate() to make sure that stdout/err are actually closed, else we leak file descriptors Message-ID: <20110318095921.C3EC3282BA1@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42775:491a8d97fba5 Date: 2011-03-18 10:58 +0100 http://bitbucket.org/pypy/pypy/changeset/491a8d97fba5/ Log: use communicate() to make sure that stdout/err are actually closed, else we leak file descriptors diff --git a/pypy/module/pypyjit/test_pypy_c/test_model.py b/pypy/module/pypyjit/test_pypy_c/test_model.py --- a/pypy/module/pypyjit/test_pypy_c/test_model.py +++ b/pypy/module/pypyjit/test_pypy_c/test_model.py @@ -51,9 +51,7 @@ env=env, stdout=subprocess.PIPE, stderr=subprocess.PIPE) - pipe.wait() - stderr = pipe.stderr.read() - stdout = pipe.stdout.read() + stdout, stderr = pipe.communicate() assert not stderr # # parse the JIT log From commits-noreply at bitbucket.org Fri Mar 18 11:01:57 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 11:01:57 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110318100157.85EFA282BA1@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42776:5d58c1ce7fc4 Date: 2011-03-18 10:58 +0100 http://bitbucket.org/pypy/pypy/changeset/5d58c1ce7fc4/ Log: merge heads diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted 
file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. thunk space or - another special space diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. 
- -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. 
-The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. Our aim is of course to make sure that the communities way of working -is disturbed as little as possible and that contributing to PyPy still feels -fun and interesting (;-) but also to try to show to the EU as well as other -funded projects that open source ideas, tools and methods are really good ways -of running development projects. So the way PyPy as a project is being run - -distributed and agile - is something we think might be of use to other open -source development projects and commercial projects. - -Main methods for achieving this is: - - * Sprint driven development - * Sync meetings - -Main tools for achieving this is: - - * py.test - automated testing - * Subversion - version control - * Transparent communication and documentation (mailinglists, IRC, tutorials - etc etc) - - -Sprint driven development: --------------------------- - -What is a sprint and why are we sprinting? - -Originally the sprint methodology used in the Python community grew from -practices within Zope3 development. The definition of a sprint is "two-day or -three-day focused development session, in which developers pair off together -in a room and focus on building a particular subsystem". - -Other typical sprint factors: - - * no more than 10 people (although other projects as well as PyPy haven been - noted to have more than that. This is the recommendation and it is - probably based on the idea of having a critical mass of people who can - interact/communicate and work without adding the need for more than just - the absolute necessary coordination time. 
The sprints during 2005 and 2006 have - been having ca 13-14 people per sprint, the highest number of participants - during a PyPy sprint has been 24 developers) - - * a coach (the coach is the "manager" of the sprint, he/she sets the goals, - prepares, leads and coordinate the work and track progress and makes this - visible for the team. Important to note here - PyPy have never had coaches - in our sprints. Instead we hold short status meetings in the whole group, - decisions are made in the same way. So far this have worked well and we - still have been able to achieve tremendous results under stressed - conditions, releases and such like. What we do have is a local organizer, - often a developer living in the area and one more developer who prepares - and organizes sprint. They do not "manage" the sprint when its started - - their role is more of the logistic nature. This doesn't mean that we wont - have use for the coach technique or something similar in the future). - - * only coding (this is a tough one. There have been projects who have used - the sprinting method to just visionalize och gather input. PyPy have had a - similar brainstorming start up sprint. So far though this is the official - line although again, if you visit a PyPy sprint we are doing quite a lot - of other small activities in subgroups as well - planning sprints, - documentation, coordinating our EU deliverables and evaluation etc. But - don't worry - our main focus is programming ;-) - - * using XP techniques (mainly pairprogramming and unit testing - PyPy is - leaning heavily on these aspects). Pairing up core developers with people - with different levels of knowledge of the codebase have had the results - that people can quite quickly get started and join in the development. - Many of our participants (new to the project and the codebase) have - expressed how pairprogramming in combination with working on the automated - tests have been a great way of getting started. 
This is of course also a - dilemma because our core developers might have to pair up to solve some - extra hairy problems which affects the structure and effect of the other - pairs. - -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. - -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. 
Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. 
Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. A permanent Internet connection is a must - - has the venue were the sprint is planned to be weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue is - being met should therefore have very good local connections or, preferably - live there. - -3. Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content is fully decided a sprint - announcement is being made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcements points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and between to track work. After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and updated to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. 
Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. 
- - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. - - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! - -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? 
-+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden - Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo 
- Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. 
Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. 
The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. 
diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. - -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. - -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. This belongs -to the harder parts. - -What is not meant by pickling? -.............................. - -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state im memory -is not what I consider a real solution. 
In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend of we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. - -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. - -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. - -For other types with are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. 
- -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ - -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return form the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. -Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. 
This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. - -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. - -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. It can be made non-recursive if the map operation - creates its own frame to keep state. - -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. 
- -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. - -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. - -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. -This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. - -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. 
It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. -If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. - -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. 
SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. - -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. - -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. -The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. 
- -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... - -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which not is not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. - -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable liveness, it does not -track references which are known to be held by other objects. -Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. 
-The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the sketched problem above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io' module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warnings' module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - -.. sectnum:: -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. - - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. - -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. - -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. - -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. 
- -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. - -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. - - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. - -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. - -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. 
- -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. - -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. _`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. _`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. _`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. 
_`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. _`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) - -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. 
- -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. 
_`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. _`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. 
_Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. 
By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. - - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... 
mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. - -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. 
There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. - -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. 
Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. - -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. 
- -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. 
- -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... - except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. 
Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. - -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. 
- -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. - -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. 
_`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. 
This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). - - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. 
The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. - - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. - - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. 
- - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). - - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. 
It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. ``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. _pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. 
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. 
- -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. 
To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. 
diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). 
diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: -.. sectnum:: - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. _`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. 
_`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` and ``make`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. 
In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. - - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. 
_`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. 
_`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. - -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified-2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. 
Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. **This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). 
-As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. 
- -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. -- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). - -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. 
-This module is expected to be working and is included by default on Windows. diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,237 +0,0 @@ -PyPy, like any large project, has developed a jargon of its own. This -document gives brief definition of some of these terms and provides -links to more information. - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. - It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. 
_`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. _llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. _lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. 
You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. _`promotion`: - -**promotion** - JIT_ terminology. *promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. 
_`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. _`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. 
_`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. 
We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. 
In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool object. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must follow the ``then`` or ``else`` branch of the ``if``. -So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this by doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulate the secret values need to be -RPython. 
Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. The compiled -version is checked to satisfy the security constrains, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it. 
Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. 
If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are another variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret". 
-Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. - -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. 
- -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. From the object space's point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. 
(Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,59 +0,0 @@ - -The PyPy project aims at producing a flexible and fast Python_ -implementation. The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? 
`more...`_ - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. 
_`Release 1.4`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.txt deleted file mode 100644 --- a/pypy/doc/jit/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -======================================================================== - JIT documentation -======================================================================== - -:abstract: - - When PyPy is translated into an executable like ``pypy-c``, the - executable contains a full virtual machine that can optionally - include a Just-In-Time compiler. This JIT compiler is **generated - automatically from the interpreter** that we wrote in RPython. - - This JIT Compiler Generator can be applied on interpreters for any - language, as long as the interpreter itself is written in RPython - and contains a few hints to guide the JIT Compiler Generator. - - -Content ------------------------------------------------------------- - -- Overview_: motivating our approach - -- Notes_ about the current work in PyPy - - -.. _Overview: overview.html -.. 
_Notes: pyjitpl5.html diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. 
- -The other approach ------------------- - -The general idea to solve handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. - - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes an usual -ootyped flowgraph and split the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). - - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). 
- -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). - -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. 
- - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. 
In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only kind of ``Pair``, whose ``value`` and -``behaviour`` type are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. - -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performances. - - diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. _`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. 
-Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our hosts was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. _`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were hold at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. 
_`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There was over a hundred -attendants. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy will met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. _report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -.. _`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. 
Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. _`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. _`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. 
-Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. -Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. 
_`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). -Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. _`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. 
-Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. _`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. 
_`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. 
_`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. _page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 7246.38 pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 
4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - real 0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. 
_`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/jit/pyjitpl5.txt b/pypy/doc/jit/pyjitpl5.txt deleted file mode 100644 --- a/pypy/doc/jit/pyjitpl5.txt +++ /dev/null @@ -1,179 +0,0 @@ -========== - PyJitPl5 -========== - -This document describes the fifth generation of PyPy's JIT. - - -Implementation of the JIT -========================= - -The JIT's `theory`_ is great in principle, but the actual code is a different -story. 
This section tries to give a high level overview of how PyPy's JIT is -implemented. It's helpful to have an understanding of how the PyPy `translation -tool chain`_ works before digging into the sources. - -Almost all JIT specific code is found in pypy/jit subdirectories. Translation -time code is in the codewriter directory. The metainterp directory holds -platform independent code including the the tracer and the optimizer. Code in -the backend directory is responsible for generating machine code. - -.. _`theory`: overview.html -.. _`translation tool chain`: ../translation.html - - -JIT hints ---------- - -To add a JIT to an interpreter, PyPy only requires that two hints be added to -the target interpreter. These are jit_merge_point and can_enter_jit. -jit_merge_point is supposed to go at the start of opcode dispatch. It allows -the JIT to bail back to the interpreter in case running machine code is no -longer suitable. can_enter_jit goes at the end of a application level loop. In -the Python interpreter, this is the JUMP_ABSOLUTE bytecode. The Python -interpreter defines its hints in pypy/module/pypyjit/interp_jit.py in a few -overridden methods of the default interpreter loop. - -An interpreter wishing to use the PyPy's JIT must define a list of *green* -variables and a list of *red* variables. The *green* variables are loop -constants. They are used to identify the current loop. Red variables are for -everything else used in the execution loop. For example, the Python interpreter -passes the code object and the instruction pointer as greens and the frame -object and execution context as reds. These objects are passed to the JIT at -the location of the JIT hints. - - -JIT Generation --------------- - -After the RTyping phase of translation, where high level Python operations are -turned into low-level ones for the backend, the translation driver calls -apply_jit() in metainterp/warmspot.py to add a JIT compiler to the currently -translating interpreter. 
apply_jit() decides what assembler backend to use then -delegates the rest of the work to the WarmRunnerDesc class. WarmRunnerDesc -finds the two JIT hints in the function graphs. It rewrites the graph -containing the jit_merge_point hint, called the portal graph, to be able to -handle special JIT exceptions, which indicate special conditions to the -interpreter upon exiting from the JIT. The location of the can_enter_jit hint -is replaced with a call to a function, maybe_compile_and_run in warmstate.py, -that checks if current loop is "hot" and should be compiled. - -Next, starting with the portal graph, codewriter/\*.py converts the graphs of the -interpreter into JIT bytecode. Since this bytecode is stored in the final -binary, it's designed to be concise rather than fast. The bytecode codewriter -doesn't "see" (what it sees is defined by the JIT's policy) every part of the -interpreter. In these cases, it simply inserts an opaque call. - -Finally, translation finishes, including the bytecode of the interpreter in the -final binary, and interpreter is ready to use the runtime component of the JIT. - - -Tracing -------- - -Application code running on the JIT-enabled interpreter starts normally; it is -interpreted on top of the usual evaluation loop. When an application loop is -closed (where the can_enter_jit hint was), the interpreter calls the -maybe_compile_and_run() method of WarmEnterState. This method increments a -counter associated with the current green variables. When this counter reaches -a certain level, usually indicating the application loop has been run many -times, the JIT enters tracing mode. - -*Tracing* is where JIT interprets the bytecode, generated at translation time, -of the interpreter interpreting the application level code. This allows it to -see the exact operations that make up the application level loop. Tracing is -performed by MetaInterp and MIFrame classes in metainterp/pyjitpl.py. 
-maybe_compile_and_run() creates a MetaInterp and calls its -compile_and_run_once() method. This initializes the MIFrame for the input -arguments of the loop, the red and green variables passed from the -jit_merge_point hint, and sets it to start interpreting the bytecode of the -portal graph. - -Before starting the interpretation, the loop input arguments are wrapped in a -*box*. Boxes (defined in metainterp/history.py) wrap the value and type of a -value in the program the JIT is interpreting. There are two main varieties of -boxes: constant boxes and normal boxes. Constant boxes are used for values -assumed to be known during tracing. These are not necessarily compile time -constants. All values which are "promoted", assumed to be constant by the JIT -for optimization purposes, are also stored in constant boxes. Normal boxes -contain values that may change during the running of a loop. There are three -kinds of normal boxes: BoxInt, BoxPtr, and BoxFloat, and four kinds of constant -boxes: ConstInt, ConstPtr, ConstFloat, and ConstAddr. (ConstAddr is only used -to get around a limitation in the translation toolchain.) - -The meta-interpreter starts interpreting the JIT bytecode. Each operation is -executed and then recorded in a list of operations, called the trace. -Operations can have a list of boxes that operate on, arguments. Some operations -(like GETFIELD and GETARRAYITEM) also have special objects that describe how -their arguments are laid out in memory. All possible operations generated by -tracing are listed in metainterp/resoperation.py. When a (interpreter-level) -call to a function the JIT has bytecode for occurs during tracing, another -MIFrame is added to the stack and the tracing continues with the same history. -This flattens the list of operations over calls. Most importantly, it unrolls -the opcode dispatch loop. Interpretation continues until the can_enter_jit hint -is seen. 
At this point, a whole iteration of the application level loop has -been seen and recorded. - -Because only one iteration has been recorded the JIT only knows about one -codepath in the loop. For example, if there's a if statement construct like -this:: - - if x: - do_something_exciting() - else: - do_something_else() - -and ``x`` is true when the JIT does tracing, only the codepath -``do_something_exciting`` will be added to the trace. In future runs, to ensure -that this path is still valid, a special operation called a *guard operation* is -added to the trace. A guard is a small test that checks if assumptions the JIT -makes during tracing are still true. In the example above, a GUARD_TRUE guard -will be generated for ``x`` before running ``do_something_exciting``. - -Once the meta-interpreter has verified that it has traced a loop, it decides how -to compile what it has. There is an optional optimization phase between these -actions which is covered future down this page. The backend converts the trace -operations into assembly for the particular machine. It then hands the compiled -loop back to the frontend. The next time the loop is seen in application code, -the optimized assembly can be run instead of the normal interpreter. - - -Optimizations -------------- - -The JIT employs several techniques, old and new, to make machine code run -faster. - -Virtuals and Virtualizables -*************************** - -A *virtual* value is an array, struct, or RPython level instance that is created -during the loop and does not escape from it via calls or longevity past the -loop. Since it is only used by the JIT, it be "optimized out"; the value -doesn't have to be allocated at all and its fields can be stored as first class -values instead of deferencing them in memory. Virtuals allow temporary objects -in the interpreter to be unwrapped. 
For example, a W_IntObject in the PyPy can -be unwrapped to just be its integer value as long as the object is known not to -escape the machine code. - -A *virtualizable* is similar to a virtual in that its structure is optimized out -in the machine code. Virtualizables, however, can escape from JIT controlled -code. - -Most of the JIT's optimizer is contained 2 files optimizefindnodes.py and -optimizeopt.py. - - -More resources -============== - -More documentation about the current JIT is available as a first published -article: - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`__ - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit-final.pdf - -as well as the `blog posts with the JIT tag.`__ - -.. __: http://morepypy.blogspot.com/search/label/jit diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. - -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. 
diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,123 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. contents:: -.. sectnum:: - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. _`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. 
_`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. -We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. - -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. 
- - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. _`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. 
It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). - -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. 
_`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. 
_`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. 
_`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/jit/overview.txt b/pypy/doc/jit/overview.txt deleted file mode 100644 --- a/pypy/doc/jit/overview.txt +++ /dev/null @@ -1,195 +0,0 @@ ------------------------------------------------------------------------- - Motivating JIT Compiler Generation ------------------------------------------------------------------------- - -.. contents:: -.. 
sectnum:: - -This is a non-technical introduction and motivation for PyPy's approach -to Just-In-Time compiler generation. - - -Motivation -======================================================================== - -Overview --------- - -Writing an interpreter for a complex dynamic language like Python is not -a small task, especially if, for performance goals, we want to write a -Just-in-Time (JIT) compiler too. - -The good news is that it's not what we did. We indeed wrote an -interpreter for Python, but we never wrote any JIT compiler for Python -in PyPy. Instead, we use the fact that our interpreter for Python is -written in RPython, which is a nice, high-level language -- and we turn -it *automatically* into a JIT compiler for Python. - -This transformation is of course completely transparent to the user, -i.e. the programmer writing Python programs. The goal (which we -achieved) is to support *all* Python features -- including, for example, -random frame access and debuggers. But it is also mostly transparent to -the language implementor, i.e. to the source code of the Python -interpreter. It only needs a bit of guidance: we had to put a small -number of hints in the source code of our interpreter. Based on these -hints, the *JIT compiler generator* produces a JIT compiler which has -the same language semantics as the original interpreter by construction. -This JIT compiler itself generates machine code at runtime, aggressively -optimizing the user's program and leading to a big performance boost, -while keeping the semantics unmodified. Of course, the interesting bit -is that our Python language interpreter can evolve over time without -getting out of sync with the JIT compiler. - - -The path we followed --------------------- - -Our previous incarnations of PyPy's JIT generator were based on partial -evaluation. This is a well-known and much-researched topic, considered -to be very promising. 
There have been many attempts to use it to -automatically transform an interpreter into a compiler. However, none of -them have lead to substantial speedups for real-world languages. We -believe that the missing key insight is to use partial evaluation to -produce just-in-time compilers, rather than classical ahead-of-time -compilers. If this turns out to be correct, the practical speed of -dynamic languages could be vastly improved. - -All these previous JIT compiler generators were producing JIT compilers -similar to the hand-written Psyco. But today, starting from 2009, our -prototype is no longer using partial evaluation -- at least not in a way -that would convince paper reviewers. It is instead based on the notion -of *tracing JIT,* recently studied for Java and JavaScript. When -compared to all existing tracing JITs so far, however, partial -evaluation gives us some extra techniques that we already had in our -previous JIT generators, notably how to optimize structures by removing -allocations. - -The closest comparison to our current JIT is Tamarin's TraceMonkey. -However, this JIT compiler is written manually, which is quite some -effort. In PyPy, we write a JIT generator at the level of RPython, -which means that our final JIT does not have to -- indeed, cannot -- be -written to encode all the details of the full Python language. These -details are automatically supplied by the fact that we have an -interpreter for full Python. - - -Practical results ------------------ - -The JIT compilers that we generate use some techniques that are not in -widespread use so far, but they are not exactly new either. The point -we want to make here is not that we are pushing the theoretical limits -of how fast a given dynamic language can be run. Our point is: we are -making it **practical** to have reasonably good Just-In-Time compilers -for all dynamic languages, no matter how complicated or non-widespread -(e.g. 
Open Source dynamic languages without large industry or academic -support, or internal domain-specific languages). By practical we mean -that this should be: - -* Easy: requires little more efforts than writing the interpreter in the - first place. - -* Maintainable: our generated JIT compilers are not separate projects - (we do not generate separate source code, but only throw-away C code - that is compiled into the generated VM). In other words, the whole - JIT compiler is regenerated anew every time the high-level interpreter - is modified, so that they cannot get out of sync no matter how fast - the language evolves. - -* Fast enough: we can get some rather good performance out of the - generated JIT compilers. That's the whole point, of course. - - -Alternative approaches to improve speed -======================================================================== - -+----------------------------------------------------------------------+ -| :NOTE: | -| | -| Please take the following section as just a statement of opinion. | -| In order to be debated over, the summaries should first be | -| expanded into full arguments. We include them here as links; | -| we are aware of them, even if sometimes pessimistic about them | -| ``:-)`` | -+----------------------------------------------------------------------+ - -There are a large number of approaches to improving the execution speed of -dynamic programming languages, most of which only produce small improvements -and none offer the flexibility and customisability provided by our approach. -Over the last 6 years of tweaking, the speed of CPython has only improved by a -factor of 1.3 or 1.4 (depending on benchmarks). Many tweaks are applicable to -PyPy as well. Indeed, some of the CPython tweaks originated as tweaks for PyPy. 
- -IronPython initially achieved a speed of about 1.8 times that of CPython by -leaving out some details of the language and by leveraging the large investment -that Microsoft has put into making the .NET platform fast; the current, more -complete implementation has roughly the same speed as CPython. In general, the -existing approaches have reached the end of the road, speed-wise. Microsoft's -Dynamic Language Runtime (DLR), often cited in this context, is essentially -only an API to make the techniques pioneered in IronPython official. At best, -it will give another small improvement. - -Another technique regularly mentioned is adding types to the language in order -to speed it up: either explicit optional typing or soft typing (i.e., inferred -"likely" types). For Python, all projects in this area have started with a -simplified subset of the language; no project has scaled up to anything close -to the complete language. This would be a major effort and be platform- and -language-specific. Moreover maintenance would be a headache: we believe that -many changes that are trivial to implement in CPython, are likely to invalidate -previous carefully-tuned optimizations. - -For major improvements in speed, JIT techniques are necessary. For Python, -Psyco gives typical speedups of 2 to 4 times - up to 100 times in algorithmic -examples. It has come to a dead end because of the difficulty and huge costs -associated with developing and maintaining it. It has a relatively poor -encoding of language semantics - knowledge about Python behavior needs to be -encoded by hand and kept up-to-date. At least, Psyco works correctly even when -encountering one of the numerous Python constructs it does not support, by -falling back to CPython. The PyPy JIT started out as a metaprogrammatic, -non-language-specific equivalent of Psyco. - -A different kind of prior art are self-hosting JIT compilers such as Jikes. -Jikes is a JIT compiler for Java written in Java. 
It has a poor encoding of -language semantics; it would take an enormous amount of work to encode all the -details of a Python-like language directly into a JIT compiler. It also has -limited portability, which is an issue for Python; it is likely that large -parts of the JIT compiler would need retargetting in order to run in a -different environment than the intended low-level one. - -Simply reusing an existing well-tuned JIT like that of the JVM does not -really work, because of concept mismatches between the implementor's -language and the host VM language: the former needs to be compiled to -the target environment in such a way that the JIT is able to speed it up -significantly - an approach which essentially has failed in Python so -far: even though CPython is a simple interpreter, its Java and .NET -re-implementations are not significantly faster. - -More recently, several larger projects have started in the JIT area. For -instance, Sun Microsystems is investing in JRuby, which aims to use the Java -Hotspot JIT to improve the performance of Ruby. However, this requires a lot of -hand crafting and will only provide speedups for one language on one platform. -Some issues are delicate, e.g., how to remove the overhead of constantly boxing -and unboxing, typical in dynamic languages. An advantage compared to PyPy is -that there are some hand optimizations that can be performed, that do not fit -in the metaprogramming approach. But metaprogramming makes the PyPy JIT -reusable for many different languages on many different execution platforms. -It is also possible to combine the approaches - we can get substantial speedups -using our JIT and then feed the result to Java's Hotspot JIT for further -improvement. One of us is even a member of the `JSR 292`_ Expert Group -to define additions to the JVM to better support dynamic languages, and -is contributing insights from our JIT research, in ways that will also -benefit PyPy. 
- -Finally, tracing JITs are now emerging for dynamic languages like -JavaScript with TraceMonkey. The code generated by PyPy is very similar -(but not hand-written) to the concepts of tracing JITs. - - -Further reading -======================================================================== - -The description of the current PyPy JIT generator is given in PyJitPl5_ -(draft). - -.. _`JSR 292`: http://jcp.org/en/jsr/detail?id=292 -.. _PyJitPl5: pyjitpl5.html diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. 
This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. - -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). - -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. - - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. 
- -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. - - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. - - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? 
- - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. 
diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Up to Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. -The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. 
- -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. 
Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. 
The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. 
-:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern = W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when 
space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. 
-This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. - -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. 
- -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. - -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. 
_rtyping: ../rtyper.html diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. 
We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. 
_`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). - -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. 
Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). - -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. - -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. 
_`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. 
 - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. 
The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. 
- -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. 
Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. - -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. 
_`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. 
_`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. - - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. 
include:: _ref.txt - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. - - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). - -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. 
So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. - -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... 
- ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) - -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. - -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. 
When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: -.. sectnum:: - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. 
- -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initialize appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows to view -the Virtual's machine bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. -they don't have registers but put object to and pull objects -from a stack. 
The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer a -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to a bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. -See `application level exceptions`_ for some more information -on ``OperationErrors``. - -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code. Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettinger's -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). 
- -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. _`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. 
Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls object - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. _Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. 
Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names passed to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the ``create_frame()`` method. With proper parser and compiler support this would -allow creating custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function. 
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). 
diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - * 
experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. - throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. 
_`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. 
- -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. 
- It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. 
_`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? - [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? 
- [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? - [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? 
that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - 
[10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. 
diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. 
Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. 
_`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. 
- -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. 
- -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. - -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. 
_`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. _`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. 
_`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. _`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. 
- -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. - -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. 
_feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. _`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. 
diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. 
You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. 
- -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. 
- -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: -.. sectnum:: - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. 
_`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. - -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). 
The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. - -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. 
- -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. include:: _ref.txt diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. _mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. 
PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. -It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. 
- -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. 
We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. 
- -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. 
It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. 
-That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. 
It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. 
Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) 
- -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. 
Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will checks for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will be translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. 
_astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. 
- - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly a interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. 
In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. 
Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. -here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. 
- -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. 
- -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... - -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. 
- -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. -(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. 
For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. - -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. 
_`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. 
If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while your test code -runs at application level. If you need to use modules -you have to import them within the test function. - -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b": 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. 
_`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. _`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. 
diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. 
for .NET: System.Collections.ArrayList) - - - external instances (e.g. for js: window, window.document) - - - external functions? (they are not needed for .NET and JVM, maybe - for js?) - -External objects should behave as much as possible as "internal -objects". - -Moreover, we want to preserve the possibility of *testing* RPython -programs on top of CPython if possible. For example, it should be -possible to test RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as far as I know. - - -How to represent types ---------------------- - -First, some definitions: - - - high-level types are the types used by the annotator - (SomeInteger() & co.) - - - low-level types are the types used by the rtyper (Signed & co.) - - - platform-level types are the types used by the backends (e.g. int32 for - .NET) - -Usually, RPython types are described "top-down": we start from the -annotation, then the rtyper transforms the high-level types into -low-level types, then the backend transforms low-level types into -platform-level types. E.g. for .NET, SomeInteger() -> Signed -> int32. - -External objects are different: we *already* know the platform-level -types of our objects and we can't modify them. What we need to do is -to specify an annotation that after the high-level -> low-level -> -platform-level transformation will give us the correct types. - -For primitive types it is usually easy to find the correct annotation; -if we have an int32, we know that its ootype is Signed and the -corresponding annotation is SomeInteger(). - -For non-primitive types such as classes, we must use a "bottom-up" -approach: first, we need a description of platform-level interface of -the class; then we construct the corresponding low-level type and -teach the backends how to treat such "external types". Finally, we -wrap the low-level types into special "external annotation". 
- -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... } - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... 
- -This is not straightforward because to make the flow objspace happy the -object which represent System.OverflowException must be a real Python -class that inherits from Exception. - -This means that the Python objects which represent external classes -must be Python classes itself, and that classes representing -exceptions must be special cased and made subclasses of Exception. - - -Inheritance -~~~~~~~~~~~ - -It would be nice to allow programmers to inherit from an external -class. Not sure about the implications, though. - -Callbacks -~~~~~~~~~ - -I know that they are an issue for JS, but I don't know how they are -currently implemented. - -Special methods/properties -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In .NET there are special methods that can be accessed using a special -syntax, for example indexer or properties. It would be nice to have in -RPython the same syntax as C#. - - -Implementation details ----------------------- - -The CLI backend use a similar approach right now, but it could be -necessary to rewrite a part of it. - -To represent low-level types, it uses NativeInstance, a subclass of -ootype.Instance that contains all the information needed by the -backend to reference the class (e.g., the namespace). It also supports -overloading. - -For annotations, it reuses SomeOOInstance, which is also a wrapper -around a low-level type but it has been designed for low-level -helpers. It might be saner to use another annotation not to mix apples -and oranges, maybe factoring out common code. - -I don't know whether and how much code can be reused from the existing -bltregistry. diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. 
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. 
_`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. 
_`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. 
_`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. 
It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. 
Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. - -PyPy's ctypes implementation in its current state proves the -feasibility of implementing a module with the same interface and -behavior for PyPy as ctypes for CPython. - -PyPy's implementation internally uses `libffi`_ like CPython's ctypes. -In our implementation as much as possible of the code is written in -full Python, not RPython. 
In CPython's situation, the equivalent would -be to write as little as possible code in C. We essentially favored -rapid experimentation over worrying about speed for this first trial -implementation. This allowed to provide a working implementation with -a large part of ctypes features in 2 months real time. - -We reused the ``ctypes`` package version 1.0.2 as-is from CPython. We -implemented ``_ctypes`` which is a C module in CPython mostly in pure -Python based on a lower-level layer extension module ``_rawffi``. - -.. _`libffi`: http://sources.redhat.com/libffi/ - -Low-level part: ``_rawffi`` -============================ - -This PyPy extension module (``pypy/module/_rawffi``) exposes a simple interface -to create C objects (arrays and structures) and calling functions -in dynamic libraries through libffi. Freeing objects in most cases and making -sure that objects referring to each other are kept alive is responsibility of the higher levels. - -This module uses bindings to libffi which are defined in ``pypy/rlib/libffi.py``. - -We tried to keep this module as small as possible. It is conceivable -that other implementations (e.g. Jython) could use our ctypes -implementation by writing their version of ``_rawffi``. - -High-level parts -================= - -The reused ``ctypes`` package lives in ``lib_pypy/ctypes``. ``_ctypes`` -implementing the same interface as ``_ctypes`` in CPython is in -``lib_pypy/_ctypes``. - -Discussion and limitations -============================= - -Reimplementing ctypes features was in general possible. PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. 
C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. - -If one goes inside the checkout it is possible to run ``_rawffi`` tests with:: - - $ cd pypy - $ python test_all.py module/_rawffi/ - -The ctypes implementation test suite is derived from the tests for -ctypes 1.0.2, we have skipped some tests corresponding to not -implemented features or implementation details, we have also added -some tests. - -To run the test suite a compiled pypy-c is required with the proper configuration. 
To build the required pypy-c one should inside the checkout:: - - $ cd pypy/translator/goal - $ ./translate.py --text --batch --gc=generation targetpypystandalone.py - --withmod-_rawffi --allworkingmodules - -this should produce a pypy-c executable in the ``goal`` directory. - -To run the tests then:: - - $ cd ../../.. # back to pypy-trunk - $ ./pypy/translator/goal/pypy-c pypy/test_all.py lib/pypy1.2/lib_pypy/pypy_test/ctypes_tests - -There should be 36 skipped tests and all other tests should pass. - -Running application examples -============================== - -`pyglet`_ is known to run. We had some success also with pygame-ctypes which is not maintained anymore and with a snapshot of the experimental pysqlite-ctypes. We will only describe how to run the pyglet examples. - -pyglet -------- - -We tried pyglet checking it out from its repository at revision 1984. -For convenience a tarball of the checkout can also be found at: - -http://codespeak.net/~pedronis/pyglet-r1984.tgz - -From pyglet, the following examples are known to work: - - - opengl.py - - multiple_windows.py - - events.py - - html_label.py - - timer.py - - window_platform_event.py - - fixed_resolution.py - -The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.:: - - $ cd pyglet/ - $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py - - -they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave. - -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. 
_`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. _`stackless transform`: ../stackless.html diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. 
diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. 
diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. - -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? 
-------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! - -The mostly likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. - -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. 
PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). - -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. 
_`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. - -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. 
_`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. - -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. 
_`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! - - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. 
What PyPy contains is, on the one hand, a non-soft static type
inferencer for RPython, which is a sublanguage that we defined just so
that it's possible and not too hard to do that; and on the other hand,
for the full Python language, we have an interpreter, and a JIT
generator which can produce a Just-In-Time Compiler from the
interpreter.
RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. -To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. `Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. 
This is similar to writing a C
extension module for CPython in terms of investment of effort (without
all the INCREF/DECREF mess, though).
Of course,
all the functions and stuff used directly and indirectly by your
``entry_point()`` function have to be RPython_.
*Note: these things are experimental and are being implemented on the
`io-improvements`_ branch*
Typical use case is to use a list of characters l and
then ''.join(l) in order to get a string.
Use GCCs profile-guided optimizations. This option specifies the
arguments with which to call pypy-c (and in general the translated
RPython program) to gather profile data.
diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: -.. sectnum:: - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. - - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. 
To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... - >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. 
You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". - -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. 
-You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. 
PyPy development always was and is still thoroughly test-driven.
We use the flexible `py.test testing tool`_ which you can `install independently
`_ and use independently
from PyPy for other projects.
- -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. 
`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users need to
install it if they want to run low-level tests.
You don't necessarily need to install these two libraries because
we also ship them inlined in the PyPy source tree.
_argument.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/argument.py -.. _baseobjspace.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/baseobjspace.py -.. _module.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/module.py -.. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py -.. _typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py -.. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py -.. _mailing lists: index.html -.. _documentation: docindex.html -.. _unit tests: coding-guide.html#test-design - -.. _`directory reference`: docindex.html#directory-reference - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.txt deleted file mode 100644 --- a/pypy/doc/discussion/finalizer-order.txt +++ /dev/null @@ -1,166 +0,0 @@ -Ordering finalizers in the SemiSpace GC -======================================= - -Goal ----- - -After a collection, the SemiSpace GC should call the finalizers on -*some* of the objects that have one and that have become unreachable. -Basically, if there is a reference chain from an object a to an object b -then it should not call the finalizer for b immediately, but just keep b -alive and try again to call its finalizer after the next collection. - -This basic idea fails when there are cycles. It's not a good idea to -keep the objects alive forever or to never call any of the finalizers. -The model we came up with is that in this case, we could just call the -finalizer of one of the objects in the cycle -- but only, of course, if -there are no other objects outside the cycle that has a finalizer and a -reference to the cycle. 
- -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. 
First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
- -In the MiniMark GC, the objects don't move (apart from when they are -copied out of the nursery), but we use the flag GCFLAG_VISITED to mark -objects that survive, so we can also have a single extra bit for -finalizers: - - ===== ============== ============================ - state GCFLAG_VISITED GCFLAG_FINALIZATION_ORDERING - ===== ============== ============================ - 0 no no - 1 no yes - 2 yes yes - 3 yes no - ===== ============== ============================ diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.txt deleted file mode 100644 --- a/pypy/doc/how-to-release.txt +++ /dev/null @@ -1,54 +0,0 @@ -Making a PyPy Release -======================= - -Overview ---------- - -As a meta rule setting up issues in the tracker for items here may help not -forgetting things. A set of todo files may also work. - -Check and prioritize all issues for the release, postpone some if necessary, -create new issues also as necessary. A meeting (or meetings) should be -organized to decide what things are priorities, should go in and work for -the release. - -An important thing is to get the documentation into an up-to-date state! - -Release Steps ----------------- - -* at code freeze make a release branch under - http://codepeak.net/svn/pypy/release/x.y(.z). 
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backend are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? - -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. 
The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. 
The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. - -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). 
This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. - - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. 
- -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. - - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. 
_Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. 
The __del__ methods are -called in "the right" order if they are on objects pointing to each -other, as in CPython, but unlike CPython, if there is a dead cycle of -objects referencing each other, their __del__ methods are called anyway; -CPython would instead put them into the list ``garbage`` of the ``gc`` -module. More information is available on the blog `[1]`__ `[2]`__. - -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html - -Using the default GC called ``minimark``, the built-in function ``id()`` -works like it does in CPython. With other GCs it returns numbers that -are not real addresses (because an object can move around several times) -and calling it a lot can lead to performance problem. - -Note that if you have a long chain of objects, each with a reference to -the next one, and each with a __del__, PyPy's GC will perform badly. On -the bright side, in most other cases, benchmarks have shown that PyPy's -GCs perform much better than CPython's. - -Another difference is that if you add a ``__del__`` to an existing class it will -not be called:: - - >>>> class A(object): - .... pass - .... - >>>> A.__del__ = lambda self: None - __main__:1: RuntimeWarning: a __del__ method added to an existing type will not be called - - -Subclasses of built-in types ----------------------------- - -Officially, CPython has no rule at all for when exactly -overridden method of subclasses of built-in types get -implicitly called or not. As an approximation, these methods -are never called by other built-in methods of the same object. -For example, an overridden ``__getitem__()`` in a subclass of -``dict`` will not be called by e.g. the built-in ``get()`` -method. - -The above is true both in CPython and in PyPy. Differences -can occur about whether a built-in function or method will -call an overridden method of *another* object than ``self``. 
-In PyPy, they are generally always called, whereas not in -CPython. For example, in PyPy, ``dict1.update(dict2)`` -considers that ``dict2`` is just a general mapping object, and -will thus call overridden ``keys()`` and ``__getitem__()`` -methods on it. So the following code prints ``42`` on PyPy -but ``foo`` on CPython:: - - >>>> class D(dict): - .... def __getitem__(self, key): - .... return 42 - .... - >>>> - >>>> d1 = {} - >>>> d2 = D(a='foo') - >>>> d1.update(d2) - >>>> print d1['a'] - 42 - - -Ignored exceptions ------------------------ - -In many corner cases, CPython can silently swallow exceptions. -The precise list of when this occurs is rather long, even -though most cases are very uncommon. The most well-known -places are custom rich comparison methods (like \_\_eq\_\_); -dictionary lookup; calls to some built-in functions like -isinstance(). - -Unless this behavior is clearly present by design and -documented as such (as e.g. for hasattr()), in most cases PyPy -lets the exception propagate instead. - - -Miscellaneous -------------- - -* ``sys.setrecursionlimit()`` is ignored (and not needed) on - PyPy. On CPython it would set the maximum number of nested - calls that can occur before a RuntimeError is raised; on PyPy - overflowing the stack also causes RuntimeErrors, but the limit - is checked at a lower level. (The limit is currently hard-coded - at 768 KB, corresponding to roughly 1480 Python calls on - Linux.) - -* assignment to ``__class__`` is limited to the cases where it - works on CPython 2.5. On CPython 2.6 and 2.7 it works in a bit - more cases, which are not supported by PyPy so far. (If needed, - it could be supported, but then it will likely work in many - *more* case on PyPy than on CPython 2.6/2.7.) - - -.. 
include:: _ref.txt diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... 
- -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. 
- -Config file ----------- -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify it. - -System configuration -------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration ------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). 
- -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... 
A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). 
- -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. 
diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. 
- -Extensible type system for llexternal -------------------------------------- - -llexternal allows the description of a C function, and conveys the same -information about the arguments as a C header. But this is often not enough. -For example, a parameter of type `int*` is converted to -`rffi.CArrayPtr(rffi.INT)`, but this information is not enough to use the -function. The parameter could be an array of int, a reference to a single value, -for input or output... - -A "type system" could hold this additional information, and automatically -generate some conversion code to ease the usage of the function from -rpython. For example:: - - # double frexp(double x, int *exp); - frexp = llexternal("frexp", [rffi.DOUBLE, OutPtr(rffi.int)], rffi.DOUBLE) - -`OutPtr` indicates that the parameter is output-only, which need not to be -initialized, and which *value* is returned to the caller. In rpython the call -becomes:: - - fraction, exponent = frexp(value) - -Also, we could imagine that one item in the llexternal argument list corresponds -to two parameters in C. Here, OutCharBufferN indicates that the caller will pass -a rpython string; the framework will pass buffer and length to the function:: - - # ssize_t write(int fd, const void *buf, size_t count); - write = llexternal("write", [rffi.INT, CharBufferAndSize], rffi.SSIZE_T) - -The rpython code that calls this function is very simple:: - - written = write(fd, data) - -compared with the present:: - - count = len(data) - buf = rffi.get_nonmovingbuffer(data) - try: - written = rffi.cast(lltype.Signed, os_write( - rffi.cast(rffi.INT, fd), - buf, rffi.cast(rffi.SIZE_T, count))) - finally: - rffi.free_nonmovingbuffer(data, buf) - -Typemaps are very useful for large APIs where the same conversions are needed in -many places. 
XXX example diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== -Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! 
- -Object Optimizations -==================== - -String Optimizations -------------------- - -String-Join Objects -+++++++++++++++++++ - -String-join objects are a different implementation of the Python ``str`` type. -They represent the lazy addition of several strings without actually performing -the addition (which involves copying etc.). When the actual value of the string -join object is needed, the addition is performed. This makes it possible to -perform repeated string additions in a loop without using the -``"".join(list_of_strings)`` pattern. - -You can enable this feature with the :config:`objspace.std.withstrjoin` -option. - -String-Slice Objects -++++++++++++++++++++ - -String-slice objects are another implementation of the Python ``str`` type. -They represent the lazy slicing of a string without actually performing the -slicing (which would involve copying). This is only done for slices of step -one. When the actual value of the string slice object is needed, the slicing -is done (although a lot of string methods don't make this necessary). This -makes string slicing a very efficient operation. It also saves memory in some -cases but can also lead to memory leaks, since the string slice retains a -reference to the original string (to make this a bit less likely, we don't -use lazy slicing when the slice would be much shorter than the original -string. There is also a minimum number of characters below which being lazy -is not saving any time over making the copy). - -You can enable this feature with the :config:`objspace.std.withstrslice` option. - -Ropes -+++++ - -Ropes are a general flexible string implementation, following the paper `"Ropes: -An alternative to Strings."`_ by Boehm, Atkinson and Plass. Strings are -represented as balanced concatenation trees, which makes slicing and -concatenation of huge strings efficient. 
- -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. _`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. 
Multi-dicts are a general way to do that for dictionaries: they -provide generic support for the switching of internal representations for -dicts. - -If you just enable multi-dicts, you get special representations for empty dictionaries -and for string-keyed dictionaries. In addition there are more specialized dictionary -implementations for various purposes (see below). - -This is now the default implementation of dictionaries in the Python interpreter. - -Sharing Dicts -+++++++++++++ - -Sharing dictionaries are a special representation used together with multidicts. -This dict representation is used only for instance dictionaries and tries to -make instance dictionaries use less memory (in fact, in the ideal case the -memory behaviour should be mostly like that of using __slots__). - -The idea is the following: Most instances of the same class have very similar -attributes, and are even adding these keys to the dictionary in the same order -while ``__init__()`` is being executed. That means that all the dictionaries of -these instances look very similar: they have the same set of keys with different -values per instance. What sharing dicts do is store these common keys into a -common structure object and thus save the space in the individual instance -dicts: -the representation of the instance dict contains only a list of values. - -A more advanced version of sharing dicts, called *map dicts,* is available -with the :config:`objspace.std.withmapdict` option. - -Builtin-Shadowing -+++++++++++++++++ - -Usually the calling of builtins in Python requires two dictionary lookups: first -to see whether the current global dictionary contains an object with the same -name, then a lookup in the ``__builtin__`` dictionary. This is somehow -circumvented by storing an often used builtin into a local variable to get -the fast local lookup (which is a rather strange and ugly hack). - -The same problem is solved in a different way by "wary" dictionaries. 
They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). 
For this a special dict -representation is used together with multidicts. This dict representation is -used only for instance dictionaries. The instance dictionary tracks whether an -instance attribute shadows an attribute of its class. This makes method calls -slightly faster in the following way: When calling a method the first thing that -is checked is the class dictionary to find descriptors. Normally, when a method -is found, the instance dictionary is then checked for instance attributes -shadowing the class attribute. If we know that there is no shadowing (since -instance dict tells us that) we can save this lookup on the instance dictionary. - -*This was deprecated and is no longer available.* - - -Method Caching -++++++++++++++ - -Shadow tracking is also an important building block for the method caching -optimization. A method cache is introduced where the result of a method lookup -is stored (which involves potentially many lookups in the base classes of a -class). Entries in the method cache are stored using a hash computed from -the name being looked up, the call site (i.e. the bytecode object and -the current program counter), and a special "version" of the type where the -lookup happens (this version is incremented every time the type or one of its -base classes is changed). On subsequent lookups the cached version can be used, -as long as the instance did not shadow any of its classes attributes. - -You can enable this feature with the :config:`objspace.std.withmethodcache` -option. - -Interpreter Optimizations -========================= - -Special Bytecodes ------------------ - -.. _`lookup method call method`: - -LOOKUP_METHOD & CALL_METHOD -+++++++++++++++++++++++++++ - -An unusual feature of Python's version of object oriented programming is the -concept of a "bound method". While the concept is clean and powerful, the -allocation and initialization of the object is not without its performance cost. 
-We have implemented a pair of bytecodes that alleviate this cost. - -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
- -After pushing the arguments, the layout of the stack in the above -example is as follows (the stack grows upwards): - -+---------------------------------+ -| ``y`` *(2nd arg)* | -+---------------------------------+ -| ``x`` *(1st arg)* | -+---------------------------------+ -| ``obj`` *(im_self)* | -+---------------------------------+ -| ``function object`` *(im_func)* | -+---------------------------------+ - -The ``CALL_METHOD N`` bytecode emulates a bound method call by -inspecting the *im_self* entry in the stack below the ``N`` arguments: -if it is not None, then it is considered to be an additional first -argument in the call to the *im_func* object from the stack. - -You can enable this feature with the :config:`objspace.opcodes.CALL_METHOD` -option. - -.. _`call likely builtin`: - -CALL_LIKELY_BUILTIN -+++++++++++++++++++ - -An often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the builtins dictionary. PyPy approaches this problem at the -implementation level, with the introduction of the new ``CALL_LIKELY_BUILTIN`` -bytecode. This bytecode is produced by the compiler for a call whose target is -the name of a builtin. Since such a syntactic construct is very often actually -invoking the expected builtin at run-time, this information can be used to make -the call to the builtin directly, without going through any dictionary lookup. - -However, it can occur that the name is shadowed by a global name from the -current module. To catch this case, a special dictionary implementation for -multidicts is introduced, which is used for the dictionaries of modules. This -implementation keeps track of which builtin name is shadowed by it. 
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. 
From commits-noreply at bitbucket.org Fri Mar 18 11:52:30 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 11:52:30 +0100 (CET) Subject: [pypy-svn] pypy default: port and adapt test_array_sum from test_pypy_c Message-ID: <20110318105230.04C6B36C205@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42777:6dfd34bb257f Date: 2011-03-18 11:43 +0100 http://bitbucket.org/pypy/pypy/changeset/6dfd34bb257f/ Log: port and adapt test_array_sum from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -946,3 +946,31 @@ return sa """ % (e1, e2) self.run_and_check(src, threshold=400) + + def test_array_sum(self): + def main(): + from array import array + img = array("i", range(128) * 5) * 480 + l, i = 0, 0 + while i < 640 * 480: + l += img[i] + i += 1 + return l + # + log = self.run(main, []) + assert log.result == 19507200 + loop, = log.loops_by_filename(self.filepath) + import pdb;pdb.set_trace() + assert loop.match(""" + i12 = int_lt(i7, 307200) + guard_true(i12, descr=) + # XXX: this is suboptimal, we could avoid this extra guard because i9==307200 + i13 = int_lt(i7, i9) + guard_true(i13, descr=) + i15 = getarrayitem_raw(i10, i7, descr=) + i16 = int_add_ovf(i8, i15) + guard_no_overflow(descr=) + i18 = int_add(i7, 1) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, p6, i18, i16, i9, i10, descr=) + """) From commits-noreply at bitbucket.org Fri Mar 18 11:52:31 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 11:52:31 +0100 (CET) Subject: [pypy-svn] pypy default: remove the XXX, it does not make any sense :-) Message-ID: <20110318105231.7FD8436C205@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42778:cd14b43b01e4 Date: 2011-03-18 11:52 +0100 http://bitbucket.org/pypy/pypy/changeset/cd14b43b01e4/ Log: remove the XXX, it does 
not make any sense :-) diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -964,7 +964,6 @@ assert loop.match(""" i12 = int_lt(i7, 307200) guard_true(i12, descr=) - # XXX: this is suboptimal, we could avoid this extra guard because i9==307200 i13 = int_lt(i7, i9) guard_true(i13, descr=) i15 = getarrayitem_raw(i10, i7, descr=) From commits-noreply at bitbucket.org Fri Mar 18 13:20:45 2011 From: commits-noreply at bitbucket.org (lac) Date: Fri, 18 Mar 2011 13:20:45 +0100 (CET) Subject: [pypy-svn] pypy default: pypy/doc/temp_index.rst was leftover junk from the sphinxification which Message-ID: <20110318122045.2C62C282BA1@codespeak.net> Author: Laura Creighton Branch: Changeset: r42779:f9ce284a688a Date: 2011-03-17 19:23 +0100 http://bitbucket.org/pypy/pypy/changeset/f9ce284a688a/ Log: pypy/doc/temp_index.rst was leftover junk from the sphinxification which should not have been moved. diff --git a/pypy/doc/temp_index.rst b/pypy/doc/temp_index.rst deleted file mode 100644 --- a/pypy/doc/temp_index.rst +++ /dev/null @@ -1,20 +0,0 @@ -.. PyPy documentation master file, created by - sphinx-quickstart on Mon Mar 14 10:44:41 2011. - You can adapt this file completely to your liking, but it should at least - contain the root `toctree` directive. - -Welcome to PyPy's documentation! -================================ - -Contents: - -.. 
toctree:: - :maxdepth: 2 - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` - From commits-noreply at bitbucket.org Fri Mar 18 13:20:45 2011 From: commits-noreply at bitbucket.org (lac) Date: Fri, 18 Mar 2011 13:20:45 +0100 (CET) Subject: [pypy-svn] pypy default: change .txt to .rst Message-ID: <20110318122045.DC30D282BA1@codespeak.net> Author: Laura Creighton Branch: Changeset: r42780:5dfd2fa3a69c Date: 2011-03-18 13:18 +0100 http://bitbucket.org/pypy/pypy/changeset/5dfd2fa3a69c/ Log: change .txt to .rst diff --git a/pypy/config/test/test_pypyoption.py b/pypy/config/test/test_pypyoption.py --- a/pypy/config/test/test_pypyoption.py +++ b/pypy/config/test/test_pypyoption.py @@ -70,6 +70,6 @@ prefix = descr._name c = Config(descr) for path in c.getpaths(include_groups=True): - fn = prefix + "." + path + ".txt" + fn = prefix + "." + path + ".rst" yield check_file_exists, fn From commits-noreply at bitbucket.org Fri Mar 18 13:20:46 2011 From: commits-noreply at bitbucket.org (lac) Date: Fri, 18 Mar 2011 13:20:46 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110318122046.3C508282BAA@codespeak.net> Author: Laura Creighton Branch: Changeset: r42781:53c2426b09b8 Date: 2011-03-18 13:19 +0100 http://bitbucket.org/pypy/pypy/changeset/53c2426b09b8/ Log: merge heads From commits-noreply at bitbucket.org Fri Mar 18 13:32:24 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Fri, 18 Mar 2011 13:32:24 +0100 (CET) Subject: [pypy-svn] pypy default: translator: remove unused list instanciation in the isinstance simplifier Message-ID: <20110318123224.1DF0736C20C@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42782:868496e73f01 Date: 2011-03-18 13:30 +0100 http://bitbucket.org/pypy/pypy/changeset/868496e73f01/ Log: translator: remove unused list instanciation in the isinstance simplifier diff --git a/pypy/translator/simplify.py b/pypy/translator/simplify.py --- 
a/pypy/translator/simplify.py +++ b/pypy/translator/simplify.py @@ -81,7 +81,6 @@ return for i in range(len(block.operations) - 1, -1, -1): op = block.operations[i] - insert = [] if op.opname == "isinstance": args = [constant_isinstance, op.args[0], op.args[1]] new_op = SpaceOperation("simple_call", args, op.result) From commits-noreply at bitbucket.org Fri Mar 18 14:09:44 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Fri, 18 Mar 2011 14:09:44 +0100 (CET) Subject: [pypy-svn] pypy default: fix failing tests after sphinx-merge by removing py.test Message-ID: <20110318130944.98881282BAA@codespeak.net> Author: holger krekel Branch: Changeset: r42783:13ef5b1f6617 Date: 2011-03-18 14:08 +0100 http://bitbucket.org/pypy/pypy/changeset/13ef5b1f6617/ Log: fix failing tests after sphinx-merge by removing py.test support for running documentation diff --git a/pypy/doc/redirections b/pypy/doc/redirections deleted file mode 100644 --- a/pypy/doc/redirections +++ /dev/null @@ -1,10 +0,0 @@ -# please make sure this is evaluable -{ - 'proxy.html': 'objspace-proxies.html#tproxy', - 'news.html': 'index.html', - 'contact.html': 'index.html', - 'home.html': 'index.html', - 'jit.html': 'jit/index.html', - 'standalone-howto.html': 'faq.html#pypy-translation-tool-chain', - 'dynamic-language-translation.html': 'http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf', -} diff --git a/pypy/doc/conftest.py b/pypy/doc/conftest.py deleted file mode 100644 --- a/pypy/doc/conftest.py +++ /dev/null @@ -1,29 +0,0 @@ -import py - -from pypy.config.makerestdoc import register_config_role -docdir = py.path.local(__file__).dirpath() - -pytest_plugins = "pypy.doc.pytest_restdoc" - -def pytest_addoption(parser): - group = parser.getgroup("pypy-doc options") - group.addoption('--pypy-doctests', action="store_true", - dest="pypy_doctests", default=False, - help="enable doctests in .txt files") - group.addoption('--generate-redirections', 
- action="store_true", dest="generateredirections", - default=True, help="Generate redirecting HTML files") - -def pytest_configure(config): - register_config_role(docdir) - -def pytest_doctest_prepare_content(content): - if not py.test.config.getvalue("pypy_doctests"): - py.test.skip("specify --pypy-doctests to run doctests") - l = [] - for line in content.split("\n"): - if line.find('>>>>') != -1: - line = "" - l.append(line) - return "\n".join(l) - diff --git a/pypy/doc/pytest_restdoc.py b/pypy/doc/pytest_restdoc.py deleted file mode 100644 --- a/pypy/doc/pytest_restdoc.py +++ /dev/null @@ -1,434 +0,0 @@ -""" -perform ReST syntax, local and remote reference tests on .rst/.txt files. -""" -import py -import sys, os, re - -def pytest_addoption(parser): - group = parser.getgroup("ReST", "ReST documentation check options") - group.addoption('-R', '--urlcheck', - action="store_true", dest="urlcheck", default=False, - help="urlopen() remote links found in ReST text files.") - group.addoption('--urltimeout', action="store", metavar="secs", - type="int", dest="urlcheck_timeout", default=5, - help="timeout in seconds for remote urlchecks") - group.addoption('--forcegen', - action="store_true", dest="forcegen", default=False, - help="force generation of html files.") - -def pytest_collect_file(path, parent): - if path.ext in (".txt", ".rst"): - project = getproject(path) - if project is not None: - return ReSTFile(path, parent=parent, project=project) - -def getproject(path): - for parent in path.parts(reverse=True): - confrest = parent.join("confrest.py") - if confrest.check(): - Project = confrest.pyimport().Project - return Project(parent) - -class ReSTFile(py.test.collect.File): - def __init__(self, fspath, parent, project): - super(ReSTFile, self).__init__(fspath=fspath, parent=parent) - self.project = project - - def collect(self): - return [ - ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project), - LinkCheckerMaker("checklinks", parent=self), - 
DoctestText("doctest", parent=self), - ] - -def deindent(s, sep='\n'): - leastspaces = -1 - lines = s.split(sep) - for line in lines: - if not line.strip(): - continue - spaces = len(line) - len(line.lstrip()) - if leastspaces == -1 or spaces < leastspaces: - leastspaces = spaces - if leastspaces == -1: - return s - for i, line in enumerate(lines): - if not line.strip(): - lines[i] = '' - else: - lines[i] = line[leastspaces:] - return sep.join(lines) - -class ReSTSyntaxTest(py.test.collect.Item): - def __init__(self, name, parent, project): - super(ReSTSyntaxTest, self).__init__(name=name, parent=parent) - self.project = project - - def reportinfo(self): - return self.fspath, None, "syntax check" - - def runtest(self): - self.restcheck(py.path.svnwc(self.fspath)) - - def restcheck(self, path): - py.test.importorskip("docutils") - self.register_linkrole() - from docutils.utils import SystemMessage - try: - self._checkskip(path, self.project.get_htmloutputpath(path)) - self.project.process(path) - except KeyboardInterrupt: - raise - except SystemExit, error: - if error.message == "ERROR: dot not found": - py.test.skip("system doesn't have graphviz installed") - return - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") - - def register_linkrole(self): - #directive.register_linkrole('api', self.resolve_linkrole) - #directive.register_linkrole('source', self.resolve_linkrole) -# -# # XXX fake sphinx' "toctree" and refs -# directive.register_linkrole('ref', self.resolve_linkrole) - - from docutils.parsers.rst import directives - def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - toctree_directive.content = 1 - toctree_directive.options = {'maxdepth': int, 'glob': directives.flag, - 'hidden': directives.flag} - directives.register_directive('toctree', toctree_directive) - self.register_pygments() - - 
def register_pygments(self): - # taken from pygments-main/external/rst-directive.py - from docutils.parsers.rst import directives - try: - from pygments.formatters import HtmlFormatter - except ImportError: - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - pygments_directive.options = {} - else: - # The default formatter - DEFAULT = HtmlFormatter(noclasses=True) - # Add name -> formatter pairs for every variant you want to use - VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), - } - - from docutils import nodes - - from pygments import highlight - from pygments.lexers import get_lexer_by_name, TextLexer - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight('\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - - pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - directives.register_directive('sourcecode', pygments_directive) - - def resolve_linkrole(self, name, text, check=True): - apigen_relpath = self.project.apigen_relpath - - if name == 'api': - if text == 'py': - return ('py', apigen_relpath + 'api/index.html') - else: - assert text.startswith('py.'), ( - 'api link "%s" does not point to the py package') % (text,) - dotted_name = text - if dotted_name.find('(') > -1: - dotted_name = dotted_name[:text.find('(')] - # remove pkg root - path = dotted_name.split('.')[1:] - dotted_name = '.'.join(path) - obj = py - if check: - for chunk in path: - try: - 
obj = getattr(obj, chunk) - except AttributeError: - raise AssertionError( - 'problem with linkrole :api:`%s`: can not resolve ' - 'dotted name %s' % (text, dotted_name,)) - return (text, apigen_relpath + 'api/%s.html' % (dotted_name,)) - elif name == 'source': - assert text.startswith('py/'), ('source link "%s" does not point ' - 'to the py package') % (text,) - relpath = '/'.join(text.split('/')[1:]) - if check: - pkgroot = py._pydir - abspath = pkgroot.join(relpath) - assert pkgroot.join(relpath).check(), ( - 'problem with linkrole :source:`%s`: ' - 'path %s does not exist' % (text, relpath)) - if relpath.endswith('/') or not relpath: - relpath += 'index.html' - else: - relpath += '.html' - return (text, apigen_relpath + 'source/%s' % (relpath,)) - elif name == 'ref': - return ("", "") - - def _checkskip(self, lpath, htmlpath=None): - if not self.config.getvalue("forcegen"): - lpath = py.path.local(lpath) - if htmlpath is not None: - htmlpath = py.path.local(htmlpath) - if lpath.ext == '.txt': - htmlpath = htmlpath or lpath.new(ext='.html') - if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): - py.test.skip("html file is up to date, use --forcegen to regenerate") - #return [] # no need to rebuild - -class DoctestText(py.test.collect.Item): - def reportinfo(self): - return self.fspath, None, "doctest" - - def runtest(self): - content = self._normalize_linesep() - newcontent = self.config.hook.pytest_doctest_prepare_content(content=content) - if newcontent is not None: - content = newcontent - s = content - l = [] - prefix = '.. 
>>> ' - mod = py.std.types.ModuleType(self.fspath.purebasename) - skipchunk = False - for line in deindent(s).split('\n'): - stripped = line.strip() - if skipchunk and line.startswith(skipchunk): - py.builtin.print_("skipping", line) - continue - skipchunk = False - if stripped.startswith(prefix): - try: - py.builtin.exec_(py.code.Source( - stripped[len(prefix):]).compile(), mod.__dict__) - except ValueError: - e = sys.exc_info()[1] - if e.args and e.args[0] == "skipchunk": - skipchunk = " " * (len(line) - len(line.lstrip())) - else: - raise - else: - l.append(line) - docstring = "\n".join(l) - mod.__doc__ = docstring - failed, tot = py.std.doctest.testmod(mod, verbose=1) - if failed: - py.test.fail("doctest %s: %s failed out of %s" %( - self.fspath, failed, tot)) - - def _normalize_linesep(self): - # XXX quite nasty... but it works (fixes win32 issues) - s = self.fspath.read() - linesep = '\n' - if '\r' in s: - if '\n' not in s: - linesep = '\r' - else: - linesep = '\r\n' - s = s.replace(linesep, '\n') - return s - -class LinkCheckerMaker(py.test.collect.Collector): - def collect(self): - return list(self.genlinkchecks()) - - def genlinkchecks(self): - path = self.fspath - # generating functions + args as single tests - timeout = self.config.getvalue("urlcheck_timeout") - for lineno, line in enumerate(path.readlines()): - line = line.strip() - if line.startswith('.. _'): - if line.startswith('.. 
_`'): - delim = '`:' - else: - delim = ':' - l = line.split(delim, 1) - if len(l) != 2: - continue - tryfn = l[1].strip() - name = "%s:%d" %(tryfn, lineno) - if tryfn.startswith('http:') or tryfn.startswith('https'): - if self.config.getvalue("urlcheck"): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno, timeout), checkfunc=urlcheck) - elif tryfn.startswith('webcal:'): - continue - else: - i = tryfn.find('#') - if i != -1: - checkfn = tryfn[:i] - else: - checkfn = tryfn - if checkfn.strip() and (1 or checkfn.endswith('.html')): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno), checkfunc=localrefcheck) - -class CheckLink(py.test.collect.Item): - def __init__(self, name, parent, args, checkfunc): - super(CheckLink, self).__init__(name, parent) - self.args = args - self.checkfunc = checkfunc - - def runtest(self): - return self.checkfunc(*self.args) - - def reportinfo(self, basedir=None): - return (self.fspath, self.args[2], "checklink: %s" % self.args[0]) - -def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): - old = py.std.socket.getdefaulttimeout() - py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN) - try: - try: - py.builtin.print_("trying remote", tryfn) - py.std.urllib2.urlopen(tryfn) - finally: - py.std.socket.setdefaulttimeout(old) - except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): - e = sys.exc_info()[1] - if getattr(e, 'code', None) in (401, 403): # authorization required, forbidden - py.test.skip("%s: %s" %(tryfn, str(e))) - else: - py.test.fail("remote reference error %r in %s:%d\n%s" %( - tryfn, path.basename, lineno+1, e)) - -def localrefcheck(tryfn, path, lineno): - # assume it should be a file - i = tryfn.find('#') - if tryfn.startswith('javascript:'): - return # don't check JS refs - if i != -1: - anchor = tryfn[i+1:] - tryfn = tryfn[:i] - else: - anchor = '' - fn = path.dirpath(tryfn) - ishtml = fn.ext == '.html' - fn = ishtml and fn.new(ext='.txt') or fn - py.builtin.print_("filename is", fn) - if not 
fn.check(): # not ishtml or not fn.check(): - if not py.path.local(tryfn).check(): # the html could be there - py.test.fail("reference error %r in %s:%d" %( - tryfn, path.basename, lineno+1)) - if anchor: - source = unicode(fn.read(), 'latin1') - source = source.lower().replace('-', ' ') # aehem - - anchor = anchor.replace('-', ' ') - match2 = ".. _`%s`:" % anchor - match3 = ".. _%s:" % anchor - candidates = (anchor, match2, match3) - py.builtin.print_("candidates", repr(candidates)) - for line in source.split('\n'): - line = line.strip() - if line in candidates: - break - else: - py.test.fail("anchor reference error %s#%s in %s:%d" %( - tryfn, anchor, path.basename, lineno+1)) - -if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()): - def log(msg): - print(msg) -else: - def log(msg): - pass - -def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'): - """ return html latin1-encoded document for the given input. - source a ReST-string - sourcepath where to look for includes (basically) - stylesheet path (to be used if any) - """ - from docutils.core import publish_string - kwargs = { - 'stylesheet' : stylesheet, - 'stylesheet_path': None, - 'traceback' : 1, - 'embed_stylesheet': 0, - 'output_encoding' : encoding, - #'halt' : 0, # 'info', - 'halt_level' : 2, - } - # docutils uses os.getcwd() :-( - source_path = os.path.abspath(str(source_path)) - prevdir = os.getcwd() - try: - #os.chdir(os.path.dirname(source_path)) - return publish_string(source, source_path, writer_name='html', - settings_overrides=kwargs) - finally: - os.chdir(prevdir) - -def process(txtpath, encoding='latin1'): - """ process a textfile """ - log("processing %s" % txtpath) - assert txtpath.check(ext='.txt') - if isinstance(txtpath, py.path.svnwc): - txtpath = txtpath.localpath - htmlpath = txtpath.new(ext='.html') - #svninfopath = txtpath.localpath.new(ext='.svninfo') - - style = txtpath.dirpath('style.css') - if style.check(): - stylesheet = style.basename - 
else: - stylesheet = None - content = unicode(txtpath.read(), encoding) - doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding) - htmlpath.open('wb').write(doc) - #log("wrote %r" % htmlpath) - #if txtpath.check(svnwc=1, versioned=1): - # info = txtpath.info() - # svninfopath.dump(info) - -if sys.version_info > (3, 0): - def _uni(s): return s -else: - def _uni(s): - return unicode(s) - -rex1 = re.compile(r'.*(.*).*', re.MULTILINE | re.DOTALL) -rex2 = re.compile(r'.*
(.*)
.*', re.MULTILINE | re.DOTALL) - -def strip_html_header(string, encoding='utf8'): - """ return the content of the body-tag """ - uni = unicode(string, encoding) - for rex in rex1,rex2: - match = rex.search(uni) - if not match: - break - uni = match.group(1) - return uni - -class Project: # used for confrest.py files - def __init__(self, sourcepath): - self.sourcepath = sourcepath - def process(self, path): - return process(path) - def get_htmloutputpath(self, path): - return path.new(ext='html') diff --git a/pypy/doc/test_redirections.py b/pypy/doc/test_redirections.py deleted file mode 100644 --- a/pypy/doc/test_redirections.py +++ /dev/null @@ -1,54 +0,0 @@ - -import py -redir = py.path.local(__file__).dirpath('redirections') - -def checkexist(path): - print "checking", path - assert path.ext == '.html' - assert path.new(ext='.txt').check(file=1) - -def checkredirection(oldname, newname): - print "checking", newname - if not newname.startswith('http://'): - newpath = redir.dirpath(newname.split('#')[0]) - checkexist(newpath) - # HACK: create the redirecting HTML file here... - # XXX obscure fishing - if py.test.config.option.generateredirections and '#' not in oldname: - generate_redirection(oldname, newname) - -def test_eval(): - d = eval(redir.read(mode='r')) - return d - -def test_redirections(): - d = test_eval() - for oldname, newname in d.items(): - yield checkredirection, oldname, newname - -def test_navlist(): - navlist = eval(redir.dirpath('navlist').read()) - for entry in navlist: - yield checkexist, redir.dirpath(entry) - -# ____________________________________________________________ - -def generate_redirection(oldname, newname): - print "redirecting from", oldname - oldpath = redir.dirpath(oldname) - url = newname # relative URL - oldpath.write(""" - - - - - - -

- you should be automatically redirected to - %s -

- - -""" % (url, url, url)) From commits-noreply at bitbucket.org Fri Mar 18 14:12:51 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 14:12:51 +0100 (CET) Subject: [pypy-svn] pypy default: port test_array_intimg from test_pypy_c Message-ID: <20110318131251.AF1EF282BAA@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42784:2b484cf91faf Date: 2011-03-18 14:01 +0100 http://bitbucket.org/pypy/pypy/changeset/2b484cf91faf/ Log: port test_array_intimg from test_pypy_c diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -960,7 +960,6 @@ log = self.run(main, []) assert log.result == 19507200 loop, = log.loops_by_filename(self.filepath) - import pdb;pdb.set_trace() assert loop.match(""" i12 = int_lt(i7, 307200) guard_true(i12, descr=) @@ -973,3 +972,42 @@ --TICK-- jump(p0, p1, p2, p3, p4, p5, p6, i18, i16, i9, i10, descr=) """) + + def test_array_intimg(self): + def main(): + from array import array + img = array('i', range(3)) * (350 * 480) + intimg = array('i', (0,)) * (640 * 480) + l, i = 0, 640 + while i < 640 * 480: + l = l + img[i] + intimg[i] = (intimg[i-640] + l) + i += 1 + return intimg[i - 1] + # + log = self.run(main, []) + assert log.result == 73574560 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i15 = int_lt(i8, 307200) + guard_true(i15, descr=) + i16 = int_lt(i8, i10) + guard_true(i16, descr=) + i18 = getarrayitem_raw(i11, i8, descr=) + i19 = int_add_ovf(i9, i18) + guard_no_overflow(descr=) + i21 = int_sub(i8, 640) + i22 = int_lt(i21, i12) + guard_true(i22, descr=) + i23 = getarrayitem_raw(i13, i21, descr=) + i24 = int_add_ovf(i23, i19) + guard_no_overflow(descr=) + i25 = int_lt(i8, i12) + guard_true(i25, descr=) + # on 64bit, there is a guard checking that i24 actually fits into 32bit + ... 
+ setarrayitem_raw(i13, i8, i30, descr=) + i33 = int_add(i8, 1) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, p6, p7, i33, i19, i10, i11, i12, i13, descr=) + """) From commits-noreply at bitbucket.org Fri Mar 18 14:12:52 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 14:12:52 +0100 (CET) Subject: [pypy-svn] pypy default: improve the reporting (thanks to pytest like _assert magic) Message-ID: <20110318131252.36FC5282BAA@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42785:38a48b8331fe Date: 2011-03-18 14:07 +0100 http://bitbucket.org/pypy/pypy/changeset/38a48b8331fe/ Log: improve the reporting (thanks to pytest like _assert magic) diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -274,8 +274,7 @@ def match_descr(self, descr, exp_descr): if descr == exp_descr or exp_descr == '...': return True - match = exp_descr is not None and re.match(exp_descr, descr) - self._assert(match, "descr mismatch") + self._assert(exp_descr is not None and re.match(exp_descr, descr), "descr mismatch") def _assert(self, cond, message): if not cond: From commits-noreply at bitbucket.org Fri Mar 18 14:12:52 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 14:12:52 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110318131252.6F17D282BAD@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42786:77adefbb81e7 Date: 2011-03-18 14:12 +0100 http://bitbucket.org/pypy/pypy/changeset/77adefbb81e7/ Log: merge heads From commits-noreply at bitbucket.org Fri Mar 18 14:19:08 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 14:19:08 +0100 (CET) Subject: [pypy-svn] pypy default: don't rely on the exact offset Message-ID: <20110318131908.31996282BAA@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42787:7b22560b4e3f Date: 2011-03-18 14:18 
+0100 http://bitbucket.org/pypy/pypy/changeset/7b22560b4e3f/ Log: don't rely on the exact offset diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -663,7 +663,7 @@ p22 = new_with_vtable(19511408) p24 = new_array(1, descr=) p26 = new_with_vtable(ConstClass(W_ListObject)) - p27 = new(descr=) + p27 = new(descr=) p29 = new_array(0, descr=) setfield_gc(p27, p29, descr=) setfield_gc(p26, p27, descr=<.* .*W_ListObject.inst_wrappeditems .*>) From commits-noreply at bitbucket.org Fri Mar 18 14:22:56 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 14:22:56 +0100 (CET) Subject: [pypy-svn] pypy default: fix the test on 32bit, where we get slightly different code Message-ID: <20110318132256.A712C282B8B@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42788:927529207222 Date: 2011-03-18 14:22 +0100 http://bitbucket.org/pypy/pypy/changeset/927529207222/ Log: fix the test on 32bit, where we get slightly different code diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -965,7 +965,7 @@ guard_true(i12, descr=) i13 = int_lt(i7, i9) guard_true(i13, descr=) - i15 = getarrayitem_raw(i10, i7, descr=) + i15 = getarrayitem_raw(i10, i7, descr=<.*ArrayNoLengthDescr>) i16 = int_add_ovf(i8, i15) guard_no_overflow(descr=) i18 = int_add(i7, 1) @@ -993,20 +993,20 @@ guard_true(i15, descr=) i16 = int_lt(i8, i10) guard_true(i16, descr=) - i18 = getarrayitem_raw(i11, i8, descr=) + i18 = getarrayitem_raw(i11, i8, descr=<.*ArrayNoLengthDescr>) i19 = int_add_ovf(i9, i18) guard_no_overflow(descr=) i21 = int_sub(i8, 640) i22 = int_lt(i21, i12) guard_true(i22, descr=) - i23 = getarrayitem_raw(i13, i21, 
descr=) + i23 = getarrayitem_raw(i13, i21, descr=<.*ArrayNoLengthDescr>) i24 = int_add_ovf(i23, i19) guard_no_overflow(descr=) i25 = int_lt(i8, i12) guard_true(i25, descr=) # on 64bit, there is a guard checking that i24 actually fits into 32bit ... - setarrayitem_raw(i13, i8, i30, descr=) + setarrayitem_raw(i13, i8, _, descr=<.*ArrayNoLengthDescr>) i33 = int_add(i8, 1) --TICK-- jump(p0, p1, p2, p3, p4, p5, p6, p7, i33, i19, i10, i11, i12, i13, descr=) From commits-noreply at bitbucket.org Fri Mar 18 15:07:25 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 15:07:25 +0100 (CET) Subject: [pypy-svn] pypy default: (antocuni, hakanardo around): improve the test, and check that we remove a couple of guards thanks to the asserts Message-ID: <20110318140725.A1C42282B8B@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42789:f187f9701d1d Date: 2011-03-18 15:05 +0100 http://bitbucket.org/pypy/pypy/changeset/f187f9701d1d/ Log: (antocuni, hakanardo around): improve the test, and check that we remove a couple of guards thanks to the asserts diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -980,6 +980,9 @@ intimg = array('i', (0,)) * (640 * 480) l, i = 0, 640 while i < 640 * 480: + assert len(img) == 3*350*480 + assert len(intimg) == 640*480 + assert i >= 0 l = l + img[i] intimg[i] = (intimg[i-640] + l) i += 1 @@ -989,25 +992,21 @@ assert log.result == 73574560 loop, = log.loops_by_filename(self.filepath) assert loop.match(""" - i15 = int_lt(i8, 307200) - guard_true(i15, descr=) - i16 = int_lt(i8, i10) - guard_true(i16, descr=) - i18 = getarrayitem_raw(i11, i8, descr=<.*ArrayNoLengthDescr>) - i19 = int_add_ovf(i9, i18) + i13 = int_lt(i8, 307200) + guard_true(i13, descr=) + # the bound check guard on img has been killed (thanks to the 1st and 2nd asserts) + i14 = 
getarrayitem_raw(i10, i8, descr=<.*ArrayNoLengthDescr>) + i15 = int_add_ovf(i9, i14) + guard_no_overflow(descr=) + i17 = int_sub(i8, 640) + # the bound check guard on intimg has been killed (thanks to the 3rd assert) + i18 = getarrayitem_raw(i11, i17, descr=<.*ArrayNoLengthDescr>) + i19 = int_add_ovf(i18, i15) guard_no_overflow(descr=) - i21 = int_sub(i8, 640) - i22 = int_lt(i21, i12) - guard_true(i22, descr=) - i23 = getarrayitem_raw(i13, i21, descr=<.*ArrayNoLengthDescr>) - i24 = int_add_ovf(i23, i19) - guard_no_overflow(descr=) - i25 = int_lt(i8, i12) - guard_true(i25, descr=) - # on 64bit, there is a guard checking that i24 actually fits into 32bit + # on 64bit, there is a guard checking that i19 actually fits into 32bit ... - setarrayitem_raw(i13, i8, _, descr=<.*ArrayNoLengthDescr>) - i33 = int_add(i8, 1) + setarrayitem_raw(i11, i8, _, descr=<.*ArrayNoLengthDescr>) + i28 = int_add(i8, 1) --TICK-- - jump(p0, p1, p2, p3, p4, p5, p6, p7, i33, i19, i10, i11, i12, i13, descr=) + jump(p0, p1, p2, p3, p4, p5, p6, p7, i28, i15, i10, i11, descr=) """) From commits-noreply at bitbucket.org Fri Mar 18 15:07:26 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 18 Mar 2011 15:07:26 +0100 (CET) Subject: [pypy-svn] pypy default: rewrite the test, to kill one guard Message-ID: <20110318140726.38E25282B8B@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42790:263e6f36aa8d Date: 2011-03-18 15:06 +0100 http://bitbucket.org/pypy/pypy/changeset/263e6f36aa8d/ Log: rewrite the test, to kill one guard diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -952,7 +952,7 @@ from array import array img = array("i", range(128) * 5) * 480 l, i = 0, 0 - while i < 640 * 480: + while i < len(img): l += img[i] i += 1 return l @@ -961,13 +961,11 @@ assert log.result == 19507200 loop, = 
log.loops_by_filename(self.filepath) assert loop.match(""" - i12 = int_lt(i7, 307200) - guard_true(i12, descr=) i13 = int_lt(i7, i9) - guard_true(i13, descr=) + guard_true(i13, descr=) i15 = getarrayitem_raw(i10, i7, descr=<.*ArrayNoLengthDescr>) i16 = int_add_ovf(i8, i15) - guard_no_overflow(descr=) + guard_no_overflow(descr=) i18 = int_add(i7, 1) --TICK-- jump(p0, p1, p2, p3, p4, p5, p6, i18, i16, i9, i10, descr=) From commits-noreply at bitbucket.org Fri Mar 18 15:18:22 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Fri, 18 Mar 2011 15:18:22 +0100 (CET) Subject: [pypy-svn] pypy default: translator: remove unused list/dict instances in the c funcgen Message-ID: <20110318141822.9B90C282B8B@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42791:467cb1d0f61c Date: 2011-03-18 15:16 +0100 http://bitbucket.org/pypy/pypy/changeset/467cb1d0f61c/ Log: translator: remove unused list/dict instances in the c funcgen diff --git a/pypy/translator/c/funcgen.py b/pypy/translator/c/funcgen.py --- a/pypy/translator/c/funcgen.py +++ b/pypy/translator/c/funcgen.py @@ -299,7 +299,6 @@ def gen_link(self, link): "Generate the code to jump across the given Link." - is_alive = {} assignments = [] for a1, a2 in zip(link.args, link.target.inputargs): a2type, a2typename = self.illtypes[a2] @@ -808,7 +807,6 @@ from pypy.rpython.lltypesystem.rstr import STR msg = op.args[0] assert msg.concretetype == Ptr(STR) - argv = [] if isinstance(msg, Constant): msg = c_string_constant(''.join(msg.value.chars)) else: From commits-noreply at bitbucket.org Fri Mar 18 16:48:15 2011 From: commits-noreply at bitbucket.org (arigo) Date: Fri, 18 Mar 2011 16:48:15 +0100 (CET) Subject: [pypy-svn] pypy enable-opts: Close branch. Message-ID: <20110318154815.7ECED282B8B@codespeak.net> Author: Armin Rigo Branch: enable-opts Changeset: r42792:85eb5570280a Date: 2011-03-18 11:47 -0400 http://bitbucket.org/pypy/pypy/changeset/85eb5570280a/ Log: Close branch. 
From commits-noreply at bitbucket.org Fri Mar 18 18:36:12 2011 From: commits-noreply at bitbucket.org (etrepum) Date: Fri, 18 Mar 2011 18:36:12 +0100 (CET) Subject: [pypy-svn] pypy default: fix up get_total_memory on darwin to clip at addressable_size Message-ID: <20110318173612.1CE09282B8B@codespeak.net> Author: Bob Ippolito Branch: Changeset: r42796:eec5b51676ae Date: 2011-03-18 10:35 -0700 http://bitbucket.org/pypy/pypy/changeset/eec5b51676ae/ Log: fix up get_total_memory on darwin to clip at addressable_size diff --git a/pypy/rpython/memory/gc/test/test_env.py b/pypy/rpython/memory/gc/test/test_env.py --- a/pypy/rpython/memory/gc/test/test_env.py +++ b/pypy/rpython/memory/gc/test/test_env.py @@ -15,6 +15,15 @@ assert x == y assert type(x) == type(y) +def test_get_total_memory_darwin(): + # this only tests clipping + BIG = 2 * env.addressable_size + SMALL = env.addressable_size / 2 + assert env.addressable_size == env.get_total_memory_darwin(0) + assert env.addressable_size == env.get_total_memory_darwin(-1) + assert env.addressable_size == env.get_total_memory_darwin(BIG) + assert SMALL == env.get_total_memory_darwin(SMALL) + def test_get_total_memory(): # total memory should be at least a megabyte assert env.get_total_memory() > 1024*1024 diff --git a/pypy/rpython/memory/gc/env.py b/pypy/rpython/memory/gc/env.py --- a/pypy/rpython/memory/gc/env.py +++ b/pypy/rpython/memory/gc/env.py @@ -95,13 +95,26 @@ return result +def get_total_memory_darwin(result): + debug_start("gc-hardware") + if result <= 0: + debug_print("get_total_memory() failed") + result = addressable_size + else: + debug_print("memtotal = ", result) + if result > addressable_size: + result = addressable_size + debug_stop("gc-hardware") + return result + + if sys.platform == 'linux2': def get_total_memory(): return get_total_memory_linux2('/proc/meminfo') elif sys.platform == 'darwin': def get_total_memory(): - return get_darwin_sysctl_signed('hw.memsize') + return 
get_total_memory_darwin(get_darwin_sysctl_signed('hw.memsize')) else: def get_total_memory(): From commits-noreply at bitbucket.org Fri Mar 18 22:06:19 2011 From: commits-noreply at bitbucket.org (pjenvey) Date: Fri, 18 Mar 2011 22:06:19 +0100 (CET) Subject: [pypy-svn] pypy default: fix thinko in _abstract_method_error's error message Message-ID: <20110318210619.B57C636C205@codespeak.net> Author: Philip Jenvey Branch: Changeset: r42797:362d11da6174 Date: 2011-03-18 13:51 -0700 http://bitbucket.org/pypy/pypy/changeset/362d11da6174/ Log: fix thinko in _abstract_method_error's error message diff --git a/pypy/objspace/std/objecttype.py b/pypy/objspace/std/objecttype.py --- a/pypy/objspace/std/objecttype.py +++ b/pypy/objspace/std/objecttype.py @@ -53,7 +53,7 @@ def _abstract_method_error(typ): methods = ", ".join(sorted(typ.__abstractmethods__)) err = "Can't instantiate abstract class %s with abstract methods %s" - raise TypeError(err % (methods, typ.__name__)) + raise TypeError(err % (typ.__name__, methods)) """) _abstract_method_error = app.interphook("_abstract_method_error") From commits-noreply at bitbucket.org Fri Mar 18 23:47:02 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Fri, 18 Mar 2011 23:47:02 +0100 (CET) Subject: [pypy-svn] pypy numpy-exp: Merged default into numpy-exp. Message-ID: <20110318224702.DC8A936C205@codespeak.net> Author: Alex Gaynor Branch: numpy-exp Changeset: r42798:f90ccf766b69 Date: 2011-03-18 17:42 -0500 http://bitbucket.org/pypy/pypy/changeset/f90ccf766b69/ Log: Merged default into numpy-exp. diff --git a/py/_test/parseopt.py b/py/_test/parseopt.py deleted file mode 100644 --- a/py/_test/parseopt.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -thin wrapper around Python's optparse.py -adding some extra checks and ways to systematically -have Environment variables provide default values -for options. 
basic usage: - - >>> parser = Parser() - >>> parser.addoption("--hello", action="store_true", dest="hello") - >>> option, args = parser.parse(['--hello']) - >>> option.hello - True - >>> args - [] - -""" -import py -import optparse - -class Parser: - """ Parser for command line arguments. """ - - def __init__(self, usage=None, processopt=None): - self._anonymous = OptionGroup("custom options", parser=self) - self._groups = [] - self._processopt = processopt - self._usage = usage - self.hints = [] - - def processoption(self, option): - if self._processopt: - if option.dest: - self._processopt(option) - - def addnote(self, note): - self._notes.append(note) - - def getgroup(self, name, description="", after=None): - for group in self._groups: - if group.name == name: - return group - group = OptionGroup(name, description, parser=self) - i = 0 - for i, grp in enumerate(self._groups): - if grp.name == after: - break - self._groups.insert(i+1, group) - return group - - addgroup = getgroup - def addgroup(self, name, description=""): - py.log._apiwarn("1.1", "use getgroup() which gets-or-creates") - return self.getgroup(name, description) - - def addoption(self, *opts, **attrs): - """ add an optparse-style option. 
""" - self._anonymous.addoption(*opts, **attrs) - - def parse(self, args): - optparser = MyOptionParser(self) - groups = self._groups + [self._anonymous] - for group in groups: - if group.options: - desc = group.description or group.name - optgroup = optparse.OptionGroup(optparser, desc) - optgroup.add_options(group.options) - optparser.add_option_group(optgroup) - return optparser.parse_args([str(x) for x in args]) - - def parse_setoption(self, args, option): - parsedoption, args = self.parse(args) - for name, value in parsedoption.__dict__.items(): - setattr(option, name, value) - return args - - -class OptionGroup: - def __init__(self, name, description="", parser=None): - self.name = name - self.description = description - self.options = [] - self.parser = parser - - def addoption(self, *optnames, **attrs): - """ add an option to this group. """ - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=False) - - def _addoption(self, *optnames, **attrs): - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=True) - - def _addoption_instance(self, option, shortupper=False): - if not shortupper: - for opt in option._short_opts: - if opt[0] == '-' and opt[1].islower(): - raise ValueError("lowercase shortoptions reserved") - if self.parser: - self.parser.processoption(option) - self.options.append(option) - - -class MyOptionParser(optparse.OptionParser): - def __init__(self, parser): - self._parser = parser - optparse.OptionParser.__init__(self, usage=parser._usage) - def format_epilog(self, formatter): - hints = self._parser.hints - if hints: - s = "\n".join(["hint: " + x for x in hints]) + "\n" - s = "\n" + s + "\n" - return s - return "" diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of 
two integers a bit. Enabling this option gives small -speedups. diff --git a/py/_test/pycollect.py b/py/_test/pycollect.py deleted file mode 100644 --- a/py/_test/pycollect.py +++ /dev/null @@ -1,399 +0,0 @@ -""" -Python related collection nodes. -""" -import py -import inspect -from py._test.collect import configproperty, warnoldcollect -from py._test import funcargs -from py._code.code import TerminalRepr - -class PyobjMixin(object): - def obj(): - def fget(self): - try: - return self._obj - except AttributeError: - self._obj = obj = self._getobj() - return obj - def fset(self, value): - self._obj = value - return property(fget, fset, None, "underlying python object") - obj = obj() - - def _getobj(self): - return getattr(self.parent.obj, self.name) - - def getmodpath(self, stopatmodule=True, includemodule=False): - """ return python path relative to the containing module. """ - chain = self.listchain() - chain.reverse() - parts = [] - for node in chain: - if isinstance(node, Instance): - continue - name = node.name - if isinstance(node, Module): - assert name.endswith(".py") - name = name[:-3] - if stopatmodule: - if includemodule: - parts.append(name) - break - parts.append(name) - parts.reverse() - s = ".".join(parts) - return s.replace(".[", "[") - - def _getfslineno(self): - try: - return self._fslineno - except AttributeError: - pass - obj = self.obj - # xxx let decorators etc specify a sane ordering - if hasattr(obj, 'place_as'): - obj = obj.place_as - - self._fslineno = py.code.getfslineno(obj) - return self._fslineno - - def reportinfo(self): - fspath, lineno = self._getfslineno() - modpath = self.getmodpath() - return fspath, lineno, modpath - -class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): - Class = configproperty('Class') - Instance = configproperty('Instance') - Function = configproperty('Function') - Generator = configproperty('Generator') - - def funcnamefilter(self, name): - return name.startswith('test') - def classnamefilter(self, 
name): - return name.startswith('Test') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - # NB. we avoid random getattrs and peek in the __dict__ instead - dicts = [getattr(self.obj, '__dict__', {})] - for basecls in inspect.getmro(self.obj.__class__): - dicts.append(basecls.__dict__) - seen = {} - l = [] - for dic in dicts: - for name, obj in dic.items(): - if name in seen: - continue - seen[name] = True - if name[0] != "_": - res = self.makeitem(name, obj) - if res is None: - continue - if not isinstance(res, list): - res = [res] - l.extend(res) - l.sort(key=lambda item: item.reportinfo()[:2]) - return l - - def _deprecated_join(self, name): - if self.__class__.join != py.test.collect.Collector.join: - warnoldcollect() - return self.join(name) - - def makeitem(self, name, obj): - return self.ihook.pytest_pycollect_makeitem( - collector=self, name=name, obj=obj) - - def _istestclasscandidate(self, name, obj): - if self.classnamefilter(name) and \ - inspect.isclass(obj): - if hasinit(obj): - # XXX WARN - return False - return True - - def _genfunctions(self, name, funcobj): - module = self.getparent(Module).obj - clscol = self.getparent(Class) - cls = clscol and clscol.obj or None - metafunc = funcargs.Metafunc(funcobj, config=self.config, - cls=cls, module=module) - gentesthook = self.config.hook.pytest_generate_tests - plugins = funcargs.getplugins(self, withpy=True) - gentesthook.pcall(plugins, metafunc=metafunc) - if not metafunc._calls: - return self.Function(name, parent=self) - l = [] - for callspec in metafunc._calls: - subname = "%s[%s]" %(name, callspec.id) - function = self.Function(name=subname, parent=self, - callspec=callspec, callobj=funcobj) - l.append(function) - return l - -class Module(py.test.collect.File, PyCollectorMixin): - def _getobj(self): - return self._memoizedcall('_obj', self._importtestmodule) - - def _importtestmodule(self): - # we assume we are only called once per module - mod = 
self.fspath.pyimport() - #print "imported test module", mod - self.config.pluginmanager.consider_module(mod) - return mod - - def setup(self): - if getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - if hasattr(self.obj, 'setup_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.setup_module)[0]: - self.obj.setup_module(self.obj) - else: - self.obj.setup_module() - - def teardown(self): - if hasattr(self.obj, 'teardown_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.teardown_module)[0]: - self.obj.teardown_module(self.obj) - else: - self.obj.teardown_module() - -class Class(PyCollectorMixin, py.test.collect.Collector): - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - return [self.Instance(name="()", parent=self)] - - def setup(self): - if getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - setup_class = getattr(self.obj, 'setup_class', None) - if setup_class is not None: - setup_class = getattr(setup_class, 'im_func', setup_class) - setup_class(self.obj) - - def teardown(self): - teardown_class = getattr(self.obj, 'teardown_class', None) - if teardown_class is not None: - teardown_class = getattr(teardown_class, 'im_func', teardown_class) - teardown_class(self.obj) - -class Instance(PyCollectorMixin, py.test.collect.Collector): - def _getobj(self): - return self.parent.obj() - def Function(self): - return 
getattr(self.obj, 'Function', - PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2 - def _keywords(self): - return [] - Function = property(Function) - - #def __repr__(self): - # return "<%s of '%s'>" %(self.__class__.__name__, - # self.parent.obj.__name__) - - def newinstance(self): - self.obj = self._getobj() - return self.obj - -class FunctionMixin(PyobjMixin): - """ mixin for the code common to Function and Generator. - """ - - def setup(self): - """ perform setup for this test function. """ - if inspect.ismethod(self.obj): - name = 'setup_method' - else: - name = 'setup_function' - if isinstance(self.parent, Instance): - obj = self.parent.newinstance() - self.obj = self._getobj() - else: - obj = self.parent.obj - setup_func_or_method = getattr(obj, name, None) - if setup_func_or_method is not None: - setup_func_or_method(self.obj) - - def teardown(self): - """ perform teardown for this test function. """ - if inspect.ismethod(self.obj): - name = 'teardown_method' - else: - name = 'teardown_function' - obj = self.parent.obj - teardown_func_or_meth = getattr(obj, name, None) - if teardown_func_or_meth is not None: - teardown_func_or_meth(self.obj) - - def _prunetraceback(self, traceback): - if hasattr(self, '_obj') and not self.config.option.fulltrace: - code = py.code.Code(self.obj) - path, firstlineno = code.path, code.firstlineno - ntraceback = traceback.cut(path=path, firstlineno=firstlineno) - if ntraceback == traceback: - ntraceback = ntraceback.cut(path=path) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - - def _repr_failure_py(self, excinfo, style="long"): - if excinfo.errisinstance(funcargs.FuncargRequest.LookupError): - fspath, lineno, msg = self.reportinfo() - lines, _ = inspect.getsourcelines(self.obj) - for i, line in enumerate(lines): - if line.strip().startswith('def'): - return FuncargLookupErrorRepr(fspath, lineno, - lines[:i+1], 
str(excinfo.value)) - return super(FunctionMixin, self)._repr_failure_py(excinfo, - style=style) - - def repr_failure(self, excinfo, outerr=None): - assert outerr is None, "XXX outerr usage is deprecated" - return self._repr_failure_py(excinfo, - style=self.config.getvalue("tbstyle")) - - shortfailurerepr = "F" - -class FuncargLookupErrorRepr(TerminalRepr): - def __init__(self, filename, firstlineno, deflines, errorstring): - self.deflines = deflines - self.errorstring = errorstring - self.filename = filename - self.firstlineno = firstlineno - - def toterminal(self, tw): - tw.line() - for line in self.deflines: - tw.line(" " + line.strip()) - for line in self.errorstring.split("\n"): - tw.line(" " + line.strip(), red=True) - tw.line() - tw.line("%s:%d" % (self.filename, self.firstlineno+1)) - -class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): - def collect(self): - # test generators are seen as collectors but they also - # invoke setup/teardown on popular request - # (induced by the common "test_*" naming shared with normal tests) - self.config._setupstate.prepare(self) - l = [] - seen = {} - for i, x in enumerate(self.obj()): - name, call, args = self.getcallargs(x) - if not py.builtin.callable(call): - raise TypeError("%r yielded non callable test %r" %(self.obj, call,)) - if name is None: - name = "[%d]" % i - else: - name = "['%s']" % name - if name in seen: - raise ValueError("%r generated tests with non-unique name %r" %(self, name)) - seen[name] = True - l.append(self.Function(name, self, args=args, callobj=call)) - return l - - def getcallargs(self, obj): - if not isinstance(obj, (tuple, list)): - obj = (obj,) - # explict naming - if isinstance(obj[0], py.builtin._basestring): - name = obj[0] - obj = obj[1:] - else: - name = None - call, args = obj[0], obj[1:] - return name, call, args - - -# -# Test Items -# -_dummy = object() -class Function(FunctionMixin, py.test.collect.Item): - """ a Function Item is responsible for setting 
up - and executing a Python callable test object. - """ - _genid = None - def __init__(self, name, parent=None, args=None, config=None, - callspec=None, callobj=_dummy): - super(Function, self).__init__(name, parent, config=config) - self._args = args - if self._isyieldedfunction(): - assert not callspec, "yielded functions (deprecated) cannot have funcargs" - else: - if callspec is not None: - self.funcargs = callspec.funcargs or {} - self._genid = callspec.id - if hasattr(callspec, "param"): - self._requestparam = callspec.param - else: - self.funcargs = {} - if callobj is not _dummy: - self._obj = callobj - self.function = getattr(self.obj, 'im_func', self.obj) - - def _getobj(self): - name = self.name - i = name.find("[") # parametrization - if i != -1: - name = name[:i] - return getattr(self.parent.obj, name) - - def _isyieldedfunction(self): - return self._args is not None - - def readkeywords(self): - d = super(Function, self).readkeywords() - d.update(py.builtin._getfuncdict(self.obj)) - return d - - def runtest(self): - """ execute the underlying test function. 
""" - self.ihook.pytest_pyfunc_call(pyfuncitem=self) - - def setup(self): - super(Function, self).setup() - if hasattr(self, 'funcargs'): - funcargs.fillfuncargs(self) - - def __eq__(self, other): - try: - return (self.name == other.name and - self._args == other._args and - self.parent == other.parent and - self.obj == other.obj and - getattr(self, '_genid', None) == - getattr(other, '_genid', None) - ) - except AttributeError: - pass - return False - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.parent, self.name)) - -def hasinit(obj): - init = getattr(obj, '__init__', None) - if init: - if init != object.__init__: - return True diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/py/_cmdline/pycountloc.py b/py/_cmdline/pycountloc.py deleted file mode 100755 --- a/py/_cmdline/pycountloc.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# hands on script to compute the non-empty Lines of Code -# for tests and non-test code - -"""\ -py.countloc [PATHS] - -Count (non-empty) lines of python code and number of python files recursively -starting from a list of paths given on the command line (starting from the -current working directory). Distinguish between test files and normal ones and -report them separately. 
-""" -import py - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - (options, args) = parser.parse_args() - countloc(args) - -def nodot(p): - return p.check(dotfile=0) - -class FileCounter(object): - def __init__(self): - self.file2numlines = {} - self.numlines = 0 - self.numfiles = 0 - - def addrecursive(self, directory, fil="*.py", rec=nodot): - for x in directory.visit(fil, rec): - self.addfile(x) - - def addfile(self, fn, emptylines=False): - if emptylines: - s = len(p.readlines()) - else: - s = 0 - for i in fn.readlines(): - if i.strip(): - s += 1 - self.file2numlines[fn] = s - self.numfiles += 1 - self.numlines += s - - def getnumlines(self, fil): - numlines = 0 - for path, value in self.file2numlines.items(): - if fil(path): - numlines += value - return numlines - - def getnumfiles(self, fil): - numfiles = 0 - for path in self.file2numlines: - if fil(path): - numfiles += 1 - return numfiles - -def get_loccount(locations=None): - if locations is None: - localtions = [py.path.local()] - counter = FileCounter() - for loc in locations: - counter.addrecursive(loc, '*.py', rec=nodot) - - def istestfile(p): - return p.check(fnmatch='test_*.py') - isnottestfile = lambda x: not istestfile(x) - - numfiles = counter.getnumfiles(isnottestfile) - numlines = counter.getnumlines(isnottestfile) - numtestfiles = counter.getnumfiles(istestfile) - numtestlines = counter.getnumlines(istestfile) - - return counter, numfiles, numlines, numtestfiles, numtestlines - -def countloc(paths=None): - if not paths: - paths = ['.'] - locations = [py.path.local(x) for x in paths] - (counter, numfiles, numlines, numtestfiles, - numtestlines) = get_loccount(locations) - - items = counter.file2numlines.items() - items.sort(lambda x,y: cmp(x[1], y[1])) - for x, y in items: - print("%3d %30s" % (y,x)) - - print("%30s %3d" %("number of testfiles", numtestfiles)) - print("%30s %3d" %("number of non-empty testlines", numtestlines)) - print("%30s %3d" %("number of files", 
numfiles)) - print("%30s %3d" %("number of non-empty lines", numlines)) - diff --git a/py/_cmdline/pyconvert_unittest.py b/py/_cmdline/pyconvert_unittest.py deleted file mode 100644 --- a/py/_cmdline/pyconvert_unittest.py +++ /dev/null @@ -1,253 +0,0 @@ -import re -import sys - -try: - import parser -except ImportError: - parser = None - -d={} -# d is the dictionary of unittest changes, keyed to the old name -# used by unittest. -# d[old][0] is the new replacement function. -# d[old][1] is the operator you will substitute, or '' if there is none. -# d[old][2] is the possible number of arguments to the unittest -# function. - -# Old Unittest Name new name operator # of args -d['assertRaises'] = ('raises', '', ['Any']) -d['fail'] = ('raise AssertionError', '', [0,1]) -d['assert_'] = ('assert', '', [1,2]) -d['failIf'] = ('assert not', '', [1,2]) -d['assertEqual'] = ('assert', ' ==', [2,3]) -d['failIfEqual'] = ('assert not', ' ==', [2,3]) -d['assertIn'] = ('assert', ' in', [2,3]) -d['assertNotIn'] = ('assert', ' not in', [2,3]) -d['assertNotEqual'] = ('assert', ' !=', [2,3]) -d['failUnlessEqual'] = ('assert', ' ==', [2,3]) -d['assertAlmostEqual'] = ('assert round', ' ==', [2,3,4]) -d['failIfAlmostEqual'] = ('assert not round', ' ==', [2,3,4]) -d['assertNotAlmostEqual'] = ('assert round', ' !=', [2,3,4]) -d['failUnlessAlmostEquals'] = ('assert round', ' ==', [2,3,4]) - -# the list of synonyms -d['failUnlessRaises'] = d['assertRaises'] -d['failUnless'] = d['assert_'] -d['assertEquals'] = d['assertEqual'] -d['assertNotEquals'] = d['assertNotEqual'] -d['assertAlmostEquals'] = d['assertAlmostEqual'] -d['assertNotAlmostEquals'] = d['assertNotAlmostEqual'] - -# set up the regular expressions we will need -leading_spaces = re.compile(r'^(\s*)') # this never fails - -pat = '' -for k in d.keys(): # this complicated pattern to match all unittests - pat += '|' + r'^(\s*)' + 'self.' 
+ k + r'\(' # \tself.whatever( - -old_names = re.compile(pat[1:]) -linesep='\n' # nobody will really try to convert files not read - # in text mode, will they? - - -def blocksplitter(fp): - '''split a file into blocks that are headed by functions to rename''' - - blocklist = [] - blockstring = '' - - for line in fp: - interesting = old_names.match(line) - if interesting : - if blockstring: - blocklist.append(blockstring) - blockstring = line # reset the block - else: - blockstring += line - - blocklist.append(blockstring) - return blocklist - -def rewrite_utest(block): - '''rewrite every block to use the new utest functions''' - - '''returns the rewritten unittest, unless it ran into problems, - in which case it just returns the block unchanged. - ''' - utest = old_names.match(block) - - if not utest: - return block - - old = utest.group(0).lstrip()[5:-1] # the name we want to replace - new = d[old][0] # the name of the replacement function - op = d[old][1] # the operator you will use , or '' if there is none. - possible_args = d[old][2] # a list of the number of arguments the - # unittest function could possibly take. - - if possible_args == ['Any']: # just rename assertRaises & friends - return re.sub('self.'+old, new, block) - - message_pos = possible_args[-1] - # the remaining unittests can have an optional message to print - # when they fail. It is always the last argument to the function. - - try: - indent, argl, trailer = decompose_unittest(old, block) - - except SyntaxError: # but we couldn't parse it! 
- return block - - argnum = len(argl) - if argnum not in possible_args: - # sanity check - this one isn't real either - return block - - elif argnum == message_pos: - message = argl[-1] - argl = argl[:-1] - else: - message = None - - if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail() - string = '' - if message: - message = ' ' + message - - elif message_pos is 4: # assertAlmostEqual & friends - try: - pos = argl[2].lstrip() - except IndexError: - pos = '7' # default if none is specified - string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op ) - - else: # assert_, assertEquals and all the rest - string = ' ' + op.join(argl) - - if message: - string = string + ',' + message - - return indent + new + string + trailer - -def decompose_unittest(old, block): - '''decompose the block into its component parts''' - - ''' returns indent, arglist, trailer - indent -- the indentation - arglist -- the arguments to the unittest function - trailer -- any extra junk after the closing paren, such as #commment - ''' - - indent = re.match(r'(\s*)', block).group() - pat = re.search('self.' 
+ old + r'\(', block) - - args, trailer = get_expr(block[pat.end():], ')') - arglist = break_args(args, []) - - if arglist == ['']: # there weren't any - return indent, [], trailer - - for i in range(len(arglist)): - try: - parser.expr(arglist[i].lstrip('\t ')) - except SyntaxError: - if i == 0: - arglist[i] = '(' + arglist[i] + ')' - else: - arglist[i] = ' (' + arglist[i] + ')' - - return indent, arglist, trailer - -def break_args(args, arglist): - '''recursively break a string into a list of arguments''' - try: - first, rest = get_expr(args, ',') - if not rest: - return arglist + [first] - else: - return [first] + break_args(rest, arglist) - except SyntaxError: - return arglist + [args] - -def get_expr(s, char): - '''split a string into an expression, and the rest of the string''' - - pos=[] - for i in range(len(s)): - if s[i] == char: - pos.append(i) - if pos == []: - raise SyntaxError # we didn't find the expected char. Ick. - - for p in pos: - # make the python parser do the hard work of deciding which comma - # splits the string into two expressions - try: - parser.expr('(' + s[:p] + ')') - return s[:p], s[p+1:] - except SyntaxError: # It's not an expression yet - pass - raise SyntaxError # We never found anything that worked. - - -def main(): - import sys - import py - - usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]" - optparser = py.std.optparse.OptionParser(usage) - - def select_output (option, opt, value, optparser, **kw): - if hasattr(optparser, 'output'): - optparser.error( - 'Cannot combine -s -i and -c options. 
Use one only.') - else: - optparser.output = kw['output'] - - optparser.add_option("-s", "--stdout", action="callback", - callback=select_output, - callback_kwargs={'output':'stdout'}, - help="send your output to stdout") - - optparser.add_option("-i", "--inplace", action="callback", - callback=select_output, - callback_kwargs={'output':'inplace'}, - help="overwrite files in place") - - optparser.add_option("-c", "--copy", action="callback", - callback=select_output, - callback_kwargs={'output':'copy'}, - help="copy files ... fn.py --> fn_cp.py") - - options, args = optparser.parse_args() - - output = getattr(optparser, 'output', 'stdout') - - if output in ['inplace', 'copy'] and not args: - optparser.error( - '-i and -c option require at least one filename') - - if not args: - s = '' - for block in blocksplitter(sys.stdin): - s += rewrite_utest(block) - sys.stdout.write(s) - - else: - for infilename in args: # no error checking to see if we can open, etc. - infile = file(infilename) - s = '' - for block in blocksplitter(infile): - s += rewrite_utest(block) - if output == 'inplace': - outfile = file(infilename, 'w+') - elif output == 'copy': # yes, just go clobber any existing .cp - outfile = file (infilename[:-3]+ '_cp.py', 'w+') - else: - outfile = sys.stdout - - outfile.write(s) - - -if __name__ == '__main__': - main() diff --git a/py/_test/__init__.py b/py/_test/__init__.py deleted file mode 100644 --- a/py/_test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -""" assertion and py.test helper API.""" diff --git a/py/_cmdline/__init__.py b/py/_cmdline/__init__.py deleted file mode 100644 --- a/py/_cmdline/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. 
Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitely maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/py/_path/gateway/channeltest.py b/py/_path/gateway/channeltest.py deleted file mode 100644 --- a/py/_path/gateway/channeltest.py +++ /dev/null @@ -1,65 +0,0 @@ -import threading - - -class PathServer: - - def __init__(self, channel): - self.channel = channel - self.C2P = {} - self.next_id = 0 - threading.Thread(target=self.serve).start() - - def p2c(self, path): - id = self.next_id - self.next_id += 1 - self.C2P[id] = path - return id - - def command_LIST(self, id, *args): - path = self.C2P[id] - answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)] - self.channel.send(answer) - - def command_DEL(self, id): - del self.C2P[id] - - def command_GET(self, id, spec): - path = self.C2P[id] - self.channel.send(path._getbyspec(spec)) - - def command_READ(self, id): - path = self.C2P[id] - self.channel.send(path.read()) - - def command_JOIN(self, id, resultid, *args): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.join(*args) - - def command_DIRPATH(self, id, resultid): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.dirpath() - - def serve(self): - try: - while 1: - msg = self.channel.receive() - meth = getattr(self, 'command_' + msg[0]) - meth(*msg[1:]) - except EOFError: - pass - -if __name__ == '__main__': - import py - gw = execnet.PopenGateway() - channel = gw._channelfactory.new() - srv = 
PathServer(channel) - c = gw.remote_exec(""" - import remotepath - p = remotepath.RemotePath(channel.receive(), channel.receive()) - channel.send(len(p.listdir())) - """) - c.send(channel) - c.send(srv.p2c(py.path.local('/tmp'))) - print(c.receive()) diff --git a/py/_plugin/standalonetemplate.py b/py/_plugin/standalonetemplate.py deleted file mode 100755 --- a/py/_plugin/standalonetemplate.py +++ /dev/null @@ -1,63 +0,0 @@ -#! /usr/bin/env python - -sources = """ - at SOURCES@""" - -import sys -import base64 -import zlib -import imp - -class DictImporter(object): - def __init__(self, sources): - self.sources = sources - - def find_module(self, fullname, path=None): - if fullname in self.sources: - return self - if fullname+'.__init__' in self.sources: - return self - return None - - def load_module(self, fullname): - # print "load_module:", fullname - from types import ModuleType - try: - s = self.sources[fullname] - is_pkg = False - except KeyError: - s = self.sources[fullname+'.__init__'] - is_pkg = True - - co = compile(s, fullname, 'exec') - module = sys.modules.setdefault(fullname, ModuleType(fullname)) - module.__file__ = "%s/%s" % (__file__, fullname) - module.__loader__ = self - if is_pkg: - module.__path__ = [fullname] - - do_exec(co, module.__dict__) - return sys.modules[fullname] - - def get_source(self, name): - res = self.sources.get(name) - if res is None: - res = self.sources.get(name+'.__init__') - return res - -if __name__ == "__main__": - if sys.version_info >= (3,0): - exec("def do_exec(co, loc): exec(co, loc)\n") - import pickle - sources = sources.encode("ascii") # ensure bytes - sources = pickle.loads(zlib.decompress(base64.decodebytes(sources))) - else: - import cPickle as pickle - exec("def do_exec(co, loc): exec co in loc\n") - sources = pickle.loads(zlib.decompress(base64.decodestring(sources))) - - importer = DictImporter(sources) - sys.meta_path.append(importer) - - import py - py.cmdline.pytest() diff --git 
a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/jit/metainterp/executor.py b/pypy/jit/metainterp/executor.py --- a/pypy/jit/metainterp/executor.py +++ b/pypy/jit/metainterp/executor.py @@ -12,6 +12,7 @@ from pypy.jit.metainterp import resoperation from pypy.jit.metainterp.resoperation import rop from pypy.jit.metainterp.blackhole import BlackholeInterpreter, NULL +from pypy.jit.codewriter import longlong # ____________________________________________________________ @@ -29,7 +30,7 @@ else: args_i = None if count_r: args_r = [NULL] * count_r else: args_r = None - if count_f: args_f = [0.0] * count_f + if count_f: args_f = [longlong.ZEROF] * count_f else: args_f = None # fill in the lists count_i = count_r = count_f = 0 @@ -42,7 +43,7 @@ args_r[count_r] = box.getref_base() count_r += 1 elif box.type == FLOAT: - args_f[count_f] = box.getfloat() + args_f[count_f] = box.getfloatstorage() count_f += 1 # get the function address as an integer func = argboxes[0].getint() @@ -62,12 +63,12 @@ metainterp.execute_raised(e) result = NULL return BoxPtr(result) - if rettype == FLOAT: + if rettype == FLOAT or rettype == 'L': try: result = cpu.bh_call_f(func, descr, args_i, args_r, args_f) except Exception, e: metainterp.execute_raised(e) - result = 0.0 + result = longlong.ZEROF return BoxFloat(result) if rettype == VOID: try: @@ -109,7 +110,8 @@ cpu.bh_setarrayitem_gc_r(arraydescr, array, index, itembox.getref_base()) elif arraydescr.is_array_of_floats(): - cpu.bh_setarrayitem_gc_f(arraydescr, array, index, itembox.getfloat()) + cpu.bh_setarrayitem_gc_f(arraydescr, array, index, + itembox.getfloatstorage()) else: cpu.bh_setarrayitem_gc_i(arraydescr, array, index, itembox.getint()) @@ -118,7 +120,8 @@ index = indexbox.getint() assert not 
arraydescr.is_array_of_pointers() if arraydescr.is_array_of_floats(): - cpu.bh_setarrayitem_raw_f(arraydescr, array, index, itembox.getfloat()) + cpu.bh_setarrayitem_raw_f(arraydescr, array, index, + itembox.getfloatstorage()) else: cpu.bh_setarrayitem_raw_i(arraydescr, array, index, itembox.getint()) @@ -157,7 +160,7 @@ if fielddescr.is_pointer_field(): cpu.bh_setfield_gc_r(struct, fielddescr, itembox.getref_base()) elif fielddescr.is_float_field(): - cpu.bh_setfield_gc_f(struct, fielddescr, itembox.getfloat()) + cpu.bh_setfield_gc_f(struct, fielddescr, itembox.getfloatstorage()) else: cpu.bh_setfield_gc_i(struct, fielddescr, itembox.getint()) @@ -166,7 +169,7 @@ if fielddescr.is_pointer_field(): cpu.bh_setfield_raw_r(struct, fielddescr, itembox.getref_base()) elif fielddescr.is_float_field(): - cpu.bh_setfield_raw_f(struct, fielddescr, itembox.getfloat()) + cpu.bh_setfield_raw_f(struct, fielddescr, itembox.getfloatstorage()) else: cpu.bh_setfield_raw_i(struct, fielddescr, itembox.getint()) @@ -353,7 +356,7 @@ argboxes = argboxes[1:] if argtype == 'i': value = argbox.getint() elif argtype == 'r': value = argbox.getref_base() - elif argtype == 'f': value = argbox.getfloat() + elif argtype == 'f': value = argbox.getfloatstorage() newargs = newargs + (value,) assert not argboxes # diff --git a/py/_test/collect.py b/py/_test/collect.py deleted file mode 100644 --- a/py/_test/collect.py +++ /dev/null @@ -1,418 +0,0 @@ -""" -test collection nodes, forming a tree, Items are leafs. 
-""" -import py - -def configproperty(name): - def fget(self): - #print "retrieving %r property from %s" %(name, self.fspath) - return self.config._getcollectclass(name, self.fspath) - return property(fget) - -class HookProxy: - def __init__(self, node): - self.node = node - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - hookmethod = getattr(self.node.config.hook, name) - def call_matching_hooks(**kwargs): - plugins = self.node.config._getmatchingplugins(self.node.fspath) - return hookmethod.pcall(plugins, **kwargs) - return call_matching_hooks - -class Node(object): - """ base class for all Nodes in the collection tree. - Collector subclasses have children, Items are terminal nodes. - """ - def __init__(self, name, parent=None, config=None): - self.name = name - self.parent = parent - self.config = config or parent.config - self.fspath = getattr(parent, 'fspath', None) - self.ihook = HookProxy(self) - - def _reraiseunpicklingproblem(self): - if hasattr(self, '_unpickle_exc'): - py.builtin._reraise(*self._unpickle_exc) - - # - # note to myself: Pickling is uh. 
- # - def __getstate__(self): - return (self.name, self.parent) - def __setstate__(self, nameparent): - name, parent = nameparent - try: - colitems = parent._memocollect() - for colitem in colitems: - if colitem.name == name: - # we are a copy that will not be returned - # by our parent - self.__dict__ = colitem.__dict__ - break - else: - raise ValueError("item %r not found in parent collection %r" %( - name, [x.name for x in colitems])) - except KeyboardInterrupt: - raise - except Exception: - # our parent can't collect us but we want unpickling to - # otherwise continue - self._reraiseunpicklingproblem() will - # reraise the problem - self._unpickle_exc = py.std.sys.exc_info() - self.name = name - self.parent = parent - self.config = parent.config - - def __repr__(self): - if getattr(self.config.option, 'debug', False): - return "<%s %r %0x>" %(self.__class__.__name__, - getattr(self, 'name', None), id(self)) - else: - return "<%s %r>" %(self.__class__.__name__, - getattr(self, 'name', None)) - - # methods for ordering nodes - - def __eq__(self, other): - if not isinstance(other, Node): - return False - return self.name == other.name and self.parent == other.parent - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.name, self.parent)) - - def setup(self): - pass - - def teardown(self): - pass - - def _memoizedcall(self, attrname, function): - exattrname = "_ex_" + attrname - failure = getattr(self, exattrname, None) - if failure is not None: - py.builtin._reraise(failure[0], failure[1], failure[2]) - if hasattr(self, attrname): - return getattr(self, attrname) - try: - res = function() - except (KeyboardInterrupt, SystemExit): - raise - except: - failure = py.std.sys.exc_info() - setattr(self, exattrname, failure) - raise - setattr(self, attrname, res) - return res - - def listchain(self): - """ return list of all parent collectors up to self, - starting from root of collection tree. 
""" - l = [self] - while 1: - x = l[0] - if x.parent is not None and x.parent.parent is not None: - l.insert(0, x.parent) - else: - return l - - def listnames(self): - return [x.name for x in self.listchain()] - - def getparent(self, cls): - current = self - while current and not isinstance(current, cls): - current = current.parent - return current - - def readkeywords(self): - return dict([(x, True) for x in self._keywords()]) - - def _keywords(self): - return [self.name] - - def _skipbykeyword(self, keywordexpr): - """ return True if they given keyword expression means to - skip this collector/item. - """ - if not keywordexpr: - return - chain = self.listchain() - for key in filter(None, keywordexpr.split()): - eor = key[:1] == '-' - if eor: - key = key[1:] - if not (eor ^ self._matchonekeyword(key, chain)): - return True - - def _matchonekeyword(self, key, chain): - elems = key.split(".") - # XXX O(n^2), anyone cares? - chain = [item.readkeywords() for item in chain if item._keywords()] - for start, _ in enumerate(chain): - if start + len(elems) > len(chain): - return False - for num, elem in enumerate(elems): - for keyword in chain[num + start]: - ok = False - if elem in keyword: - ok = True - break - if not ok: - break - if num == len(elems) - 1 and ok: - return True - return False - - def _prunetraceback(self, traceback): - return traceback - - def _repr_failure_py(self, excinfo, style=None): - excinfo.traceback = self._prunetraceback(excinfo.traceback) - # XXX should excinfo.getrepr record all data and toterminal() - # process it? - if style is None: - if self.config.option.tbstyle == "short": - style = "short" - else: - style = "long" - return excinfo.getrepr(funcargs=True, - showlocals=self.config.option.showlocals, - style=style) - - repr_failure = _repr_failure_py - shortfailurerepr = "F" - -class Collector(Node): - """ - Collector instances create children through collect() - and thus iteratively build a tree. 
attributes:: - - parent: attribute pointing to the parent collector - (or None if this is the root collector) - name: basename of this collector object - """ - Directory = configproperty('Directory') - Module = configproperty('Module') - - def collect(self): - """ returns a list of children (items and collectors) - for this collection node. - """ - raise NotImplementedError("abstract") - - def collect_by_name(self, name): - """ return a child matching the given name, else None. """ - for colitem in self._memocollect(): - if colitem.name == name: - return colitem - - def repr_failure(self, excinfo, outerr=None): - """ represent a failure. """ - assert outerr is None, "XXX deprecated" - return self._repr_failure_py(excinfo) - - def _memocollect(self): - """ internal helper method to cache results of calling collect(). """ - return self._memoizedcall('_collected', self.collect) - - # ********************************************************************** - # DEPRECATED METHODS - # ********************************************************************** - - def _deprecated_collect(self): - # avoid recursion: - # collect -> _deprecated_collect -> custom run() -> - # super().run() -> collect - attrname = '_depcollectentered' - if hasattr(self, attrname): - return - setattr(self, attrname, True) - method = getattr(self.__class__, 'run', None) - if method is not None and method != Collector.run: - warnoldcollect(function=method) - names = self.run() - return [x for x in [self.join(name) for name in names] if x] - - def run(self): - """ DEPRECATED: returns a list of names available from this collector. - You can return an empty list. Callers of this method - must take care to catch exceptions properly. - """ - return [colitem.name for colitem in self._memocollect()] - - def join(self, name): - """ DEPRECATED: return a child collector or item for the given name. - If the return value is None there is no such child. 
- """ - return self.collect_by_name(name) - - def _prunetraceback(self, traceback): - if hasattr(self, 'fspath'): - path = self.fspath - ntraceback = traceback.cut(path=self.fspath) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - -class FSCollector(Collector): - def __init__(self, fspath, parent=None, config=None): - fspath = py.path.local(fspath) - super(FSCollector, self).__init__(fspath.basename, parent, config=config) - self.fspath = fspath - - def __getstate__(self): - # RootCollector.getbynames() inserts a directory which we need - # to throw out here for proper re-instantiation - if isinstance(self.parent.parent, RootCollector): - assert self.parent.fspath == self.parent.parent.fspath, self.parent - return (self.name, self.parent.parent) # shortcut - return super(Collector, self).__getstate__() - -class File(FSCollector): - """ base class for collecting tests from a file. """ - -class Directory(FSCollector): - def recfilter(self, path): - if path.check(dir=1, dotfile=0): - return path.basename not in ('CVS', '_darcs', '{arch}') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - l = [] - for path in self.fspath.listdir(sort=True): - res = self.consider(path) - if res is not None: - if isinstance(res, (list, tuple)): - l.extend(res) - else: - l.append(res) - return l - - def consider(self, path): - if self.ihook.pytest_ignore_collect(path=path, config=self.config): - return - if path.check(file=1): - res = self.consider_file(path) - elif path.check(dir=1): - res = self.consider_dir(path) - else: - res = None - if isinstance(res, list): - # throw out identical results - l = [] - for x in res: - if x not in l: - assert x.parent == self, (x.parent, self) - assert x.fspath == path, (x.fspath, path) - l.append(x) - res = l - return res - - def consider_file(self, path): - return self.ihook.pytest_collect_file(path=path, parent=self) - - 
def consider_dir(self, path, usefilters=None): - if usefilters is not None: - py.log._apiwarn("0.99", "usefilters argument not needed") - return self.ihook.pytest_collect_directory(path=path, parent=self) - -class Item(Node): - """ a basic test item. """ - def _deprecated_testexecution(self): - if self.__class__.run != Item.run: - warnoldtestrun(function=self.run) - elif self.__class__.execute != Item.execute: - warnoldtestrun(function=self.execute) - else: - return False - self.run() - return True - - def run(self): - """ deprecated, here because subclasses might call it. """ - return self.execute(self.obj) - - def execute(self, obj): - """ deprecated, here because subclasses might call it. """ - return obj() - - def reportinfo(self): - return self.fspath, None, "" - -def warnoldcollect(function=None): - py.log._apiwarn("1.0", - "implement collector.collect() instead of " - "collector.run() and collector.join()", - stacklevel=2, function=function) - -def warnoldtestrun(function=None): - py.log._apiwarn("1.0", - "implement item.runtest() instead of " - "item.run() and item.execute()", - stacklevel=2, function=function) - - - -class RootCollector(Directory): - def __init__(self, config): - Directory.__init__(self, config.topdir, parent=None, config=config) - self.name = None - - def __repr__(self): - return "" %(self.fspath,) - - def getbynames(self, names): - current = self.consider(self.config.topdir) - while names: - name = names.pop(0) - if name == ".": # special "identity" name - continue - l = [] - for x in current._memocollect(): - if x.name == name: - l.append(x) - elif x.fspath == current.fspath.join(name): - l.append(x) - elif x.name == "()": - names.insert(0, name) - l.append(x) - break - if not l: - raise ValueError("no node named %r below %r" %(name, current)) - current = l[0] - return current - - def totrail(self, node): - chain = node.listchain() - names = [self._getrelpath(chain[0].fspath)] - names += [x.name for x in chain[1:]] - return names - - 
def fromtrail(self, trail): - return self.config._rootcol.getbynames(trail) - - def _getrelpath(self, fspath): - topdir = self.config.topdir - relpath = fspath.relto(topdir) - if not relpath: - if fspath == topdir: - relpath = "." - else: - raise ValueError("%r not relative to topdir %s" - %(self.fspath, topdir)) - return relpath - - def __getstate__(self): - return self.config - - def __setstate__(self, config): - self.__init__(config) diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/rlib/jit.py b/pypy/rlib/jit.py --- a/pypy/rlib/jit.py +++ b/pypy/rlib/jit.py @@ -2,7 +2,7 @@ import sys from pypy.rpython.extregistry import ExtRegistryEntry from pypy.rlib.objectmodel import CDefinedIntSymbolic -from pypy.rlib.objectmodel import keepalive_until_here +from pypy.rlib.objectmodel import keepalive_until_here, specialize from pypy.rlib.unroll import unrolling_iterable from pypy.rlib.nonconst import NonConstant @@ -25,7 +25,14 @@ """ Hint for the JIT possible arguments are: - XXX + + * promote - promote the argument from a variable into a constant + * access_directly - directly access a virtualizable, as a structure + and don't treat it as a virtualizable + * fresh_virtualizable - means that virtualizable was just allocated. + Useful in say Frame.__init__ when we do want + to store things directly on it. Has to come with + access_directly=True """ return x @@ -106,7 +113,7 @@ flags['fresh_virtualizable'] = True s_x = annmodel.SomeInstance(s_x.classdef, s_x.can_be_None, - flags) + flags) return s_x def specialize_call(self, hop, **kwds_i): @@ -194,7 +201,7 @@ # VRefs def virtual_ref(x): - + """Creates a 'vref' object that contains a reference to 'x'. Calls to virtual_ref/virtual_ref_finish must be properly nested. 
The idea is that the object 'x' is supposed to be JITted as a virtual between @@ -256,22 +263,19 @@ class JitHintError(Exception): """Inconsistency in the JIT hints.""" -OPTIMIZER_SIMPLE = 0 -OPTIMIZER_NO_UNROLL = 1 -OPTIMIZER_FULL = 2 - PARAMETERS = {'threshold': 1000, 'trace_eagerness': 200, 'trace_limit': 10000, - 'inlining': False, - 'optimizer': OPTIMIZER_FULL, + 'inlining': 0, 'loop_longevity': 1000, + 'retrace_limit': 5, + 'enable_opts': None, # patched later by optimizeopt/__init__.py } -unroll_parameters = unrolling_iterable(PARAMETERS.keys()) +unroll_parameters = unrolling_iterable(PARAMETERS.items()) # ____________________________________________________________ -class JitDriver: +class JitDriver(object): """Base class to declare fine-grained user control on the JIT. So far, there must be a singleton instance of JitDriver. This style will allow us (later) to support a single RPython program with @@ -324,14 +328,14 @@ # (internal, must receive a constant 'name') assert name in PARAMETERS + @specialize.arg(0, 1) def set_param(self, name, value): """Set one of the tunable JIT parameter.""" - for name1 in unroll_parameters: + for name1, _ in unroll_parameters: if name1 == name: self._set_param(name1, value) return raise ValueError("no such parameter") - set_param._annspecialcase_ = 'specialize:arg(0)' def set_user_param(self, text): """Set the tunable JIT parameters from a user-supplied string @@ -343,12 +347,17 @@ parts = s.split('=') if len(parts) != 2: raise ValueError - try: - value = int(parts[1]) - except ValueError: - raise # re-raise the ValueError (annotator hint) name = parts[0] - self.set_param(name, value) + value = parts[1] + if name == 'enable_opts': + self.set_param('enable_opts', value) + else: + for name1, _ in unroll_parameters: + if name1 == name and name1 != 'enable_opts': + try: + self.set_param(name1, int(value)) + except ValueError: + raise set_user_param._annspecialcase_ = 'specialize:arg(0)' def _make_extregistryentries(self): @@ 
-529,15 +538,24 @@ def compute_result_annotation(self, s_name, s_value): from pypy.annotation import model as annmodel assert s_name.is_constant() - assert annmodel.SomeInteger().contains(s_value) + if s_name.const == 'enable_opts': + assert annmodel.SomeString(can_be_None=True).contains(s_value) + else: + assert annmodel.SomeInteger().contains(s_value) return annmodel.s_None def specialize_call(self, hop): from pypy.rpython.lltypesystem import lltype + from pypy.rpython.lltypesystem.rstr import string_repr + hop.exception_cannot_occur() driver = self.instance.im_self name = hop.args_s[0].const - v_value = hop.inputarg(lltype.Signed, arg=1) + if name == 'enable_opts': + repr = string_repr + else: + repr = lltype.Signed + v_value = hop.inputarg(repr, arg=1) vlist = [hop.inputconst(lltype.Void, "set_param"), hop.inputconst(lltype.Void, driver), hop.inputconst(lltype.Void, name), diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/py/bin/py.cleanup b/py/bin/py.cleanup deleted file mode 100755 --- a/py/bin/py.cleanup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycleanup() \ No newline at end of file diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -486,7 +486,6 @@ #'OOSEND', # ootype operation #'OOSEND_PURE', # ootype operation 'CALL_PURE/*d', # removed before it's passed to the backend - # CALL_PURE(result, func, arg_1,..,arg_n) '_CALL_LAST', '_CANRAISE_LAST', # ----- end of can_raise operations ----- diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. 
diff --git a/py/_plugin/hookspec.py b/py/_plugin/hookspec.py deleted file mode 100644 --- a/py/_plugin/hookspec.py +++ /dev/null @@ -1,172 +0,0 @@ -""" -hook specifications for py.test plugins -""" - -# ------------------------------------------------------------------------- -# Command line and configuration -# ------------------------------------------------------------------------- - -def pytest_namespace(): - "return dict of name->object which will get stored at py.test. namespace" - -def pytest_addoption(parser): - "add optparse-style options via parser.addoption." - -def pytest_addhooks(pluginmanager): - "add hooks via pluginmanager.registerhooks(module)" - -def pytest_configure(config): - """ called after command line options have been parsed. - and all plugins and initial conftest files been loaded. - """ - -def pytest_unconfigure(config): - """ called before test process is exited. """ - -# ------------------------------------------------------------------------- -# collection hooks -# ------------------------------------------------------------------------- - -def pytest_ignore_collect(path, config): - """ return true value to prevent considering this path for collection. - This hook is consulted for all files and directories prior to considering - collection hooks. - """ -pytest_ignore_collect.firstresult = True - -def pytest_collect_directory(path, parent): - """ return Collection node or None for the given path. """ -pytest_collect_directory.firstresult = True - -def pytest_collect_file(path, parent): - """ return Collection node or None for the given path. """ - -def pytest_collectstart(collector): - """ collector starts collecting. """ - -def pytest_collectreport(report): - """ collector finished collecting. """ - -def pytest_deselected(items): - """ called for test items deselected by keyword. """ - -def pytest_make_collect_report(collector): - """ perform a collection and return a collection. 
""" -pytest_make_collect_report.firstresult = True - -# XXX rename to item_collected()? meaning in distribution context? -def pytest_itemstart(item, node=None): - """ test item gets collected. """ - -# ------------------------------------------------------------------------- -# Python test function related hooks -# ------------------------------------------------------------------------- - -def pytest_pycollect_makemodule(path, parent): - """ return a Module collector or None for the given path. - This hook will be called for each matching test module path. - The pytest_collect_file hook needs to be used if you want to - create test modules for files that do not match as a test module. - """ -pytest_pycollect_makemodule.firstresult = True - -def pytest_pycollect_makeitem(collector, name, obj): - """ return custom item/collector for a python object in a module, or None. """ -pytest_pycollect_makeitem.firstresult = True - -def pytest_pyfunc_call(pyfuncitem): - """ call underlying test function. """ -pytest_pyfunc_call.firstresult = True - -def pytest_generate_tests(metafunc): - """ generate (multiple) parametrized calls to a test function.""" - -# ------------------------------------------------------------------------- -# generic runtest related hooks -# ------------------------------------------------------------------------- - -def pytest_runtest_protocol(item): - """ implement fixture, run and report about the given test item. """ -pytest_runtest_protocol.firstresult = True - -def pytest_runtest_setup(item): - """ called before pytest_runtest_call(). """ - -def pytest_runtest_call(item): - """ execute test item. """ - -def pytest_runtest_teardown(item): - """ called after pytest_runtest_call(). """ - -def pytest_runtest_makereport(item, call): - """ make a test report for the given item and call outcome. """ -pytest_runtest_makereport.firstresult = True - -def pytest_runtest_logreport(report): - """ process item test report. 
""" - -# special handling for final teardown - somewhat internal for now -def pytest__teardown_final(session): - """ called before test session finishes. """ -pytest__teardown_final.firstresult = True - -def pytest__teardown_final_logerror(report): - """ called if runtest_teardown_final failed. """ - -# ------------------------------------------------------------------------- -# test session related hooks -# ------------------------------------------------------------------------- - -def pytest_sessionstart(session): - """ before session.main() is called. """ - -def pytest_sessionfinish(session, exitstatus): - """ whole test run finishes. """ - -# ------------------------------------------------------------------------- -# hooks for influencing reporting (invoked from pytest_terminal) -# ------------------------------------------------------------------------- - -def pytest_report_header(config): - """ return a string to be displayed as header info for terminal reporting.""" - -def pytest_report_teststatus(report): - """ return result-category, shortletter and verbose word for reporting.""" -pytest_report_teststatus.firstresult = True - -def pytest_terminal_summary(terminalreporter): - """ add additional section in terminal summary reporting. """ - -def pytest_report_iteminfo(item): - """ return (fspath, lineno, name) for the item. 
- the information is used for result display and to sort tests - """ -pytest_report_iteminfo.firstresult = True - -# ------------------------------------------------------------------------- -# doctest hooks -# ------------------------------------------------------------------------- - -def pytest_doctest_prepare_content(content): - """ return processed content for a given doctest""" -pytest_doctest_prepare_content.firstresult = True - - -# ------------------------------------------------------------------------- -# error handling and internal debugging hooks -# ------------------------------------------------------------------------- - -def pytest_plugin_registered(plugin, manager): - """ a new py lib plugin got registered. """ - -def pytest_plugin_unregistered(plugin): - """ a py lib plugin got unregistered. """ - -def pytest_internalerror(excrepr): - """ called for internal errors. """ - -def pytest_keyboard_interrupt(excinfo): - """ called for keyboard interrupt. """ - -def pytest_trace(category, msg): - """ called for debug info. """ diff --git a/py/_plugin/pytest_genscript.py b/py/_plugin/pytest_genscript.py deleted file mode 100755 --- a/py/_plugin/pytest_genscript.py +++ /dev/null @@ -1,69 +0,0 @@ -#! /usr/bin/env python -""" -generate standalone test script to be distributed along with an application. 
-""" - -import os -import sys -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group.addoption("--genscript", action="store", default=None, - dest="genscript", metavar="path", - help="create standalone py.test script at given target path.") - -def pytest_configure(config): - genscript = config.getvalue("genscript") - if genscript: - import py - mydir = py.path.local(__file__).dirpath() - infile = mydir.join("standalonetemplate.py") - pybasedir = py.path.local(py.__file__).dirpath().dirpath() - genscript = py.path.local(genscript) - main(pybasedir, outfile=genscript, infile=infile) - raise SystemExit(0) - -def main(pybasedir, outfile, infile): - import base64 - import zlib - try: - import pickle - except Importerror: - import cPickle as pickle - - outfile = str(outfile) - infile = str(infile) - assert os.path.isabs(outfile) - os.chdir(str(pybasedir)) - files = [] - for dirpath, dirnames, filenames in os.walk("py"): - for f in filenames: - if not f.endswith(".py"): - continue - - fn = os.path.join(dirpath, f) - files.append(fn) - - name2src = {} - for f in files: - k = f.replace(os.sep, ".")[:-3] - name2src[k] = open(f, "r").read() - - data = pickle.dumps(name2src, 2) - data = zlib.compress(data, 9) - data = base64.encodestring(data) - data = data.decode("ascii") - - exe = open(infile, "r").read() - exe = exe.replace("@SOURCES@", data) - - open(outfile, "w").write(exe) - os.chmod(outfile, 493) # 0755 - sys.stdout.write("generated standalone py.test at %r, have fun!\n" % outfile) - -if __name__=="__main__": - dn = os.path.dirname - here = os.path.abspath(dn(__file__)) # py/plugin/ - pybasedir = dn(dn(here)) - outfile = os.path.join(os.getcwd(), "py.test-standalone") - infile = os.path.join(here, 'standalonetemplate.py') - main(pybasedir, outfile, infile) diff --git a/py/bin/py.convert_unittest b/py/bin/py.convert_unittest deleted file mode 100755 --- a/py/bin/py.convert_unittest +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from 
_findpy import py -py.cmdline.pyconvert_unittest() \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/py/bin/win32/py.svnwcrevert.cmd b/py/bin/win32/py.svnwcrevert.cmd deleted file mode 100644 --- a/py/bin/win32/py.svnwcrevert.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.svnwcrevert" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/py/_code/oldmagic2.py b/py/_code/oldmagic2.py deleted file mode 100644 --- a/py/_code/oldmagic2.py +++ /dev/null @@ -1,6 +0,0 @@ - -import py - -py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2) - -from py.code import _AssertionError as AssertionError diff --git a/py/_plugin/pytest_default.py b/py/_plugin/pytest_default.py deleted file mode 100644 --- a/py/_plugin/pytest_default.py +++ /dev/null @@ -1,131 +0,0 @@ -""" default hooks and general py.test options. 
""" - -import sys -import py - -def pytest_pyfunc_call(__multicall__, pyfuncitem): - if not __multicall__.execute(): - testfunction = pyfuncitem.obj - if pyfuncitem._isyieldedfunction(): - testfunction(*pyfuncitem._args) - else: - funcargs = pyfuncitem.funcargs - testfunction(**funcargs) - -def pytest_collect_file(path, parent): - ext = path.ext - pb = path.purebasename - if pb.startswith("test_") or pb.endswith("_test") or \ - path in parent.config._argfspaths: - if ext == ".py": - return parent.ihook.pytest_pycollect_makemodule( - path=path, parent=parent) - -def pytest_pycollect_makemodule(path, parent): - return parent.Module(path, parent) - -def pytest_funcarg__pytestconfig(request): - """ the pytest config object with access to command line opts.""" - return request.config - -def pytest_ignore_collect(path, config): - ignore_paths = config.getconftest_pathlist("collect_ignore", path=path) - ignore_paths = ignore_paths or [] - excludeopt = config.getvalue("ignore") - if excludeopt: - ignore_paths.extend([py.path.local(x) for x in excludeopt]) - return path in ignore_paths - # XXX more refined would be: - if ignore_paths: - for p in ignore_paths: - if path == p or path.relto(p): - return True - - -def pytest_collect_directory(path, parent): - # XXX reconsider the following comment - # not use parent.Directory here as we generally - # want dir/conftest.py to be able to - # define Directory(dir) already - if not parent.recfilter(path): # by default special ".cvs", ... 
- # check if cmdline specified this dir or a subdir directly - for arg in parent.config._argfspaths: - if path == arg or arg.relto(path): - break - else: - return - Directory = parent.config._getcollectclass('Directory', path) - return Directory(path, parent=parent) - -def pytest_report_iteminfo(item): - return item.reportinfo() - -def pytest_addoption(parser): - group = parser.getgroup("general", "running and selection options") - group._addoption('-x', '--exitfirst', action="store_true", default=False, - dest="exitfirst", - help="exit instantly on first error or failed test."), - group._addoption('--maxfail', metavar="num", - action="store", type="int", dest="maxfail", default=0, - help="exit after first num failures or errors.") - group._addoption('-k', - action="store", dest="keyword", default='', - help="only run test items matching the given " - "space separated keywords. precede a keyword with '-' to negate. " - "Terminate the expression with ':' to treat a match as a signal " - "to run all subsequent tests. 
") - - group = parser.getgroup("collect", "collection") - group.addoption('--collectonly', - action="store_true", dest="collectonly", - help="only collect tests, don't execute them."), - group.addoption("--ignore", action="append", metavar="path", - help="ignore path during collection (multi-allowed).") - group.addoption('--confcutdir', dest="confcutdir", default=None, - metavar="dir", - help="only load conftest.py's relative to specified dir.") - - group = parser.getgroup("debugconfig", - "test process debugging and configuration") - group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", - help="base temporary directory for this test run.") - -def pytest_configure(config): - setsession(config) - # compat - if config.getvalue("exitfirst"): - config.option.maxfail = 1 - -def setsession(config): - val = config.getvalue - if val("collectonly"): - from py._test.session import Session - config.setsessionclass(Session) - -# pycollect related hooks and code, should move to pytest_pycollect.py - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - res = __multicall__.execute() - if res is not None: - return res - if collector._istestclasscandidate(name, obj): - res = collector._deprecated_join(name) - if res is not None: - return res - return collector.Class(name, parent=collector) - elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): - res = collector._deprecated_join(name) - if res is not None: - return res - if is_generator(obj): - # XXX deprecation warning - return collector.Generator(name, parent=collector) - else: - return collector._genfunctions(name, obj) - -def is_generator(func): - try: - return py.code.getrawcode(func).co_flags & 32 # generator function - except AttributeError: # builtin functions have no bytecode - # assume them to not be generators - return False diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ 
b/pypy/rpython/lltypesystem/opimpl.py @@ -227,6 +227,14 @@ assert isinstance(y, int) return x | y +def op_int_xor(x, y): + # used in computing hashes + if isinstance(x, AddressAsInt): x = llmemory.cast_adr_to_int(x.adr) + if isinstance(y, AddressAsInt): y = llmemory.cast_adr_to_int(y.adr) + assert isinstance(x, int) + assert isinstance(y, int) + return x ^ y + def op_int_mul(x, y): assert isinstance(x, (int, llmemory.AddressOffset)) assert isinstance(y, (int, llmemory.AddressOffset)) diff --git a/py/_path/gateway/remotepath.py b/py/_path/gateway/remotepath.py deleted file mode 100644 --- a/py/_path/gateway/remotepath.py +++ /dev/null @@ -1,47 +0,0 @@ -import py, itertools -from py._path import common - -COUNTER = itertools.count() - -class RemotePath(common.PathBase): - sep = '/' - - def __init__(self, channel, id, basename=None): - self._channel = channel - self._id = id - self._basename = basename - self._specs = {} - - def __del__(self): - self._channel.send(('DEL', self._id)) - - def __repr__(self): - return 'RemotePath(%s)' % self.basename - - def listdir(self, *args): - self._channel.send(('LIST', self._id) + args) - return [RemotePath(self._channel, id, basename) - for (id, basename) in self._channel.receive()] - - def dirpath(self): - id = ~COUNTER.next() - self._channel.send(('DIRPATH', self._id, id)) - return RemotePath(self._channel, id) - - def join(self, *args): - id = ~COUNTER.next() - self._channel.send(('JOIN', self._id, id) + args) - return RemotePath(self._channel, id) - - def _getbyspec(self, spec): - parts = spec.split(',') - ask = [x for x in parts if x not in self._specs] - if ask: - self._channel.send(('GET', self._id, ",".join(ask))) - for part, value in zip(ask, self._channel.receive()): - self._specs[part] = value - return [self._specs[x] for x in parts] - - def read(self): - self._channel.send(('READ', self._id)) - return self._channel.receive() diff --git a/pypy/doc/config/objspace.usemodules.parser.txt 
b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -455,12 +455,12 @@ CMP_mi8 = insn(rex_w, '\x83', orbyte(7<<3), mem_reg_plus_const(1), immediate(2, 'b')) CMP_mi32 = insn(rex_w, '\x81', orbyte(7<<3), mem_reg_plus_const(1), immediate(2)) CMP_mi = select_8_or_32_bit_immed(CMP_mi8, CMP_mi32) - CMP_mr = insn(rex_w, '\x39', register(2, 8), mem_reg_plus_const(1)) CMP_ji8 = insn(rex_w, '\x83', '\x3D', immediate(1), immediate(2, 'b')) CMP_ji32 = insn(rex_w, '\x81', '\x3D', immediate(1), immediate(2)) CMP_ji = select_8_or_32_bit_immed(CMP_ji8, CMP_ji32) + CMP_jr = insn(rex_w, '\x39', register(2, 8), '\x05', immediate(1)) CMP32_mi = insn(rex_nw, '\x81', orbyte(7<<3), mem_reg_plus_const(1), immediate(2)) @@ -516,8 +516,8 @@ # XXX: Only here for testing purposes..."as" happens the encode the # registers in the opposite order that we would otherwise do in a - # register-register exchange - XCHG_rr = insn(rex_w, '\x87', register(1), register(2,8), '\xC0') + # register-register exchange. 
+ #XCHG_rr = insn(rex_w, '\x87', register(1), register(2,8), '\xC0') JMP_l = insn('\xE9', relative(1)) JMP_r = insn(rex_nw, '\xFF', orbyte(4<<3), register(1), '\xC0') @@ -551,7 +551,8 @@ MOVD_rx = xmminsn('\x66', rex_w, '\x0F\x7E', register(2, 8), register(1), '\xC0') MOVD_xr = xmminsn('\x66', rex_w, '\x0F\x6E', register(1, 8), register(2), '\xC0') - PMOVMSKB_rx = xmminsn('\x66', rex_nw, '\x0F\xD7', register(1, 8), register(2), '\xC0') + + PSRAD_xi = xmminsn('\x66', rex_nw, '\x0F\x72', register(1), '\xE0', immediate(2, 'b')) # ------------------------------------------------------------ @@ -580,6 +581,8 @@ class X86_32_CodeBuilder(AbstractX86CodeBuilder): WORD = 4 + PMOVMSKB_rx = xmminsn('\x66', rex_nw, '\x0F\xD7', register(1, 8), register(2), '\xC0') + class X86_64_CodeBuilder(AbstractX86CodeBuilder): WORD = 8 @@ -661,7 +664,7 @@ define_modrm_modes('MOVAPD_*x', ['\x66', rex_nw, '\x0F\x29', register(2,8)], regtype='XMM') -define_modrm_modes('XCHG_r*', [rex_w, '\x87', register(1, 8)]) +#define_modrm_modes('XCHG_r*', [rex_w, '\x87', register(1, 8)]) define_modrm_modes('ADDSD_x*', ['\xF2', rex_nw, '\x0F\x58', register(1, 8)], regtype='XMM') define_modrm_modes('ADDPD_x*', ['\x66', rex_nw, '\x0F\x58', register(1, 8)], regtype='XMM') diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to mantain -compatibility with more than one implementation. 
- -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. 
- - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microfost ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. 
- -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus mantaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. 
- -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. 
- -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. - -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've choosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. 
For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowExcepion when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involed because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. - -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentally renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... 
- except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. 
- -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. - -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. 
- -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. _`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. 
diff --git a/py/_plugin/pytest__pytest.py b/py/_plugin/pytest__pytest.py deleted file mode 100644 --- a/py/_plugin/pytest__pytest.py +++ /dev/null @@ -1,101 +0,0 @@ -import py - -from py._test.pluginmanager import HookRelay - -def pytest_funcarg___pytest(request): - return PytestArg(request) - -class PytestArg: - def __init__(self, request): - self.request = request - - def gethookrecorder(self, hook): - hookrecorder = HookRecorder(hook._registry) - hookrecorder.start_recording(hook._hookspecs) - self.request.addfinalizer(hookrecorder.finish_recording) - return hookrecorder - -class ParsedCall: - def __init__(self, name, locals): - assert '_name' not in locals - self.__dict__.update(locals) - self.__dict__.pop('self') - self._name = name - - def __repr__(self): - d = self.__dict__.copy() - del d['_name'] - return "" %(self._name, d) - -class HookRecorder: - def __init__(self, registry): - self._registry = registry - self.calls = [] - self._recorders = {} - - def start_recording(self, hookspecs): - if not isinstance(hookspecs, (list, tuple)): - hookspecs = [hookspecs] - for hookspec in hookspecs: - assert hookspec not in self._recorders - class RecordCalls: - _recorder = self - for name, method in vars(hookspec).items(): - if name[0] != "_": - setattr(RecordCalls, name, self._makecallparser(method)) - recorder = RecordCalls() - self._recorders[hookspec] = recorder - self._registry.register(recorder) - self.hook = HookRelay(hookspecs, registry=self._registry, - prefix="pytest_") - - def finish_recording(self): - for recorder in self._recorders.values(): - self._registry.unregister(recorder) - self._recorders.clear() - - def _makecallparser(self, method): - name = method.__name__ - args, varargs, varkw, default = py.std.inspect.getargspec(method) - if not args or args[0] != "self": - args.insert(0, 'self') - fspec = py.std.inspect.formatargspec(args, varargs, varkw, default) - # we use exec because we want to have early type - # errors on wrong input arguments, using 
- # *args/**kwargs delays this and gives errors - # elsewhere - exec (py.code.compile(""" - def %(name)s%(fspec)s: - self._recorder.calls.append( - ParsedCall(%(name)r, locals())) - """ % locals())) - return locals()[name] - - def getcalls(self, names): - if isinstance(names, str): - names = names.split() - for name in names: - for cls in self._recorders: - if name in vars(cls): - break - else: - raise ValueError("callname %r not found in %r" %( - name, self._recorders.keys())) - l = [] - for call in self.calls: - if call._name in names: - l.append(call) - return l - - def popcall(self, name): - for i, call in enumerate(self.calls): - if call._name == name: - del self.calls[i] - return call - raise ValueError("could not find call %r" %(name, )) - - def getcall(self, name): - l = self.getcalls(name) - assert len(l) == 1, (name, l) - return l[0] - diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. 
diff --git a/py/bin/win32/py.countloc.cmd b/py/bin/win32/py.countloc.cmd deleted file mode 100644 --- a/py/bin/win32/py.countloc.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.countloc" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. 
internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. - -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. 
Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. 
To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possibile -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. 
diff --git a/py/bin/win32/py.which.cmd b/py/bin/win32/py.which.cmd deleted file mode 100644 --- a/py/bin/win32/py.which.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.which" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/py/_plugin/pytest_tmpdir.py b/py/_plugin/pytest_tmpdir.py deleted file mode 100644 --- a/py/_plugin/pytest_tmpdir.py +++ /dev/null @@ -1,22 +0,0 @@ -"""provide temporary directories to test functions. - -usage example:: - - def test_plugin(tmpdir): - tmpdir.join("hello").write("hello") - -.. _`py.path.local`: ../../path.html - -""" -import py - -def pytest_funcarg__tmpdir(request): - """return a temporary directory path object - unique to each test function invocation, - created as a sub directory of the base temporary - directory. The returned object is a `py.path.local`_ - path object. 
- """ - name = request.function.__name__ - x = request.config.mktemp(name, numbered=True) - return x.realpath() diff --git a/py/bin/win32/py.cleanup.cmd b/py/bin/win32/py.cleanup.cmd deleted file mode 100644 --- a/py/bin/win32/py.cleanup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.cleanup" %* \ No newline at end of file diff --git a/pypy/module/micronumpy/numarray.py b/pypy/module/micronumpy/numarray.py --- a/pypy/module/micronumpy/numarray.py +++ b/pypy/module/micronumpy/numarray.py @@ -1,4 +1,3 @@ - from pypy.interpreter.baseobjspace import ObjSpace, W_Root, Wrappable from pypy.interpreter.error import OperationError, operationerrfmt from pypy.interpreter.typedef import TypeDef @@ -16,9 +15,9 @@ class ComputationFrame(object): _virtualizable2_ = ['valuestackdepth', 'valuestack[*]', 'local_pos', 'locals[*]'] - + def __init__(self, input): - self = jit.hint(self, access_directly=True, fresh_virtualizable=True) + self = jit.hint(self, access_directly=True, fresh_virtualizable=True) self.valuestackdepth = 0 self.valuestack = [0.0] * len(input) self.locals = input[:] @@ -79,7 +78,7 @@ return result JITCODES = {} - + class BaseArray(Wrappable): def force(self): bytecode, stack = self.compile() @@ -91,11 +90,9 @@ # otherwise we have to compile new assembler each time, which sucks # (we still have to compile new bytecode, but too bad) return compute(bytecode, stack) - force.unwrap_spec = ['self'] def descr_add(self, space, w_other): return space.wrap(Add(self, w_other)) - descr_add.unwrap_spec = ['self', ObjSpace, W_Root] def compile(self): raise NotImplementedError("abstract base class") @@ -128,6 +125,7 @@ def compile(self): return "l", [self] + @unwrap_spec(item=int) def descr_getitem(self, space, item): if item < 0: raise operationerrfmt(space.w_TypeError, @@ -136,8 +134,8 @@ raise operationerrfmt(space.w_TypeError, '%d above array size', item) return space.wrap(self.storage[item]) - descr_getitem.unwrap_spec = ['self', ObjSpace, int] + 
@unwrap_spec(item=int, value=float) def descr_setitem(self, space, item, value): if item < 0: raise operationerrfmt(space.w_TypeError, @@ -146,7 +144,6 @@ raise operationerrfmt(space.w_TypeError, '%d above array size', item) self.storage[item] = value - descr_setitem.unwrap_spec = ['self', ObjSpace, int, float] def force(self): return self diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasising a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. 
- -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. 
- -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. - -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. 
Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. 
- -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. _`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. 
It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. _`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. 
- - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. include:: _ref.txt - diff --git a/py/_test/session.py b/py/_test/session.py deleted file mode 100644 --- a/py/_test/session.py +++ /dev/null @@ -1,135 +0,0 @@ -""" basic test session implementation. - -* drives collection of tests -* triggers executions of tests -* produces events used by reporting -""" - -import py - -# exitcodes for the command line -EXIT_OK = 0 -EXIT_TESTSFAILED = 1 -EXIT_INTERRUPTED = 2 -EXIT_INTERNALERROR = 3 -EXIT_NOHOSTS = 4 - -# imports used for genitems() -Item = py.test.collect.Item -Collector = py.test.collect.Collector - -class Session(object): - nodeid = "" - class Interrupted(KeyboardInterrupt): - """ signals an interrupted test run. 
""" - __module__ = 'builtins' # for py3 - - def __init__(self, config): - self.config = config - self.pluginmanager = config.pluginmanager # shortcut - self.pluginmanager.register(self) - self._testsfailed = 0 - self._nomatch = False - self.shouldstop = False - - def genitems(self, colitems, keywordexpr=None): - """ yield Items from iterating over the given colitems. """ - if colitems: - colitems = list(colitems) - while colitems: - next = colitems.pop(0) - if isinstance(next, (tuple, list)): - colitems[:] = list(next) + colitems - continue - assert self.pluginmanager is next.config.pluginmanager - if isinstance(next, Item): - remaining = self.filteritems([next]) - if remaining: - self.config.hook.pytest_itemstart(item=next) - yield next - else: - assert isinstance(next, Collector) - self.config.hook.pytest_collectstart(collector=next) - rep = self.config.hook.pytest_make_collect_report(collector=next) - if rep.passed: - for x in self.genitems(rep.result, keywordexpr): - yield x - self.config.hook.pytest_collectreport(report=rep) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - - def filteritems(self, colitems): - """ return items to process (some may be deselected)""" - keywordexpr = self.config.option.keyword - if not keywordexpr or self._nomatch: - return colitems - if keywordexpr[-1] == ":": - keywordexpr = keywordexpr[:-1] - remaining = [] - deselected = [] - for colitem in colitems: - if isinstance(colitem, Item): - if colitem._skipbykeyword(keywordexpr): - deselected.append(colitem) - continue - remaining.append(colitem) - if deselected: - self.config.hook.pytest_deselected(items=deselected) - if self.config.option.keyword.endswith(":"): - self._nomatch = True - return remaining - - def collect(self, colitems): - keyword = self.config.option.keyword - for x in self.genitems(colitems, keyword): - yield x - - def sessionstarts(self): - """ setup any neccessary resources ahead of the test run. 
""" - self.config.hook.pytest_sessionstart(session=self) - - def pytest_runtest_logreport(self, report): - if report.failed: - self._testsfailed += 1 - maxfail = self.config.getvalue("maxfail") - if maxfail and self._testsfailed >= maxfail: - self.shouldstop = "stopping after %d failures" % ( - self._testsfailed) - pytest_collectreport = pytest_runtest_logreport - - def sessionfinishes(self, exitstatus): - """ teardown any resources after a test run. """ - self.config.hook.pytest_sessionfinish( - session=self, - exitstatus=exitstatus, - ) - - def main(self, colitems): - """ main loop for running tests. """ - self.shouldstop = False - self.sessionstarts() - exitstatus = EXIT_OK - try: - self._mainloop(colitems) - if self._testsfailed: - exitstatus = EXIT_TESTSFAILED - self.sessionfinishes(exitstatus=exitstatus) - except KeyboardInterrupt: - excinfo = py.code.ExceptionInfo() - self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo) - exitstatus = EXIT_INTERRUPTED - except: - excinfo = py.code.ExceptionInfo() - self.config.pluginmanager.notify_exception(excinfo) - exitstatus = EXIT_INTERNALERROR - if exitstatus in (EXIT_INTERNALERROR, EXIT_INTERRUPTED): - self.sessionfinishes(exitstatus=exitstatus) - return exitstatus - - def _mainloop(self, colitems): - for item in self.collect(colitems): - if not self.config.option.collectonly: - item.config.hook.pytest_runtest_protocol(item=item) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/py/bin/py.lookup b/py/bin/py.lookup deleted file mode 100755 --- a/py/bin/py.lookup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pylookup() \ No newline at end of file diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/py/_plugin/pytest_junitxml.py b/py/_plugin/pytest_junitxml.py deleted file mode 100644 --- a/py/_plugin/pytest_junitxml.py +++ /dev/null @@ -1,171 +0,0 @@ -""" - logging of test results in JUnit-XML format, for use with Hudson - and build integration servers. Based on initial code from Ross Lawley. 
-""" - -import py -import time - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group.addoption('--junitxml', action="store", dest="xmlpath", - metavar="path", default=None, - help="create junit-xml style report file at given path.") - -def pytest_configure(config): - xmlpath = config.option.xmlpath - if xmlpath: - config._xml = LogXML(xmlpath) - config.pluginmanager.register(config._xml) - -def pytest_unconfigure(config): - xml = getattr(config, '_xml', None) - if xml: - del config._xml - config.pluginmanager.unregister(xml) - -class LogXML(object): - def __init__(self, logfile): - self.logfile = logfile - self.test_logs = [] - self.passed = self.skipped = 0 - self.failed = self.errors = 0 - self._durations = {} - - def _opentestcase(self, report): - node = report.item - d = {'time': self._durations.pop(report.item, "0")} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def _closetestcase(self): - self.test_logs.append("") - - def appendlog(self, fmt, *args): - args = tuple([py.xml.escape(arg) for arg in args]) - self.test_logs.append(fmt % args) - - def append_pass(self, report): - self.passed += 1 - self._opentestcase(report) - self._closetestcase() - - def append_failure(self, report): - self._opentestcase(report) - #msg = str(report.longrepr.reprtraceback.extraline) - if "xfail" in report.keywords: - self.appendlog( - '') - self.skipped += 1 - else: - self.appendlog('%s', - report.longrepr) - self.failed += 1 - self._closetestcase() - - def _opentestcase_collectfailure(self, report): - node = report.collector - d = {'time': '???'} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - 
self.test_logs.append("\n" % " ".join(attrs)) - - def append_collect_failure(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_collect_skipped(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.skipped += 1 - - def append_error(self, report): - self._opentestcase(report) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_skipped(self, report): - self._opentestcase(report) - if "xfail" in report.keywords: - self.appendlog( - '%s', - report.keywords['xfail']) - else: - self.appendlog("") - self._closetestcase() - self.skipped += 1 - - def pytest_runtest_logreport(self, report): - if report.passed: - self.append_pass(report) - elif report.failed: - if report.when != "call": - self.append_error(report) - else: - self.append_failure(report) - elif report.skipped: - self.append_skipped(report) - - def pytest_runtest_call(self, item, __multicall__): - start = time.time() - try: - return __multicall__.execute() - finally: - self._durations[item] = time.time() - start - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.append_collect_failure(report) - else: - self.append_collect_skipped(report) - - def pytest_internalerror(self, excrepr): - self.errors += 1 - data = py.xml.escape(excrepr) - self.test_logs.append( - '\n' - ' ' - '%s' % data) - - def pytest_sessionstart(self, session): - self.suite_start_time = time.time() - - def pytest_sessionfinish(self, session, exitstatus, __multicall__): - if py.std.sys.version_info[0] < 3: - logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8') - else: - logfile = open(self.logfile, 'w', encoding='utf-8') - - suite_stop_time = time.time() - 
suite_time_delta = suite_stop_time - self.suite_start_time - numtests = self.passed + self.failed - logfile.write('') - logfile.write('') - logfile.writelines(self.test_logs) - logfile.write('') - logfile.close() - tw = session.config.pluginmanager.getplugin("terminalreporter")._tw - tw.line() - tw.sep("-", "generated xml file: %s" %(self.logfile)) diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanise described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimisation for list comprehensions in RPython. - diff --git a/py/_plugin/pytest_capture.py b/py/_plugin/pytest_capture.py deleted file mode 100644 --- a/py/_plugin/pytest_capture.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -configurable per-test stdout/stderr capturing mechanisms. - -This plugin captures stdout/stderr output for each test separately. 
-In case of test failures this captured output is shown grouped -togtther with the test. - -The plugin also provides test function arguments that help to -assert stdout/stderr output from within your tests, see the -`funcarg example`_. - - -Capturing of input/output streams during tests ---------------------------------------------------- - -By default ``sys.stdout`` and ``sys.stderr`` are substituted with -temporary streams during the execution of tests and setup/teardown code. -During the whole testing process it will re-use the same temporary -streams allowing to play well with the logging module which easily -takes ownership on these streams. - -Also, 'sys.stdin' is substituted with a file-like "null" object that -does not return any values. This is to immediately error out -on tests that wait on reading something from stdin. - -You can influence output capturing mechanisms from the command line:: - - py.test -s # disable all capturing - py.test --capture=sys # replace sys.stdout/stderr with in-mem files - py.test --capture=fd # point filedescriptors 1 and 2 to temp file - -If you set capturing values in a conftest file like this:: - - # conftest.py - option_capture = 'fd' - -then all tests in that directory will execute with "fd" style capturing. - -sys-level capturing ------------------------------------------- - -Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` -will be replaced with in-memory files (``py.io.TextIO`` to be precise) -that capture writes and decode non-unicode strings to a unicode object -(using a default, usually, UTF-8, encoding). - -FD-level capturing and subprocesses ------------------------------------------- - -The ``fd`` based method means that writes going to system level files -based on the standard file descriptors will be captured, for example -writes such as ``os.write(1, 'hello')`` will be captured properly. -Capturing on fd-level will include output generated from -any subprocesses created during a test. 
- -.. _`funcarg example`: - -Example Usage of the capturing Function arguments ---------------------------------------------------- - -You can use the `capsys funcarg`_ and `capfd funcarg`_ to -capture writes to stdout and stderr streams. Using the -funcargs frees your test from having to care about setting/resetting -the old streams and also interacts well with py.test's own -per-test capturing. Here is an example test function: - -.. sourcecode:: python - - def test_myoutput(capsys): - print ("hello") - sys.stderr.write("world\\n") - out, err = capsys.readouterr() - assert out == "hello\\n" - assert err == "world\\n" - print "next" - out, err = capsys.readouterr() - assert out == "next\\n" - -The ``readouterr()`` call snapshots the output so far - -and capturing will be continued. After the test -function finishes the original streams will -be restored. If you want to capture on -the filedescriptor level you can use the ``capfd`` function -argument which offers the same interface. -""" - -import py -import os - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--capture', action="store", default=None, - metavar="method", type="choice", choices=['fd', 'sys', 'no'], - help="per-test capturing method: one of fd (default)|sys|no.") - group._addoption('-s', action="store_const", const="no", dest="capture", - help="shortcut for --capture=no.") - -def addouterr(rep, outerr): - repr = getattr(rep, 'longrepr', None) - if not hasattr(repr, 'addsection'): - return - for secname, content in zip(["out", "err"], outerr): - if content: - repr.addsection("Captured std%s" % secname, content.rstrip()) - -def pytest_configure(config): - config.pluginmanager.register(CaptureManager(), 'capturemanager') - -class NoCapture: - def startall(self): - pass - def resume(self): - pass - def suspend(self): - return "", "" - -class CaptureManager: - def __init__(self): - self._method2capture = {} - - def _maketempfile(self): - f = 
py.std.tempfile.TemporaryFile() - newf = py.io.dupfile(f, encoding="UTF-8") - return newf - - def _makestringio(self): - return py.io.TextIO() - - def _getcapture(self, method): - if method == "fd": - return py.io.StdCaptureFD(now=False, - out=self._maketempfile(), err=self._maketempfile() - ) - elif method == "sys": - return py.io.StdCapture(now=False, - out=self._makestringio(), err=self._makestringio() - ) - elif method == "no": - return NoCapture() - else: - raise ValueError("unknown capturing method: %r" % method) - - def _getmethod(self, config, fspath): - if config.option.capture: - method = config.option.capture - else: - try: - method = config._conftest.rget("option_capture", path=fspath) - except KeyError: - method = "fd" - if method == "fd" and not hasattr(os, 'dup'): # e.g. jython - method = "sys" - return method - - def resumecapture_item(self, item): - method = self._getmethod(item.config, item.fspath) - if not hasattr(item, 'outerr'): - item.outerr = ('', '') # we accumulate outerr on the item - return self.resumecapture(method) - - def resumecapture(self, method): - if hasattr(self, '_capturing'): - raise ValueError("cannot resume, already capturing with %r" % - (self._capturing,)) - cap = self._method2capture.get(method) - self._capturing = method - if cap is None: - self._method2capture[method] = cap = self._getcapture(method) - cap.startall() - else: - cap.resume() - - def suspendcapture(self, item=None): - self.deactivate_funcargs() - if hasattr(self, '_capturing'): - method = self._capturing - cap = self._method2capture.get(method) - if cap is not None: - outerr = cap.suspend() - del self._capturing - if item: - outerr = (item.outerr[0] + outerr[0], - item.outerr[1] + outerr[1]) - return outerr - return "", "" - - def activate_funcargs(self, pyfuncitem): - if not hasattr(pyfuncitem, 'funcargs'): - return - assert not hasattr(self, '_capturing_funcargs') - self._capturing_funcargs = capturing_funcargs = [] - for name, capfuncarg in 
pyfuncitem.funcargs.items(): - if name in ('capsys', 'capfd'): - capturing_funcargs.append(capfuncarg) - capfuncarg._start() - - def deactivate_funcargs(self): - capturing_funcargs = getattr(self, '_capturing_funcargs', None) - if capturing_funcargs is not None: - while capturing_funcargs: - capfuncarg = capturing_funcargs.pop() - capfuncarg._finalize() - del self._capturing_funcargs - - def pytest_make_collect_report(self, __multicall__, collector): - method = self._getmethod(collector.config, collector.fspath) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - addouterr(rep, outerr) - return rep - - def pytest_runtest_setup(self, item): - self.resumecapture_item(item) - - def pytest_runtest_call(self, item): - self.resumecapture_item(item) - self.activate_funcargs(item) - - def pytest_runtest_teardown(self, item): - self.resumecapture_item(item) - - def pytest__teardown_final(self, __multicall__, session): - method = self._getmethod(session.config, None) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - if rep: - addouterr(rep, outerr) - return rep - - def pytest_keyboard_interrupt(self, excinfo): - if hasattr(self, '_capturing'): - self.suspendcapture() - - def pytest_runtest_makereport(self, __multicall__, item, call): - self.deactivate_funcargs() - rep = __multicall__.execute() - outerr = self.suspendcapture(item) - if not rep.passed: - addouterr(rep, outerr) - if not rep.passed or rep.when == "teardown": - outerr = ('', '') - item.outerr = outerr - return rep - -def pytest_funcarg__capsys(request): - """captures writes to sys.stdout/sys.stderr and makes - them available successively via a ``capsys.readouterr()`` method - which returns a ``(out, err)`` tuple of captured snapshot strings. 
- """ - return CaptureFuncarg(request, py.io.StdCapture) - -def pytest_funcarg__capfd(request): - """captures writes to file descriptors 1 and 2 and makes - snapshotted ``(out, err)`` string tuples available - via the ``capsys.readouterr()`` method. If the underlying - platform does not have ``os.dup`` (e.g. Jython) tests using - this funcarg will automatically skip. - """ - if not hasattr(os, 'dup'): - py.test.skip("capfd funcarg needs os.dup") - return CaptureFuncarg(request, py.io.StdCaptureFD) - - -class CaptureFuncarg: - def __init__(self, request, captureclass): - self._cclass = captureclass - self.capture = self._cclass(now=False) - #request.addfinalizer(self._finalize) - - def _start(self): - self.capture.startall() - - def _finalize(self): - if hasattr(self, 'capture'): - self.capture.reset() - del self.capture - - def readouterr(self): - return self.capture.readouterr() - - def close(self): - self._finalize() diff --git a/py/_plugin/pytest_doctest.py b/py/_plugin/pytest_doctest.py deleted file mode 100644 --- a/py/_plugin/pytest_doctest.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -collect and execute doctests from modules and test files. - -Usage -------------- - -By default all files matching the ``test*.txt`` pattern will -be run through the python standard ``doctest`` module. Issue:: - - py.test --doctest-glob='*.rst' - -to change the pattern. 
Additionally you can trigger running of -tests in all python modules (including regular python test modules):: - - py.test --doctest-modules - -You can also make these changes permanent in your project by -putting them into a conftest.py file like this:: - - # content of conftest.py - option_doctestmodules = True - option_doctestglob = "*.rst" -""" - -import py -from py._code.code import TerminalRepr, ReprFileLocation -import doctest - -def pytest_addoption(parser): - group = parser.getgroup("collect") - group.addoption("--doctest-modules", - action="store_true", default=False, - help="run doctests in all .py modules", - dest="doctestmodules") - group.addoption("--doctest-glob", - action="store", default="test*.txt", metavar="pat", - help="doctests file matching pattern, default: test*.txt", - dest="doctestglob") - -def pytest_collect_file(path, parent): - config = parent.config - if path.ext == ".py": - if config.getvalue("doctestmodules"): - return DoctestModule(path, parent) - elif path.check(fnmatch=config.getvalue("doctestglob")): - return DoctestTextfile(path, parent) - -class ReprFailDoctest(TerminalRepr): - def __init__(self, reprlocation, lines): - self.reprlocation = reprlocation - self.lines = lines - def toterminal(self, tw): - for line in self.lines: - tw.line(line) - self.reprlocation.toterminal(tw) - -class DoctestItem(py.test.collect.Item): - def __init__(self, path, parent): - name = self.__class__.__name__ + ":" + path.basename - super(DoctestItem, self).__init__(name=name, parent=parent) - self.fspath = path - - def repr_failure(self, excinfo): - if excinfo.errisinstance(doctest.DocTestFailure): - doctestfailure = excinfo.value - example = doctestfailure.example - test = doctestfailure.test - filename = test.filename - lineno = test.lineno + example.lineno + 1 - message = excinfo.type.__name__ - reprlocation = ReprFileLocation(filename, lineno, message) - checker = doctest.OutputChecker() - REPORT_UDIFF = doctest.REPORT_UDIFF - filelines = 
py.path.local(filename).readlines(cr=0) - i = max(test.lineno, max(0, lineno - 10)) # XXX? - lines = [] - for line in filelines[i:lineno]: - lines.append("%03d %s" % (i+1, line)) - i += 1 - lines += checker.output_difference(example, - doctestfailure.got, REPORT_UDIFF).split("\n") - return ReprFailDoctest(reprlocation, lines) - elif excinfo.errisinstance(doctest.UnexpectedException): - excinfo = py.code.ExceptionInfo(excinfo.value.exc_info) - return super(DoctestItem, self).repr_failure(excinfo) - else: - return super(DoctestItem, self).repr_failure(excinfo) - -class DoctestTextfile(DoctestItem): - def runtest(self): - if not self._deprecated_testexecution(): - failed, tot = doctest.testfile( - str(self.fspath), module_relative=False, - raise_on_error=True, verbose=0) - -class DoctestModule(DoctestItem): - def runtest(self): - module = self.fspath.pyimport() - failed, tot = doctest.testmod( - module, raise_on_error=True, verbose=0) diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. 
diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -11,8 +11,8 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib import rgc from pypy.jit.backend.llsupport import symbolic -from pypy.jit.backend.x86.jump import remap_frame_layout -from pypy.jit.codewriter import heaptracker +from pypy.jit.backend.x86.jump import remap_frame_layout_mixed +from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.metainterp.resoperation import rop from pypy.jit.backend.llsupport.descr import BaseFieldDescr, BaseArrayDescr @@ -71,13 +71,16 @@ def convert_to_imm(self, c): adr = self.assembler.datablockwrapper.malloc_aligned(8, 8) - rffi.cast(rffi.CArrayPtr(rffi.DOUBLE), adr)[0] = c.getfloat() + x = c.getfloatstorage() + rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), adr)[0] = x return ConstFloatLoc(adr) def convert_to_imm_16bytes_align(self, c): adr = self.assembler.datablockwrapper.malloc_aligned(16, 16) - rffi.cast(rffi.CArrayPtr(rffi.DOUBLE), adr)[0] = c.getfloat() - rffi.cast(rffi.CArrayPtr(rffi.DOUBLE), adr)[1] = 0.0 + x = c.getfloatstorage() + y = longlong.ZEROF + rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), adr)[0] = x + rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), adr)[1] = y return ConstFloatLoc(adr) def after_call(self, v): @@ -107,6 +110,12 @@ return StackLoc(i, get_ebp_ofs(i+1), 2, box_type) else: return StackLoc(i, get_ebp_ofs(i), 1, box_type) + @staticmethod + def frame_size(box_type): + if IS_X86_32 and box_type == FLOAT: + 
return 2 + else: + return 1 class RegAlloc(object): @@ -221,7 +230,7 @@ selected_reg=None, need_lower_byte=False): if var.type == FLOAT: if isinstance(var, ConstFloat): - return FloatImmedLoc(var.getfloat()) + return FloatImmedLoc(var.getfloatstorage()) return self.xrm.make_sure_var_in_reg(var, forbidden_vars, selected_reg, need_lower_byte) else: @@ -680,11 +689,10 @@ def _maybe_consider_llong_lt(self, op): # XXX just a special case for now - from pypy.rlib.longlong2float import longlong2float box = op.getarg(2) if not isinstance(box, ConstFloat): return False - if not (box.value == longlong2float(r_longlong(0))): + if box.getlonglong() != 0: return False # "x < 0" box = op.getarg(1) @@ -703,8 +711,7 @@ self.xrm.possibly_free_var(op.getarg(1)) def _loc_of_const_longlong(self, value64): - from pypy.rlib.longlong2float import longlong2float - c = ConstFloat(longlong2float(value64)) + c = ConstFloat(value64) return self.xrm.convert_to_imm(c) def _consider_llong_from_int(self, op): @@ -715,47 +722,18 @@ loc1 = self._loc_of_const_longlong(r_longlong(box.value)) loc2 = None # unused else: - # requires the argument to be in eax, and trash edx. 
- loc1 = self.rm.make_sure_var_in_reg(box, selected_reg=eax) - tmpvar = TempBox() - self.rm.force_allocate_reg(tmpvar, [box], selected_reg=edx) - self.rm.possibly_free_var(tmpvar) + loc1 = self.rm.make_sure_var_in_reg(box) tmpxvar = TempBox() loc2 = self.xrm.force_allocate_reg(tmpxvar, [op.result]) self.xrm.possibly_free_var(tmpxvar) self.PerformLLong(op, [loc1, loc2], loc0) self.rm.possibly_free_var(box) - def _consider_llong_from_two_ints(self, op): + def _consider_llong_from_uint(self, op): assert IS_X86_32 - box1 = op.getarg(1) - box2 = op.getarg(2) loc0 = self.xrm.force_allocate_reg(op.result) - # - if isinstance(box1, ConstInt) and isinstance(box2, ConstInt): - # all-constant arguments: load the result value in a single step - value64 = r_longlong(box2.value) << 32 - value64 |= r_longlong(r_uint(box1.value)) - loc1 = self._loc_of_const_longlong(value64) - loc2 = None # unused - loc3 = None # unused - # - else: - tmpxvar = TempBox() - loc3 = self.xrm.force_allocate_reg(tmpxvar, [op.result]) - self.xrm.possibly_free_var(tmpxvar) - # - if isinstance(box1, ConstInt): - loc1 = self._loc_of_const_longlong(r_longlong(box1.value)) - else: - loc1 = self.rm.make_sure_var_in_reg(box1) - # - if isinstance(box2, ConstInt): - loc2 = self._loc_of_const_longlong(r_longlong(box2.value)) - else: - loc2 = self.rm.make_sure_var_in_reg(box2, [box1]) - # - self.PerformLLong(op, [loc1, loc2, loc3], loc0) + loc1 = self.rm.make_sure_var_in_reg(op.getarg(1)) + self.PerformLLong(op, [loc1], loc0) self.rm.possibly_free_vars_for_op(op) def _call(self, op, arglocs, force_store=[], guard_not_forced_op=None): @@ -805,8 +783,8 @@ return self._consider_llong_to_int(op) if oopspecindex == EffectInfo.OS_LLONG_FROM_INT: return self._consider_llong_from_int(op) - if oopspecindex == EffectInfo.OS_LLONG_FROM_TWO_INTS: - return self._consider_llong_from_two_ints(op) + if oopspecindex == EffectInfo.OS_LLONG_FROM_UINT: + return self._consider_llong_from_uint(op) if (oopspecindex == 
EffectInfo.OS_LLONG_EQ or oopspecindex == EffectInfo.OS_LLONG_NE): return self._consider_llong_eq_ne_xx(op) @@ -1227,16 +1205,17 @@ xmmtmploc = self.xrm.force_allocate_reg(box1, selected_reg=xmmtmp) # Part about non-floats # XXX we don't need a copy, we only just the original list - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations1 = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type != FLOAT] assert tmploc not in nonfloatlocs - dst_locations = [loc for loc in nonfloatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, tmploc) + dst_locations1 = [loc for loc in nonfloatlocs if loc is not None] # Part about floats - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations2 = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type == FLOAT] - dst_locations = [loc for loc in floatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, xmmtmp) + dst_locations2 = [loc for loc in floatlocs if loc is not None] + remap_frame_layout_mixed(assembler, + src_locations1, dst_locations1, tmploc, + src_locations2, dst_locations2, xmmtmp) self.rm.possibly_free_var(box) self.xrm.possibly_free_var(box1) self.possibly_free_vars_for_op(op) diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/py/_plugin/pytest_skipping.py b/py/_plugin/pytest_skipping.py deleted file mode 100644 --- a/py/_plugin/pytest_skipping.py +++ /dev/null @@ -1,347 +0,0 @@ -""" -advanced skipping for python test functions, classes or modules. - -With this plugin you can mark test functions for conditional skipping -or as "xfail", expected-to-fail. Skipping a test will avoid running it -while xfail-marked tests will run and result in an inverted outcome: -a pass becomes a failure and a fail becomes a semi-passing one. - -The need for skipping a test is usually connected to a condition. -If a test fails under all conditions then it's probably better -to mark your test as 'xfail'. - -By passing ``-rxs`` to the terminal reporter you will see extra -summary information on skips and xfail-run tests at the end of a test run. - -.. _skipif: - -Skipping a single function -------------------------------------------- - -Here is an example for marking a test function to be skipped -when run on a Python3 interpreter:: - - @py.test.mark.skipif("sys.version_info >= (3,0)") - def test_function(): - ... - -During test function setup the skipif condition is -evaluated by calling ``eval(expr, namespace)``. The namespace -contains the ``sys`` and ``os`` modules and the test -``config`` object. The latter allows you to skip based -on a test configuration value e.g. like this:: - - @py.test.mark.skipif("not config.getvalue('db')") - def test_function(...): - ... - -Create a shortcut for your conditional skip decorator -at module level like this:: - - win32only = py.test.mark.skipif("sys.platform != 'win32'") - - @win32only - def test_function(): - ... 
- - -skip groups of test functions --------------------------------------- - -As with all metadata function marking you can do it at -`whole class- or module level`_. Here is an example -for skipping all methods of a test class based on platform:: - - class TestPosixCalls: - pytestmark = py.test.mark.skipif("sys.platform == 'win32'") - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -The ``pytestmark`` decorator will be applied to each test function. -If your code targets python2.6 or above you can equivalently use -the skipif decorator on classes:: - - @py.test.mark.skipif("sys.platform == 'win32'") - class TestPosixCalls: - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -It is fine in general to apply multiple "skipif" decorators -on a single function - this means that if any of the conditions -apply the function will be skipped. - -.. _`whole class- or module level`: mark.html#scoped-marking - - -mark a test function as **expected to fail** -------------------------------------------------------- - -You can use the ``xfail`` marker to indicate that you -expect the test to fail:: - - @py.test.mark.xfail - def test_function(): - ... - -This test will be run but no traceback will be reported -when it fails. Instead terminal reporting will list it in the -"expected to fail" or "unexpectedly passing" sections. - -Same as with skipif_ you can also selectively expect a failure -depending on platform:: - - @py.test.mark.xfail("sys.version_info >= (3,0)") - def test_function(): - ... 
- -To not run a test and still regard it as "xfailed":: - - @py.test.mark.xfail(..., run=False) - -To specify an explicit reason to be shown with xfailure detail:: - - @py.test.mark.xfail(..., reason="my reason") - -imperative xfail from within a test or setup function ------------------------------------------------------- - -If you cannot declare xfail-conditions at import time -you can also imperatively produce an XFail-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.xfail("unsuppored configuration") - - -skipping on a missing import dependency --------------------------------------------------- - -You can use the following import helper at module level -or within a test or test setup function:: - - docutils = py.test.importorskip("docutils") - -If ``docutils`` cannot be imported here, this will lead to a -skip outcome of the test. You can also skip dependeing if -if a library does not come with a high enough version:: - - docutils = py.test.importorskip("docutils", minversion="0.3") - -The version will be read from the specified module's ``__version__`` attribute. - -imperative skip from within a test or setup function ------------------------------------------------------- - -If for some reason you cannot declare skip-conditions -you can also imperatively produce a Skip-outcome from -within test or setup code. 
Example:: - - def test_function(): - if not valid_config(): - py.test.skip("unsuppored configuration") - -""" - -import py - -def pytest_addoption(parser): - group = parser.getgroup("general") - group.addoption('--runxfail', - action="store_true", dest="runxfail", default=False, - help="run tests even if they are marked xfail") - -class MarkEvaluator: - def __init__(self, item, name): - self.item = item - self.name = name - self.holder = getattr(item.obj, name, None) - - def __bool__(self): - return bool(self.holder) - __nonzero__ = __bool__ - - def istrue(self): - if self.holder: - d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} - if self.holder.args: - self.result = False - for expr in self.holder.args: - self.expr = expr - if isinstance(expr, str): - result = cached_eval(self.item.config, expr, d) - else: - result = expr - if result: - self.result = True - self.expr = expr - break - else: - self.result = True - return getattr(self, 'result', False) - - def get(self, attr, default=None): - return self.holder.kwargs.get(attr, default) - - def getexplanation(self): - expl = self.get('reason', None) - if not expl: - if not hasattr(self, 'expr'): - return "" - else: - return "condition: " + self.expr - return expl - - -def pytest_runtest_setup(item): - if not isinstance(item, py.test.collect.Function): - return - evalskip = MarkEvaluator(item, 'skipif') - if evalskip.istrue(): - py.test.skip(evalskip.getexplanation()) - item._evalxfail = MarkEvaluator(item, 'xfail') - if not item.config.getvalue("runxfail"): - if item._evalxfail.istrue(): - if not item._evalxfail.get('run', True): - py.test.skip("xfail") - -def pytest_runtest_makereport(__multicall__, item, call): - if not isinstance(item, py.test.collect.Function): - return - if not (call.excinfo and - call.excinfo.errisinstance(py.test.xfail.Exception)): - evalxfail = getattr(item, '_evalxfail', None) - if not evalxfail: - return - if call.excinfo and 
call.excinfo.errisinstance(py.test.xfail.Exception): - if not item.config.getvalue("runxfail"): - rep = __multicall__.execute() - rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg - rep.skipped = True - rep.failed = False - return rep - if call.when == "setup": - rep = __multicall__.execute() - if rep.skipped and evalxfail.istrue(): - expl = evalxfail.getexplanation() - if not evalxfail.get("run", True): - expl = "[NOTRUN] " + expl - rep.keywords['xfail'] = expl - return rep - elif call.when == "call": - rep = __multicall__.execute() - if not item.config.getvalue("runxfail") and evalxfail.istrue(): - if call.excinfo: - rep.skipped = True - rep.failed = rep.passed = False - else: - rep.skipped = rep.passed = False - rep.failed = True - rep.keywords['xfail'] = evalxfail.getexplanation() - else: - if 'xfail' in rep.keywords: - del rep.keywords['xfail'] - return rep - -# called by terminalreporter progress reporting -def pytest_report_teststatus(report): - if 'xfail' in report.keywords: - if report.skipped: - return "xfailed", "x", "xfail" - elif report.failed: - return "xpassed", "X", "XPASS" - -# called by the terminalreporter instance/plugin -def pytest_terminal_summary(terminalreporter): - tr = terminalreporter - if not tr.reportchars: - #for name in "xfailed skipped failed xpassed": - # if not tr.stats.get(name, 0): - # tr.write_line("HINT: use '-r' option to see extra " - # "summary info about tests") - # break - return - - lines = [] - for char in tr.reportchars: - if char == "x": - show_xfailed(terminalreporter, lines) - elif char == "X": - show_xpassed(terminalreporter, lines) - elif char == "f": - show_failed(terminalreporter, lines) - elif char == "s": - show_skipped(terminalreporter, lines) - if lines: - tr._tw.sep("=", "short test summary info") - for line in lines: - tr._tw.line(line) - -def show_failed(terminalreporter, lines): - tw = terminalreporter._tw - failed = terminalreporter.stats.get("failed") - if failed: - for rep in failed: - pos = 
terminalreporter.gettestid(rep.item) - lines.append("FAIL %s" %(pos, )) - -def show_xfailed(terminalreporter, lines): - xfailed = terminalreporter.stats.get("xfailed") - if xfailed: - for rep in xfailed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XFAIL %s %s" %(pos, reason)) - -def show_xpassed(terminalreporter, lines): - xpassed = terminalreporter.stats.get("xpassed") - if xpassed: - for rep in xpassed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XPASS %s %s" %(pos, reason)) - -def cached_eval(config, expr, d): - if not hasattr(config, '_evalcache'): - config._evalcache = {} - try: - return config._evalcache[expr] - except KeyError: - #import sys - #print >>sys.stderr, ("cache-miss: %r" % expr) - config._evalcache[expr] = x = eval(expr, d) - return x - - -def folded_skips(skipped): - d = {} - for event in skipped: - entry = event.longrepr.reprcrash - key = entry.path, entry.lineno, entry.message - d.setdefault(key, []).append(event) - l = [] - for key, events in d.items(): - l.append((len(events),) + key) - return l - -def show_skipped(terminalreporter, lines): - tr = terminalreporter - skipped = tr.stats.get('skipped', []) - if skipped: - #if not tr.hasopt('skipped'): - # tr.write_line( - # "%d skipped tests, specify -rs for more info" % - # len(skipped)) - # return - fskips = folded_skips(skipped) - if fskips: - #tr.write_sep("_", "skipped test summary") - for num, fspath, lineno, reason in fskips: - if reason.startswith("Skipped: "): - reason = reason[9:] - lines.append("SKIP [%d] %s:%d: %s" % - (num, fspath, lineno, reason)) diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. 
-It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. _`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/py/_plugin/pytest_nose.py b/py/_plugin/pytest_nose.py deleted file mode 100644 --- a/py/_plugin/pytest_nose.py +++ /dev/null @@ -1,98 +0,0 @@ -"""nose-compatibility plugin: allow to run nose test suites natively. - -This is an experimental plugin for allowing to run tests written -in 'nosetests style with py.test. - -Usage -------------- - -type:: - - py.test # instead of 'nosetests' - -and you should be able to run nose style tests and at the same -time can make full use of py.test's capabilities. - -Supported nose Idioms ----------------------- - -* setup and teardown at module/class/method level -* SkipTest exceptions and markers -* setup/teardown decorators -* yield-based tests and their setup -* general usage of nose utilities - -Unsupported idioms / issues ----------------------------------- - -- nose-style doctests are not collected and executed correctly, - also fixtures don't work. - -- no nose-configuration is recognized - -If you find other issues or have suggestions please run:: - - py.test --pastebin=all - -and send the resulting URL to a py.test contact channel, -at best to the mailing list. -""" -import py -import inspect -import sys - -def pytest_runtest_makereport(__multicall__, item, call): - SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None) - if SkipTest: - if call.excinfo and call.excinfo.errisinstance(SkipTest): - # let's substitute the excinfo with a py.test.skip one - call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when) - call.excinfo = call2.excinfo - -def pytest_report_iteminfo(item): - # nose 0.11.1 uses decorators for "raises" and other helpers. 
- # for reporting progress by filename we fish for the filename - if isinstance(item, py.test.collect.Function): - obj = item.obj - if hasattr(obj, 'compat_co_firstlineno'): - fn = sys.modules[obj.__module__].__file__ - if fn.endswith(".pyc"): - fn = fn[:-1] - #assert 0 - #fn = inspect.getsourcefile(obj) or inspect.getfile(obj) - lineno = obj.compat_co_firstlineno - return py.path.local(fn), lineno, obj.__module__ - -def pytest_runtest_setup(item): - if isinstance(item, (py.test.collect.Function)): - if isinstance(item.parent, py.test.collect.Generator): - gen = item.parent - if not hasattr(gen, '_nosegensetup'): - call_optional(gen.obj, 'setup') - if isinstance(gen.parent, py.test.collect.Instance): - call_optional(gen.parent.obj, 'setup') - gen._nosegensetup = True - if not call_optional(item.obj, 'setup'): - # call module level setup if there is no object level one - call_optional(item.parent.obj, 'setup') - -def pytest_runtest_teardown(item): - if isinstance(item, py.test.collect.Function): - if not call_optional(item.obj, 'teardown'): - call_optional(item.parent.obj, 'teardown') - #if hasattr(item.parent, '_nosegensetup'): - # #call_optional(item._nosegensetup, 'teardown') - # del item.parent._nosegensetup - -def pytest_make_collect_report(collector): - if isinstance(collector, py.test.collect.Generator): - call_optional(collector.obj, 'setup') - -def call_optional(obj, name): - method = getattr(obj, name, None) - if method: - ismethod = inspect.ismethod(method) - rawcode = py.code.getrawcode(method) - if not rawcode.co_varnames[ismethod:]: - method() - return True diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. 
diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. diff --git a/py/_plugin/pytest_mark.py b/py/_plugin/pytest_mark.py deleted file mode 100644 --- a/py/_plugin/pytest_mark.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -generic mechanism for marking python functions. 
- -By using the ``py.test.mark`` helper you can instantiate -decorators that will set named meta data on test functions. - -Marking a single function ----------------------------------------------------- - -You can "mark" a test function with meta data like this:: - - @py.test.mark.webtest - def test_send_http(): - ... - -This will set a "Marker" instance as a function attribute named "webtest". -You can also specify parametrized meta data like this:: - - @py.test.mark.webtest(firefox=30) - def test_receive(): - ... - -The named marker can be accessed like this later:: - - test_receive.webtest.kwargs['firefox'] == 30 - -In addition to set key-value pairs you can also use positional arguments:: - - @py.test.mark.webtest("triangular") - def test_receive(): - ... - -and later access it with ``test_receive.webtest.args[0] == 'triangular``. - -.. _`scoped-marking`: - -Marking whole classes or modules ----------------------------------------------------- - -If you are programming with Python2.6 you may use ``py.test.mark`` decorators -with classes to apply markers to all its test methods:: - - @py.test.mark.webtest - class TestClass: - def test_startup(self): - ... - def test_startup_and_more(self): - ... - -This is equivalent to directly applying the decorator to the -two test functions. - -To remain compatible with Python2.5 you can also set a -``pytestmark`` attribute on a TestClass like this:: - - import py - - class TestClass: - pytestmark = py.test.mark.webtest - -or if you need to use multiple markers you can use a list:: - - import py - - class TestClass: - pytestmark = [py.test.mark.webtest, pytest.mark.slowtest] - -You can also set a module level marker:: - - import py - pytestmark = py.test.mark.webtest - -in which case it will be applied to all functions and -methods defined in the module. 
- -Using "-k MARKNAME" to select tests ----------------------------------------------------- - -You can use the ``-k`` command line option to select -tests:: - - py.test -k webtest # will only run tests marked as webtest - -""" -import py - -def pytest_namespace(): - return {'mark': MarkGenerator()} - -class MarkGenerator: - """ non-underscore attributes of this object can be used as decorators for - marking test functions. Example: @py.test.mark.slowtest in front of a - function will set the 'slowtest' marker object on it. """ - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - return MarkDecorator(name) - -class MarkDecorator: - """ decorator for setting function attributes. """ - def __init__(self, name): - self.markname = name - self.kwargs = {} - self.args = [] - - def __repr__(self): - d = self.__dict__.copy() - name = d.pop('markname') - return "" %(name, d) - - def __call__(self, *args, **kwargs): - """ if passed a single callable argument: decorate it with mark info. - otherwise add *args/**kwargs in-place to mark information. 
""" - if args: - func = args[0] - if len(args) == 1 and hasattr(func, '__call__') or \ - hasattr(func, '__bases__'): - if hasattr(func, '__bases__'): - if hasattr(func, 'pytestmark'): - l = func.pytestmark - if not isinstance(l, list): - func.pytestmark = [l, self] - else: - l.append(self) - else: - func.pytestmark = [self] - else: - holder = getattr(func, self.markname, None) - if holder is None: - holder = MarkInfo(self.markname, self.args, self.kwargs) - setattr(func, self.markname, holder) - else: - holder.kwargs.update(self.kwargs) - holder.args.extend(self.args) - return func - else: - self.args.extend(args) - self.kwargs.update(kwargs) - return self - -class MarkInfo: - def __init__(self, name, args, kwargs): - self._name = name - self.args = args - self.kwargs = kwargs - - def __getattr__(self, name): - if name[0] != '_' and name in self.kwargs: - py.log._apiwarn("1.1", "use .kwargs attribute to access key-values") - return self.kwargs[name] - raise AttributeError(name) - - def __repr__(self): - return "" % ( - self._name, self.args, self.kwargs) - - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - item = __multicall__.execute() - if isinstance(item, py.test.collect.Function): - cls = collector.getparent(py.test.collect.Class) - mod = collector.getparent(py.test.collect.Module) - func = item.obj - func = getattr(func, '__func__', func) # py3 - func = getattr(func, 'im_func', func) # py2 - for parent in [x for x in (mod, cls) if x]: - marker = getattr(parent.obj, 'pytestmark', None) - if marker is not None: - if not isinstance(marker, list): - marker = [marker] - for mark in marker: - if isinstance(mark, MarkDecorator): - mark(func) - return item diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. 
- -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -indended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py deleted file mode 100644 --- a/py/_plugin/pytest_restdoc.py +++ /dev/null @@ -1,429 +0,0 @@ -""" -perform ReST syntax, local and remote reference tests on .rst/.txt files. 
-""" -import py -import sys, os, re - -def pytest_addoption(parser): - group = parser.getgroup("ReST", "ReST documentation check options") - group.addoption('-R', '--urlcheck', - action="store_true", dest="urlcheck", default=False, - help="urlopen() remote links found in ReST text files.") - group.addoption('--urltimeout', action="store", metavar="secs", - type="int", dest="urlcheck_timeout", default=5, - help="timeout in seconds for remote urlchecks") - group.addoption('--forcegen', - action="store_true", dest="forcegen", default=False, - help="force generation of html files.") - -def pytest_collect_file(path, parent): - if path.ext in (".txt", ".rst"): - project = getproject(path) - if project is not None: - return ReSTFile(path, parent=parent, project=project) - -def getproject(path): - for parent in path.parts(reverse=True): - confrest = parent.join("confrest.py") - if confrest.check(): - Project = confrest.pyimport().Project - return Project(parent) - -class ReSTFile(py.test.collect.File): - def __init__(self, fspath, parent, project): - super(ReSTFile, self).__init__(fspath=fspath, parent=parent) - self.project = project - - def collect(self): - return [ - ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project), - LinkCheckerMaker("checklinks", parent=self), - DoctestText("doctest", parent=self), - ] - -def deindent(s, sep='\n'): - leastspaces = -1 - lines = s.split(sep) - for line in lines: - if not line.strip(): - continue - spaces = len(line) - len(line.lstrip()) - if leastspaces == -1 or spaces < leastspaces: - leastspaces = spaces - if leastspaces == -1: - return s - for i, line in enumerate(lines): - if not line.strip(): - lines[i] = '' - else: - lines[i] = line[leastspaces:] - return sep.join(lines) - -class ReSTSyntaxTest(py.test.collect.Item): - def __init__(self, name, parent, project): - super(ReSTSyntaxTest, self).__init__(name=name, parent=parent) - self.project = project - - def reportinfo(self): - return self.fspath, None, "syntax 
check" - - def runtest(self): - self.restcheck(py.path.svnwc(self.fspath)) - - def restcheck(self, path): - py.test.importorskip("docutils") - self.register_linkrole() - from docutils.utils import SystemMessage - try: - self._checkskip(path, self.project.get_htmloutputpath(path)) - self.project.process(path) - except KeyboardInterrupt: - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") - - def register_linkrole(self): - #directive.register_linkrole('api', self.resolve_linkrole) - #directive.register_linkrole('source', self.resolve_linkrole) -# -# # XXX fake sphinx' "toctree" and refs -# directive.register_linkrole('ref', self.resolve_linkrole) - - from docutils.parsers.rst import directives - def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - toctree_directive.content = 1 - toctree_directive.options = {'maxdepth': int, 'glob': directives.flag, - 'hidden': directives.flag} - directives.register_directive('toctree', toctree_directive) - self.register_pygments() - - def register_pygments(self): - # taken from pygments-main/external/rst-directive.py - from docutils.parsers.rst import directives - try: - from pygments.formatters import HtmlFormatter - except ImportError: - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - pygments_directive.options = {} - else: - # The default formatter - DEFAULT = HtmlFormatter(noclasses=True) - # Add name -> formatter pairs for every variant you want to use - VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), - } - - from docutils import nodes - - from pygments import highlight - from pygments.lexers import get_lexer_by_name, TextLexer - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, 
state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight('\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - - pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - directives.register_directive('sourcecode', pygments_directive) - - def resolve_linkrole(self, name, text, check=True): - apigen_relpath = self.project.apigen_relpath - - if name == 'api': - if text == 'py': - return ('py', apigen_relpath + 'api/index.html') - else: - assert text.startswith('py.'), ( - 'api link "%s" does not point to the py package') % (text,) - dotted_name = text - if dotted_name.find('(') > -1: - dotted_name = dotted_name[:text.find('(')] - # remove pkg root - path = dotted_name.split('.')[1:] - dotted_name = '.'.join(path) - obj = py - if check: - for chunk in path: - try: - obj = getattr(obj, chunk) - except AttributeError: - raise AssertionError( - 'problem with linkrole :api:`%s`: can not resolve ' - 'dotted name %s' % (text, dotted_name,)) - return (text, apigen_relpath + 'api/%s.html' % (dotted_name,)) - elif name == 'source': - assert text.startswith('py/'), ('source link "%s" does not point ' - 'to the py package') % (text,) - relpath = '/'.join(text.split('/')[1:]) - if check: - pkgroot = py._pydir - abspath = pkgroot.join(relpath) - assert pkgroot.join(relpath).check(), ( - 'problem with linkrole :source:`%s`: ' - 'path %s does not exist' % (text, relpath)) - if relpath.endswith('/') or not relpath: - relpath += 'index.html' - else: - relpath += '.html' - return (text, apigen_relpath + 'source/%s' % (relpath,)) - elif name == 'ref': - return ("", "") - - def _checkskip(self, lpath, 
htmlpath=None): - if not self.config.getvalue("forcegen"): - lpath = py.path.local(lpath) - if htmlpath is not None: - htmlpath = py.path.local(htmlpath) - if lpath.ext == '.txt': - htmlpath = htmlpath or lpath.new(ext='.html') - if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): - py.test.skip("html file is up to date, use --forcegen to regenerate") - #return [] # no need to rebuild - -class DoctestText(py.test.collect.Item): - def reportinfo(self): - return self.fspath, None, "doctest" - - def runtest(self): - content = self._normalize_linesep() - newcontent = self.config.hook.pytest_doctest_prepare_content(content=content) - if newcontent is not None: - content = newcontent - s = content - l = [] - prefix = '.. >>> ' - mod = py.std.types.ModuleType(self.fspath.purebasename) - skipchunk = False - for line in deindent(s).split('\n'): - stripped = line.strip() - if skipchunk and line.startswith(skipchunk): - py.builtin.print_("skipping", line) - continue - skipchunk = False - if stripped.startswith(prefix): - try: - py.builtin.exec_(py.code.Source( - stripped[len(prefix):]).compile(), mod.__dict__) - except ValueError: - e = sys.exc_info()[1] - if e.args and e.args[0] == "skipchunk": - skipchunk = " " * (len(line) - len(line.lstrip())) - else: - raise - else: - l.append(line) - docstring = "\n".join(l) - mod.__doc__ = docstring - failed, tot = py.std.doctest.testmod(mod, verbose=1) - if failed: - py.test.fail("doctest %s: %s failed out of %s" %( - self.fspath, failed, tot)) - - def _normalize_linesep(self): - # XXX quite nasty... 
but it works (fixes win32 issues) - s = self.fspath.read() - linesep = '\n' - if '\r' in s: - if '\n' not in s: - linesep = '\r' - else: - linesep = '\r\n' - s = s.replace(linesep, '\n') - return s - -class LinkCheckerMaker(py.test.collect.Collector): - def collect(self): - return list(self.genlinkchecks()) - - def genlinkchecks(self): - path = self.fspath - # generating functions + args as single tests - timeout = self.config.getvalue("urlcheck_timeout") - for lineno, line in enumerate(path.readlines()): - line = line.strip() - if line.startswith('.. _'): - if line.startswith('.. _`'): - delim = '`:' - else: - delim = ':' - l = line.split(delim, 1) - if len(l) != 2: - continue - tryfn = l[1].strip() - name = "%s:%d" %(tryfn, lineno) - if tryfn.startswith('http:') or tryfn.startswith('https'): - if self.config.getvalue("urlcheck"): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno, timeout), checkfunc=urlcheck) - elif tryfn.startswith('webcal:'): - continue - else: - i = tryfn.find('#') - if i != -1: - checkfn = tryfn[:i] - else: - checkfn = tryfn - if checkfn.strip() and (1 or checkfn.endswith('.html')): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno), checkfunc=localrefcheck) - -class CheckLink(py.test.collect.Item): - def __init__(self, name, parent, args, checkfunc): - super(CheckLink, self).__init__(name, parent) - self.args = args - self.checkfunc = checkfunc - - def runtest(self): - return self.checkfunc(*self.args) - - def reportinfo(self, basedir=None): - return (self.fspath, self.args[2], "checklink: %s" % self.args[0]) - -def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): - old = py.std.socket.getdefaulttimeout() - py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN) - try: - try: - py.builtin.print_("trying remote", tryfn) - py.std.urllib2.urlopen(tryfn) - finally: - py.std.socket.setdefaulttimeout(old) - except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): - e = sys.exc_info()[1] - if getattr(e, 'code', None) in 
(401, 403): # authorization required, forbidden - py.test.skip("%s: %s" %(tryfn, str(e))) - else: - py.test.fail("remote reference error %r in %s:%d\n%s" %( - tryfn, path.basename, lineno+1, e)) - -def localrefcheck(tryfn, path, lineno): - # assume it should be a file - i = tryfn.find('#') - if tryfn.startswith('javascript:'): - return # don't check JS refs - if i != -1: - anchor = tryfn[i+1:] - tryfn = tryfn[:i] - else: - anchor = '' - fn = path.dirpath(tryfn) - ishtml = fn.ext == '.html' - fn = ishtml and fn.new(ext='.txt') or fn - py.builtin.print_("filename is", fn) - if not fn.check(): # not ishtml or not fn.check(): - if not py.path.local(tryfn).check(): # the html could be there - py.test.fail("reference error %r in %s:%d" %( - tryfn, path.basename, lineno+1)) - if anchor: - source = unicode(fn.read(), 'latin1') - source = source.lower().replace('-', ' ') # aehem - - anchor = anchor.replace('-', ' ') - match2 = ".. _`%s`:" % anchor - match3 = ".. _%s:" % anchor - candidates = (anchor, match2, match3) - py.builtin.print_("candidates", repr(candidates)) - for line in source.split('\n'): - line = line.strip() - if line in candidates: - break - else: - py.test.fail("anchor reference error %s#%s in %s:%d" %( - tryfn, anchor, path.basename, lineno+1)) - -if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()): - def log(msg): - print(msg) -else: - def log(msg): - pass - -def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'): - """ return html latin1-encoded document for the given input. 
- source a ReST-string - sourcepath where to look for includes (basically) - stylesheet path (to be used if any) - """ - from docutils.core import publish_string - kwargs = { - 'stylesheet' : stylesheet, - 'stylesheet_path': None, - 'traceback' : 1, - 'embed_stylesheet': 0, - 'output_encoding' : encoding, - #'halt' : 0, # 'info', - 'halt_level' : 2, - } - # docutils uses os.getcwd() :-( - source_path = os.path.abspath(str(source_path)) - prevdir = os.getcwd() - try: - #os.chdir(os.path.dirname(source_path)) - return publish_string(source, source_path, writer_name='html', - settings_overrides=kwargs) - finally: - os.chdir(prevdir) - -def process(txtpath, encoding='latin1'): - """ process a textfile """ - log("processing %s" % txtpath) - assert txtpath.check(ext='.txt') - if isinstance(txtpath, py.path.svnwc): - txtpath = txtpath.localpath - htmlpath = txtpath.new(ext='.html') - #svninfopath = txtpath.localpath.new(ext='.svninfo') - - style = txtpath.dirpath('style.css') - if style.check(): - stylesheet = style.basename - else: - stylesheet = None - content = unicode(txtpath.read(), encoding) - doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding) - htmlpath.open('wb').write(doc) - #log("wrote %r" % htmlpath) - #if txtpath.check(svnwc=1, versioned=1): - # info = txtpath.info() - # svninfopath.dump(info) - -if sys.version_info > (3, 0): - def _uni(s): return s -else: - def _uni(s): - return unicode(s) - -rex1 = re.compile(r'.*(.*).*', re.MULTILINE | re.DOTALL) -rex2 = re.compile(r'.*
(.*)
.*', re.MULTILINE | re.DOTALL) - -def strip_html_header(string, encoding='utf8'): - """ return the content of the body-tag """ - uni = unicode(string, encoding) - for rex in rex1,rex2: - match = rex.search(uni) - if not match: - break - uni = match.group(1) - return uni - -class Project: # used for confrest.py files - def __init__(self, sourcepath): - self.sourcepath = sourcepath - def process(self, path): - return process(path) - def get_htmloutputpath(self, path): - return path.new(ext='html') diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. 
parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimzations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. _`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/py/_plugin/pytest_helpconfig.py b/py/_plugin/pytest_helpconfig.py deleted file mode 100644 --- a/py/_plugin/pytest_helpconfig.py +++ /dev/null @@ -1,164 +0,0 @@ -""" provide version info, conftest/environment config names. 
-""" -import py -import inspect, sys - -def pytest_addoption(parser): - group = parser.getgroup('debugconfig') - group.addoption('--version', action="store_true", - help="display py lib version and import information.") - group._addoption('-p', action="append", dest="plugins", default = [], - metavar="name", - help="early-load given plugin (multi-allowed).") - group.addoption('--traceconfig', - action="store_true", dest="traceconfig", default=False, - help="trace considerations of conftest.py files."), - group._addoption('--nomagic', - action="store_true", dest="nomagic", default=False, - help="don't reinterpret asserts, no traceback cutting. ") - group.addoption('--debug', - action="store_true", dest="debug", default=False, - help="generate and show internal debugging information.") - group.addoption("--help-config", action="store_true", dest="helpconfig", - help="show available conftest.py and ENV-variable names.") - - -def pytest_configure(__multicall__, config): - if config.option.version: - p = py.path.local(py.__file__).dirpath() - sys.stderr.write("This is py.test version %s, imported from %s\n" % - (py.__version__, p)) - sys.exit(0) - if not config.option.helpconfig: - return - __multicall__.execute() - options = [] - for group in config._parser._groups: - options.extend(group.options) - widths = [0] * 10 - tw = py.io.TerminalWriter() - tw.sep("-") - tw.line("%-13s | %-18s | %-25s | %s" %( - "cmdline name", "conftest.py name", "ENV-variable name", "help")) - tw.sep("-") - - options = [opt for opt in options if opt._long_opts] - options.sort(key=lambda x: x._long_opts) - for opt in options: - if not opt._long_opts or not opt.dest: - continue - optstrings = list(opt._long_opts) # + list(opt._short_opts) - optstrings = filter(None, optstrings) - optstring = "|".join(optstrings) - line = "%-13s | %-18s | %-25s | %s" %( - optstring, - "option_%s" % opt.dest, - "PYTEST_OPTION_%s" % opt.dest.upper(), - opt.help and opt.help or "", - ) - 
tw.line(line[:tw.fullwidth]) - for name, help in conftest_options: - line = "%-13s | %-18s | %-25s | %s" %( - "", - name, - "", - help, - ) - tw.line(line[:tw.fullwidth]) - - tw.sep("-") - sys.exit(0) - -conftest_options = ( - ('pytest_plugins', 'list of plugin names to load'), - ('collect_ignore', '(relative) paths ignored during collection'), - ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), -) - -def pytest_report_header(config): - lines = [] - if config.option.debug or config.option.traceconfig: - lines.append("using py lib: %s" % (py.path.local(py.__file__).dirpath())) - if config.option.traceconfig: - lines.append("active plugins:") - plugins = [] - items = config.pluginmanager._name2plugin.items() - for name, plugin in items: - lines.append(" %-20s: %s" %(name, repr(plugin))) - return lines - - -# ===================================================== -# validate plugin syntax and hooks -# ===================================================== - -def pytest_plugin_registered(manager, plugin): - methods = collectattr(plugin) - hooks = {} - for hookspec in manager.hook._hookspecs: - hooks.update(collectattr(hookspec)) - - stringio = py.io.TextIO() - def Print(*args): - if args: - stringio.write(" ".join(map(str, args))) - stringio.write("\n") - - fail = False - while methods: - name, method = methods.popitem() - #print "checking", name - if isgenerichook(name): - continue - if name not in hooks: - if not getattr(method, 'optionalhook', False): - Print("found unknown hook:", name) - fail = True - else: - #print "checking", method - method_args = getargs(method) - #print "method_args", method_args - if '__multicall__' in method_args: - method_args.remove('__multicall__') - hook = hooks[name] - hookargs = getargs(hook) - for arg in method_args: - if arg not in hookargs: - Print("argument %r not available" %(arg, )) - Print("actual definition: %s" %(formatdef(method))) - Print("available hook arguments: %s" % - ", ".join(hookargs)) - fail = True - break 
- #if not fail: - # print "matching hook:", formatdef(method) - if fail: - name = getattr(plugin, '__name__', plugin) - raise PluginValidationError("%s:\n%s" %(name, stringio.getvalue())) - -class PluginValidationError(Exception): - """ plugin failed validation. """ - -def isgenerichook(name): - return name == "pytest_plugins" or \ - name.startswith("pytest_funcarg__") - -def getargs(func): - args = inspect.getargs(py.code.getrawcode(func))[0] - startindex = inspect.ismethod(func) and 1 or 0 - return args[startindex:] - -def collectattr(obj, prefixes=("pytest_",)): - methods = {} - for apiname in dir(obj): - for prefix in prefixes: - if apiname.startswith(prefix): - methods[apiname] = getattr(obj, apiname) - return methods - -def formatdef(func): - return "%s%s" %( - func.__name__, - inspect.formatargspec(*inspect.getargspec(func)) - ) - diff --git a/py/bin/py.which b/py/bin/py.which deleted file mode 100755 --- a/py/bin/py.which +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pywhich() \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. 
- -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/py/bin/py.countloc b/py/bin/py.countloc deleted file mode 100755 --- a/py/bin/py.countloc +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycountloc() \ No newline at end of file diff --git a/pypy/jit/backend/x86/regloc.py b/pypy/jit/backend/x86/regloc.py --- a/pypy/jit/backend/x86/regloc.py +++ b/pypy/jit/backend/x86/regloc.py @@ -6,6 +6,7 @@ from pypy.rlib.objectmodel import specialize, instantiate from pypy.rlib.rarithmetic import intmask from pypy.jit.metainterp.history import FLOAT +from pypy.jit.codewriter import longlong # # This module adds support for "locations", which can be either in a Const, @@ -45,9 +46,6 @@ # One of INT, REF, FLOAT self.type = type - def frame_size(self): - return self.width // WORD - def __repr__(self): return '%d(%%ebp)' % (self.value,) @@ -212,9 +210,8 @@ _immutable_ = True width = 8 - def __init__(self, floatvalue): - from pypy.rlib.longlong2float import float2longlong - self.aslonglong = float2longlong(floatvalue) + def __init__(self, floatstorage): + self.aslonglong = floatstorage def low_part(self): return intmask(self.aslonglong) @@ -229,17 +226,16 @@ return ImmedLoc(self.high_part()) def __repr__(self): - from pypy.rlib.longlong2float import longlong2float - floatvalue = longlong2float(self.aslonglong) + floatvalue = longlong.getrealfloat(self.aslonglong) return '' % (floatvalue,) def location_code(self): raise NotImplementedError if 
IS_X86_64: - def FloatImmedLoc(floatvalue): + def FloatImmedLoc(floatstorage): from pypy.rlib.longlong2float import float2longlong - value = intmask(float2longlong(floatvalue)) + value = intmask(float2longlong(floatstorage)) return ImmedLoc(value) @@ -492,7 +488,9 @@ MOVSX16 = _binaryop('MOVSX16') MOV32 = _binaryop('MOV32') MOVSX32 = _binaryop('MOVSX32') - XCHG = _binaryop('XCHG') + # Avoid XCHG because it always implies atomic semantics, which is + # slower and does not pair well for dispatch. + #XCHG = _binaryop('XCHG') PUSH = _unaryop('PUSH') POP = _unaryop('POP') diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. 
They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. internal diff --git a/py/bin/env.cmd b/py/bin/env.cmd deleted file mode 100644 --- a/py/bin/env.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. 
diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -20,11 +20,11 @@ from pypy.jit.metainterp import resoperation, executor from pypy.jit.metainterp.resoperation import rop from pypy.jit.backend.llgraph import symbolic +from pypy.jit.codewriter import longlong from pypy.rlib.objectmodel import ComputedIntSymbolic, we_are_translated from pypy.rlib.rarithmetic import ovfcheck from pypy.rlib.rarithmetic import r_longlong, r_ulonglong, r_uint -from pypy.rlib.longlong2float import longlong2float, float2longlong import py from pypy.tool.ansi_print import ansi_log @@ -302,7 +302,7 @@ return compile_start_ref_var(loop, lltype.Signed) def compile_start_float_var(loop): - return compile_start_ref_var(loop, lltype.Float) + return compile_start_ref_var(loop, longlong.FLOATSTORAGE) def compile_start_ref_var(loop, TYPE): loop = _from_opaque(loop) @@ -341,7 +341,7 @@ compile_add_ref_const(loop, value, lltype.Signed) def compile_add_float_const(loop, value): - compile_add_ref_const(loop, value, lltype.Float) + compile_add_ref_const(loop, value, longlong.FLOATSTORAGE) def compile_add_ref_const(loop, value, TYPE): loop = _from_opaque(loop) @@ -354,7 +354,7 @@ return compile_add_ref_result(loop, lltype.Signed) def compile_add_float_result(loop): - return compile_add_ref_result(loop, lltype.Float) + return compile_add_ref_result(loop, longlong.FLOATSTORAGE) def compile_add_ref_result(loop, TYPE): loop = _from_opaque(loop) @@ -487,8 +487,8 @@ x = self.as_ptr(result) elif RESTYPE is ootype.Object: x = self.as_object(result) - elif RESTYPE is lltype.Float: - x = self.as_float(result) + elif RESTYPE is longlong.FLOATSTORAGE: + x = self.as_floatstorage(result) else: raise Exception("op.result.concretetype is %r" % (RESTYPE,)) @@ -551,8 +551,8 @@ def as_object(self, x): return ootype.cast_to_object(x) - def as_float(self, x): - return cast_to_float(x) + def 
as_floatstorage(self, x): + return cast_to_floatstorage(x) def log_progress(self): count = sum(_stats.exec_counters.values()) @@ -830,7 +830,7 @@ elif T == llmemory.GCREF: args_in_order.append('r') _call_args_r.append(x) - elif T is lltype.Float: + elif T is longlong.FLOATSTORAGE: args_in_order.append('f') _call_args_f.append(x) else: @@ -893,7 +893,7 @@ set_future_value_int(i, args[i]) elif isinstance(TYPE, lltype.Ptr): set_future_value_ref(i, args[i]) - elif TYPE is lltype.Float: + elif TYPE is longlong.FLOATSTORAGE: set_future_value_float(i, args[i]) else: raise Exception("Nonsense type %s" % TYPE) @@ -1079,25 +1079,23 @@ def cast_from_ptr(TYPE, x): return lltype.cast_opaque_ptr(TYPE, x) -def cast_to_float(x): +def cast_to_floatstorage(x): if isinstance(x, float): - return x # common case + return longlong.getfloatstorage(x) # common case if IS_32_BIT: + assert longlong.supports_longlong if isinstance(x, r_longlong): - return longlong2float(x) + return x if isinstance(x, r_ulonglong): - return longlong2float(rffi.cast(lltype.SignedLongLong, x)) + return rffi.cast(lltype.SignedLongLong, x) raise TypeError(type(x)) -def cast_from_float(TYPE, x): - assert isinstance(x, float) +def cast_from_floatstorage(TYPE, x): + assert isinstance(x, longlong.r_float_storage) if TYPE is lltype.Float: - return x - if IS_32_BIT: - if TYPE is lltype.SignedLongLong: - return float2longlong(x) - if TYPE is lltype.UnsignedLongLong: - return r_ulonglong(float2longlong(x)) + return longlong.getrealfloat(x) + if longlong.is_longlong(TYPE): + return rffi.cast(TYPE, x) raise TypeError(TYPE) @@ -1126,6 +1124,7 @@ set_future_value_ref(index, value) def set_future_value_float(index, value): + assert isinstance(value, longlong.r_float_storage) set_future_value_ref(index, value) def set_future_value_ref(index, value): @@ -1164,7 +1163,7 @@ frame = _from_opaque(frame) assert num >= 0 x = frame.fail_args[num] - assert lltype.typeOf(x) is lltype.Float + assert lltype.typeOf(x) is 
longlong.FLOATSTORAGE return x def frame_ptr_getvalue(frame, num): @@ -1302,11 +1301,11 @@ def do_getarrayitem_gc_float(array, index): array = array._obj.container - return cast_to_float(array.getitem(index)) + return cast_to_floatstorage(array.getitem(index)) def do_getarrayitem_raw_float(array, index): array = array.adr.ptr._obj - return cast_to_float(array.getitem(index)) + return cast_to_floatstorage(array.getitem(index)) def do_getarrayitem_gc_ptr(array, index): array = array._obj.container @@ -1321,7 +1320,7 @@ return cast_to_int(_getfield_gc(struct, fieldnum)) def do_getfield_gc_float(struct, fieldnum): - return cast_to_float(_getfield_gc(struct, fieldnum)) + return cast_to_floatstorage(_getfield_gc(struct, fieldnum)) def do_getfield_gc_ptr(struct, fieldnum): return cast_to_ptr(_getfield_gc(struct, fieldnum)) @@ -1335,7 +1334,7 @@ return cast_to_int(_getfield_raw(struct, fieldnum)) def do_getfield_raw_float(struct, fieldnum): - return cast_to_float(_getfield_raw(struct, fieldnum)) + return cast_to_floatstorage(_getfield_raw(struct, fieldnum)) def do_getfield_raw_ptr(struct, fieldnum): return cast_to_ptr(_getfield_raw(struct, fieldnum)) @@ -1365,13 +1364,13 @@ def do_setarrayitem_gc_float(array, index, newvalue): array = array._obj.container ITEMTYPE = lltype.typeOf(array).OF - newvalue = cast_from_float(ITEMTYPE, newvalue) + newvalue = cast_from_floatstorage(ITEMTYPE, newvalue) array.setitem(index, newvalue) def do_setarrayitem_raw_float(array, index, newvalue): array = array.adr.ptr ITEMTYPE = lltype.typeOf(array).TO.OF - newvalue = cast_from_int(ITEMTYPE, newvalue) + newvalue = cast_from_floatstorage(ITEMTYPE, newvalue) array._obj.setitem(index, newvalue) def do_setarrayitem_gc_ptr(array, index, newvalue): @@ -1391,7 +1390,7 @@ STRUCT, fieldname = symbolic.TokenToField[fieldnum] ptr = lltype.cast_opaque_ptr(lltype.Ptr(STRUCT), struct) FIELDTYPE = getattr(STRUCT, fieldname) - newvalue = cast_from_float(FIELDTYPE, newvalue) + newvalue = 
cast_from_floatstorage(FIELDTYPE, newvalue) setattr(ptr, fieldname, newvalue) def do_setfield_gc_ptr(struct, fieldnum, newvalue): @@ -1412,7 +1411,7 @@ STRUCT, fieldname = symbolic.TokenToField[fieldnum] ptr = cast_from_int(lltype.Ptr(STRUCT), struct) FIELDTYPE = getattr(STRUCT, fieldname) - newvalue = cast_from_float(FIELDTYPE, newvalue) + newvalue = cast_from_floatstorage(FIELDTYPE, newvalue) setattr(ptr, fieldname, newvalue) def do_setfield_raw_ptr(struct, fieldnum, newvalue): @@ -1469,6 +1468,7 @@ kind2TYPE = { 'i': lltype.Signed, 'f': lltype.Float, + 'L': lltype.SignedLongLong, 'v': lltype.Void, } @@ -1509,7 +1509,7 @@ def do_call_float(f): x = _do_call_common(f) - return cast_to_float(x) + return cast_to_floatstorage(x) def do_call_ptr(f): x = _do_call_common(f) @@ -1538,14 +1538,12 @@ assert n == 'r' x = argsiter_r.next() x = cast_from_ptr(TYPE, x) - elif TYPE is lltype.Float or ( - IS_32_BIT and TYPE in (lltype.SignedLongLong, - lltype.UnsignedLongLong)): + elif TYPE is lltype.Float or longlong.is_longlong(TYPE): if args_in_order is not None: n = orderiter.next() assert n == 'f' x = argsiter_f.next() - x = cast_from_float(TYPE, x) + x = cast_from_floatstorage(TYPE, x) else: if args_in_order is not None: n = orderiter.next() @@ -1649,6 +1647,13 @@ s_CompiledLoop = annmodel.SomePtr(COMPILEDLOOP) s_Frame = annmodel.SomePtr(FRAME) +if longlong.FLOATSTORAGE is lltype.Float: + s_FloatStorage = annmodel.SomeFloat() +elif longlong.FLOATSTORAGE is lltype.SignedLongLong: + s_FloatStorage = annmodel.SomeInteger(knowntype=longlong.r_float_storage) +else: + assert 0 + setannotation(compile_start, s_CompiledLoop) setannotation(compile_start_int_var, annmodel.SomeInteger()) setannotation(compile_start_ref_var, annmodel.SomeInteger()) @@ -1677,7 +1682,7 @@ setannotation(frame_execute, annmodel.SomeInteger()) setannotation(frame_int_getvalue, annmodel.SomeInteger()) setannotation(frame_ptr_getvalue, annmodel.SomePtr(llmemory.GCREF)) -setannotation(frame_float_getvalue, 
annmodel.SomeFloat()) +setannotation(frame_float_getvalue, s_FloatStorage) setannotation(frame_get_value_count, annmodel.SomeInteger()) setannotation(frame_clear_latest_values, annmodel.s_None) @@ -1693,15 +1698,15 @@ setannotation(do_unicodegetitem, annmodel.SomeInteger()) setannotation(do_getarrayitem_gc_int, annmodel.SomeInteger()) setannotation(do_getarrayitem_gc_ptr, annmodel.SomePtr(llmemory.GCREF)) -setannotation(do_getarrayitem_gc_float, annmodel.SomeFloat()) +setannotation(do_getarrayitem_gc_float, s_FloatStorage) setannotation(do_getarrayitem_raw_int, annmodel.SomeInteger()) -setannotation(do_getarrayitem_raw_float, annmodel.SomeFloat()) +setannotation(do_getarrayitem_raw_float, s_FloatStorage) setannotation(do_getfield_gc_int, annmodel.SomeInteger()) setannotation(do_getfield_gc_ptr, annmodel.SomePtr(llmemory.GCREF)) -setannotation(do_getfield_gc_float, annmodel.SomeFloat()) +setannotation(do_getfield_gc_float, s_FloatStorage) setannotation(do_getfield_raw_int, annmodel.SomeInteger()) setannotation(do_getfield_raw_ptr, annmodel.SomePtr(llmemory.GCREF)) -setannotation(do_getfield_raw_float, annmodel.SomeFloat()) +setannotation(do_getfield_raw_float, s_FloatStorage) setannotation(do_new, annmodel.SomePtr(llmemory.GCREF)) setannotation(do_new_array, annmodel.SomePtr(llmemory.GCREF)) setannotation(do_setarrayitem_gc_int, annmodel.s_None) @@ -1723,5 +1728,5 @@ setannotation(do_call_pushptr, annmodel.s_None) setannotation(do_call_int, annmodel.SomeInteger()) setannotation(do_call_ptr, annmodel.SomePtr(llmemory.GCREF)) -setannotation(do_call_float, annmodel.SomeFloat()) +setannotation(do_call_float, s_FloatStorage) setannotation(do_call_void, annmodel.s_None) diff --git a/py/_plugin/pytest_terminal.py b/py/_plugin/pytest_terminal.py deleted file mode 100644 --- a/py/_plugin/pytest_terminal.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -Implements terminal reporting of the full testing process. - -This is a good source for looking at the various reporting hooks. 
-""" -import py -import sys - -optionalhook = py.test.mark.optionalhook - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting", "reporting", after="general") - group._addoption('-v', '--verbose', action="count", - dest="verbose", default=0, help="increase verbosity."), - group._addoption('-r', - action="store", dest="reportchars", default=None, metavar="chars", - help="show extra test summary info as specified by chars (f)ailed, " - "(s)skipped, (x)failed, (X)passed.") - group._addoption('-l', '--showlocals', - action="store_true", dest="showlocals", default=False, - help="show locals in tracebacks (disabled by default).") - group._addoption('--report', - action="store", dest="report", default=None, metavar="opts", - help="(deprecated, use -r)") - group._addoption('--tb', metavar="style", - action="store", dest="tbstyle", default='long', - type="choice", choices=['long', 'short', 'no', 'line'], - help="traceback print mode (long/short/line/no).") - group._addoption('--fulltrace', - action="store_true", dest="fulltrace", default=False, - help="don't cut any tracebacks (default is to cut).") - group._addoption('--funcargs', - action="store_true", dest="showfuncargs", default=False, - help="show available function arguments, sorted by plugin") - -def pytest_configure(config): - if config.option.collectonly: - reporter = CollectonlyReporter(config) - elif config.option.showfuncargs: - config.setsessionclass(ShowFuncargSession) - reporter = None - else: - reporter = TerminalReporter(config) - if reporter: - # XXX see remote.py's XXX - for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth': - if hasattr(config, attr): - #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr) - name = attr.split("_")[-1] - assert hasattr(self.reporter._tw, name), name - setattr(reporter._tw, name, getattr(config, attr)) - config.pluginmanager.register(reporter, 'terminalreporter') - -def getreportopt(config): - reportopts = "" - optvalue = 
config.getvalue("report") - if optvalue: - py.builtin.print_("DEPRECATED: use -r instead of --report option.", - file=py.std.sys.stderr) - if optvalue: - for setting in optvalue.split(","): - setting = setting.strip() - if setting == "skipped": - reportopts += "s" - elif setting == "xfailed": - reportopts += "x" - reportchars = config.getvalue("reportchars") - if reportchars: - for char in reportchars: - if char not in reportopts: - reportopts += char - return reportopts - -class TerminalReporter: - def __init__(self, config, file=None): - self.config = config - self.stats = {} - self.curdir = py.path.local() - if file is None: - file = py.std.sys.stdout - self._tw = py.io.TerminalWriter(file) - self.currentfspath = None - self.gateway2info = {} - self.reportchars = getreportopt(config) - - def hasopt(self, char): - char = {'xfailed': 'x', 'skipped': 's'}.get(char,char) - return char in self.reportchars - - def write_fspath_result(self, fspath, res): - fspath = self.curdir.bestrelpath(fspath) - if fspath != self.currentfspath: - self._tw.line() - relpath = self.curdir.bestrelpath(fspath) - self._tw.write(relpath + " ") - self.currentfspath = fspath - self._tw.write(res) - - def write_ensure_prefix(self, prefix, extra="", **kwargs): - if self.currentfspath != prefix: - self._tw.line() - self.currentfspath = prefix - self._tw.write(prefix) - if extra: - self._tw.write(extra, **kwargs) - self.currentfspath = -2 - - def ensure_newline(self): - if self.currentfspath: - self._tw.line() - self.currentfspath = None - - def write_line(self, line, **markup): - line = str(line) - self.ensure_newline() - self._tw.line(line, **markup) - - def write_sep(self, sep, title=None, **markup): - self.ensure_newline() - self._tw.sep(sep, title, **markup) - - def getcategoryletterword(self, rep): - res = self.config.hook.pytest_report_teststatus(report=rep) - if res: - return res - for cat in 'skipped failed passed ???'.split(): - if getattr(rep, cat, None): - break - return cat, 
self.getoutcomeletter(rep), self.getoutcomeword(rep) - - def getoutcomeletter(self, rep): - return rep.shortrepr - - def getoutcomeword(self, rep): - if rep.passed: - return "PASS", dict(green=True) - elif rep.failed: - return "FAIL", dict(red=True) - elif rep.skipped: - return "SKIP" - else: - return "???", dict(red=True) - - def gettestid(self, item, relative=True): - fspath = item.fspath - chain = [x for x in item.listchain() if x.fspath == fspath] - chain = chain[1:] - names = [x.name for x in chain if x.name != "()"] - path = item.fspath - if relative: - relpath = path.relto(self.curdir) - if relpath: - path = relpath - names.insert(0, str(path)) - return "::".join(names) - - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.write_line("INTERNALERROR> " + line) - - def pytest_plugin_registered(self, plugin): - if self.config.option.traceconfig: - msg = "PLUGIN registered: %s" %(plugin,) - # XXX this event may happen during setup/teardown time - # which unfortunately captures our output here - # which garbles our output if we use self.write_line - self.write_line(msg) - - @optionalhook - def pytest_gwmanage_newgateway(self, gateway, platinfo): - #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec)) - d = {} - d['version'] = repr_pythonversion(platinfo.version_info) - d['id'] = gateway.id - d['spec'] = gateway.spec._spec - d['platform'] = platinfo.platform - if self.config.option.verbose: - d['extra'] = "- " + platinfo.executable - else: - d['extra'] = "" - d['cwd'] = platinfo.cwd - infoline = ("[%(id)s] %(spec)s -- platform %(platform)s, " - "Python %(version)s " - "cwd: %(cwd)s" - "%(extra)s" % d) - self.write_line(infoline) - self.gateway2info[gateway] = infoline - - @optionalhook - def pytest_testnodeready(self, node): - self.write_line("[%s] txnode ready to receive tests" %(node.gateway.id,)) - - @optionalhook - def pytest_testnodedown(self, node, error): - if error: - 
self.write_line("[%s] node down, error: %s" %(node.gateway.id, error)) - - @optionalhook - def pytest_rescheduleitems(self, items): - if self.config.option.debug: - self.write_sep("!", "RESCHEDULING %s " %(items,)) - - @optionalhook - def pytest_looponfailinfo(self, failreports, rootdirs): - if failreports: - self.write_sep("#", "LOOPONFAILING", red=True) - for report in failreports: - loc = self._getcrashline(report) - self.write_line(loc, red=True) - self.write_sep("#", "waiting for changes") - for rootdir in rootdirs: - self.write_line("### Watching: %s" %(rootdir,), bold=True) - - - def pytest_trace(self, category, msg): - if self.config.option.debug or \ - self.config.option.traceconfig and category.find("config") != -1: - self.write_line("[%s] %s" %(category, msg)) - - def pytest_deselected(self, items): - self.stats.setdefault('deselected', []).append(items) - - def pytest_itemstart(self, item, node=None): - if getattr(self.config.option, 'dist', 'no') != "no": - # for dist-testing situations itemstart means we - # queued the item for sending, not interesting (unless debugging) - if self.config.option.debug: - line = self._reportinfoline(item) - extra = "" - if node: - extra = "-> [%s]" % node.gateway.id - self.write_ensure_prefix(line, extra) - else: - if self.config.option.verbose: - line = self._reportinfoline(item) - self.write_ensure_prefix(line, "") - else: - # ensure that the path is printed before the - # 1st test of a module starts running - - self.write_fspath_result(self._getfspath(item), "") - - def pytest__teardown_final_logerror(self, report): - self.stats.setdefault("error", []).append(report) - - def pytest_runtest_logreport(self, report): - rep = report - cat, letter, word = self.getcategoryletterword(rep) - if not letter and not word: - # probably passed setup/teardown - return - if isinstance(word, tuple): - word, markup = word - else: - markup = {} - self.stats.setdefault(cat, []).append(rep) - if not self.config.option.verbose: - 
self.write_fspath_result(self._getfspath(rep.item), letter) - else: - line = self._reportinfoline(rep.item) - if not hasattr(rep, 'node'): - self.write_ensure_prefix(line, word, **markup) - else: - self.ensure_newline() - if hasattr(rep, 'node'): - self._tw.write("[%s] " % rep.node.gateway.id) - self._tw.write(word, **markup) - self._tw.write(" " + line) - self.currentfspath = -2 - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.stats.setdefault("error", []).append(report) - msg = report.longrepr.reprcrash.message - self.write_fspath_result(report.collector.fspath, "E") - elif report.skipped: - self.stats.setdefault("skipped", []).append(report) - self.write_fspath_result(report.collector.fspath, "S") - - def pytest_sessionstart(self, session): - self.write_sep("=", "test session starts", bold=True) - self._sessionstarttime = py.std.time.time() - - verinfo = ".".join(map(str, sys.version_info[:3])) - msg = "platform %s -- Python %s" % (sys.platform, verinfo) - msg += " -- pytest-%s" % (py.__version__) - if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None): - msg += " -- " + str(sys.executable) - self.write_line(msg) - lines = self.config.hook.pytest_report_header(config=self.config) - lines.reverse() - for line in flatten(lines): - self.write_line(line) - for i, testarg in enumerate(self.config.args): - self.write_line("test object %d: %s" %(i+1, testarg)) - - def pytest_sessionfinish(self, exitstatus, __multicall__): - __multicall__.execute() - self._tw.line("") - if exitstatus in (0, 1, 2): - self.summary_errors() - self.summary_failures() - self.config.hook.pytest_terminal_summary(terminalreporter=self) - if exitstatus == 2: - self._report_keyboardinterrupt() - self.summary_deselected() - self.summary_stats() - - def pytest_keyboard_interrupt(self, excinfo): - self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) - - def _report_keyboardinterrupt(self): - 
excrepr = self._keyboardinterrupt_memo - msg = excrepr.reprcrash.message - self.write_sep("!", msg) - if "KeyboardInterrupt" in msg: - if self.config.getvalue("fulltrace"): - excrepr.toterminal(self._tw) - else: - excrepr.reprcrash.toterminal(self._tw) - - def _getcrashline(self, report): - try: - return report.longrepr.reprcrash - except AttributeError: - return str(report.longrepr)[:50] - - def _reportinfoline(self, item): - collect_fspath = self._getfspath(item) - fspath, lineno, msg = self._getreportinfo(item) - if fspath and fspath != collect_fspath: - fspath = "%s <- %s" % ( - self.curdir.bestrelpath(collect_fspath), - self.curdir.bestrelpath(fspath)) - elif fspath: - fspath = self.curdir.bestrelpath(fspath) - if lineno is not None: - lineno += 1 - if fspath and lineno and msg: - line = "%(fspath)s:%(lineno)s: %(msg)s" - elif fspath and msg: - line = "%(fspath)s: %(msg)s" - elif fspath and lineno: - line = "%(fspath)s:%(lineno)s %(extrapath)s" - else: - line = "[noreportinfo]" - return line % locals() + " " - - def _getfailureheadline(self, rep): - if hasattr(rep, "collector"): - return str(rep.collector.fspath) - elif hasattr(rep, 'item'): - fspath, lineno, msg = self._getreportinfo(rep.item) - return msg - else: - return "test session" - - def _getreportinfo(self, item): - try: - return item.__reportinfo - except AttributeError: - pass - reportinfo = item.config.hook.pytest_report_iteminfo(item=item) - # cache on item - item.__reportinfo = reportinfo - return reportinfo - - def _getfspath(self, item): - try: - return item.fspath - except AttributeError: - fspath, lineno, msg = self._getreportinfo(item) - return fspath - - # - # summaries for sessionfinish - # - - def summary_failures(self): - tbstyle = self.config.getvalue("tbstyle") - if 'failed' in self.stats and tbstyle != "no": - self.write_sep("=", "FAILURES") - for rep in self.stats['failed']: - if tbstyle == "line": - line = self._getcrashline(rep) - self.write_line(line) - else: - msg = 
self._getfailureheadline(rep) - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def summary_errors(self): - if 'error' in self.stats and self.config.option.tbstyle != "no": - self.write_sep("=", "ERRORS") - for rep in self.stats['error']: - msg = self._getfailureheadline(rep) - if not hasattr(rep, 'when'): - # collect - msg = "ERROR during collection " + msg - elif rep.when == "setup": - msg = "ERROR at setup of " + msg - elif rep.when == "teardown": - msg = "ERROR at teardown of " + msg - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def write_platinfo(self, rep): - if hasattr(rep, 'node'): - self.write_line(self.gateway2info.get( - rep.node.gateway, - "node %r (platinfo not found? strange)") - [:self._tw.fullwidth-1]) - - def summary_stats(self): - session_duration = py.std.time.time() - self._sessionstarttime - - keys = "failed passed skipped deselected".split() - for key in self.stats.keys(): - if key not in keys: - keys.append(key) - parts = [] - for key in keys: - val = self.stats.get(key, None) - if val: - parts.append("%d %s" %(len(val), key)) - line = ", ".join(parts) - # XXX coloring - self.write_sep("=", "%s in %.2f seconds" %(line, session_duration)) - - def summary_deselected(self): - if 'deselected' in self.stats: - self.write_sep("=", "%d tests deselected by %r" %( - len(self.stats['deselected']), self.config.option.keyword), bold=True) - - -class CollectonlyReporter: - INDENT = " " - - def __init__(self, config, out=None): - self.config = config - if out is None: - out = py.std.sys.stdout - self.out = py.io.TerminalWriter(out) - self.indent = "" - self._failed = [] - - def outindent(self, line): - self.out.line(self.indent + str(line)) - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.out.line("INTERNALERROR> " + line) - - def pytest_collectstart(self, collector): - self.outindent(collector) - self.indent += self.INDENT - - def 
pytest_itemstart(self, item, node=None): - self.outindent(item) - - def pytest_collectreport(self, report): - if not report.passed: - self.outindent("!!! %s !!!" % report.longrepr.reprcrash.message) - self._failed.append(report) - self.indent = self.indent[:-len(self.INDENT)] - - def pytest_sessionfinish(self, session, exitstatus): - if self._failed: - self.out.sep("!", "collection failures") - for rep in self._failed: - rep.toterminal(self.out) - - -def repr_pythonversion(v=None): - if v is None: - v = sys.version_info - try: - return "%s.%s.%s-%s-%s" % v - except (TypeError, ValueError): - return str(v) - -def flatten(l): - for x in l: - if isinstance(x, (list, tuple)): - for y in flatten(x): - yield y - else: - yield x - -from py._test.session import Session -class ShowFuncargSession(Session): - def main(self, colitems): - self.fspath = py.path.local() - self.sessionstarts() - try: - self.showargs(colitems[0]) - finally: - self.sessionfinishes(exitstatus=1) - - def showargs(self, colitem): - tw = py.io.TerminalWriter() - from py._test.funcargs import getplugins - from py._test.funcargs import FuncargRequest - plugins = getplugins(colitem, withpy=True) - verbose = self.config.getvalue("verbose") - for plugin in plugins: - available = [] - for name, factory in vars(plugin).items(): - if name.startswith(FuncargRequest._argprefix): - name = name[len(FuncargRequest._argprefix):] - if name not in available: - available.append([name, factory]) - if available: - pluginname = plugin.__name__ - for name, factory in available: - loc = self.getlocation(factory) - if verbose: - funcargspec = "%s -- %s" %(name, loc,) - else: - funcargspec = name - tw.line(funcargspec, green=True) - doc = factory.__doc__ or "" - if doc: - for line in doc.split("\n"): - tw.line(" " + line.strip()) - else: - tw.line(" %s: no docstring available" %(loc,), - red=True) - - def getlocation(self, function): - import inspect - fn = py.path.local(inspect.getfile(function)) - lineno = 
py.builtin._getcode(function).co_firstlineno - if fn.relto(self.fspath): - fn = fn.relto(self.fspath) - return "%s:%d" %(fn, lineno+1) diff --git a/py/_path/gateway/__init__.py b/py/_path/gateway/__init__.py deleted file mode 100644 --- a/py/_path/gateway/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. 
It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. diff --git a/py/_plugin/pytest_recwarn.py b/py/_plugin/pytest_recwarn.py deleted file mode 100644 --- a/py/_plugin/pytest_recwarn.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -helpers for asserting deprecation and other warnings. - -Example usage ---------------------- - -You can use the ``recwarn`` funcarg to track -warnings within a test function: - -.. sourcecode:: python - - def test_hello(recwarn): - from warnings import warn - warn("hello", DeprecationWarning) - w = recwarn.pop(DeprecationWarning) - assert issubclass(w.category, DeprecationWarning) - assert 'hello' in str(w.message) - assert w.filename - assert w.lineno - -You can also call a global helper for checking -taht a certain function call yields a Deprecation -warning: - -.. sourcecode:: python - - import py - - def test_global(): - py.test.deprecated_call(myfunction, 17) - - -""" - -import py -import os - -def pytest_funcarg__recwarn(request): - """Return a WarningsRecorder instance that provides these methods: - - * ``pop(category=None)``: return last warning matching the category. 
- * ``clear()``: clear list of warnings - """ - warnings = WarningsRecorder() - request.addfinalizer(warnings.finalize) - return warnings - -def pytest_namespace(): - return {'deprecated_call': deprecated_call} - -def deprecated_call(func, *args, **kwargs): - """ assert that calling func(*args, **kwargs) - triggers a DeprecationWarning. - """ - warningmodule = py.std.warnings - l = [] - oldwarn_explicit = getattr(warningmodule, 'warn_explicit') - def warn_explicit(*args, **kwargs): - l.append(args) - oldwarn_explicit(*args, **kwargs) - oldwarn = getattr(warningmodule, 'warn') - def warn(*args, **kwargs): - l.append(args) - oldwarn(*args, **kwargs) - - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - try: - ret = func(*args, **kwargs) - finally: - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - if not l: - #print warningmodule - __tracebackhide__ = True - raise AssertionError("%r did not produce DeprecationWarning" %(func,)) - return ret - - -class RecordedWarning: - def __init__(self, message, category, filename, lineno, line): - self.message = message - self.category = category - self.filename = filename - self.lineno = lineno - self.line = line - -class WarningsRecorder: - def __init__(self): - warningmodule = py.std.warnings - self.list = [] - def showwarning(message, category, filename, lineno, line=0): - self.list.append(RecordedWarning( - message, category, filename, lineno, line)) - try: - self.old_showwarning(message, category, - filename, lineno, line=line) - except TypeError: - # < python2.6 - self.old_showwarning(message, category, filename, lineno) - self.old_showwarning = warningmodule.showwarning - warningmodule.showwarning = showwarning - - def pop(self, cls=Warning): - """ pop the first recorded warning, raise exception if not exists.""" - for i, w in enumerate(self.list): - if issubclass(w.category, cls): - return self.list.pop(i) - __tracebackhide__ = True - assert 0, "%r not found in %r" %(cls, 
self.list) - - #def resetregistry(self): - # import warnings - # warnings.onceregistry.clear() - # warnings.__warningregistry__.clear() - - def clear(self): - self.list[:] = [] - - def finalize(self): - py.std.warnings.showwarning = self.old_showwarning diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/py/bin/py.svnwcrevert b/py/bin/py.svnwcrevert deleted file mode 100755 --- a/py/bin/py.svnwcrevert +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pysvnwcrevert() \ No newline at end of file diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimisation, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. 
-This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/py/_plugin/pytest_pdb.py b/py/_plugin/pytest_pdb.py deleted file mode 100644 --- a/py/_plugin/pytest_pdb.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -interactive debugging with the Python Debugger. -""" -import py -import pdb, sys, linecache - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--pdb', - action="store_true", dest="usepdb", default=False, - help="start the interactive Python debugger on errors.") - -def pytest_configure(config): - if config.getvalue("usepdb"): - config.pluginmanager.register(PdbInvoke(), 'pdb') - -class PdbInvoke: - def pytest_runtest_makereport(self, item, call): - if call.excinfo and not \ - call.excinfo.errisinstance(py.test.skip.Exception): - # play well with capturing, slightly hackish - capman = item.config.pluginmanager.getplugin('capturemanager') - capman.suspendcapture() - - tw = py.io.TerminalWriter() - repr = call.excinfo.getrepr() - repr.toterminal(tw) - post_mortem(call.excinfo._excinfo[2]) - - capman.resumecapture_item(item) - -class Pdb(py.std.pdb.Pdb): - def do_list(self, arg): - self.lastcmd = 'list' - last = None - if arg: - try: - x = eval(arg, {}, {}) - if type(x) == type(()): - first, last = x - first = int(first) - last = int(last) - if last < first: - # Assume it's a count - last = first + last - else: - first = max(1, int(x) - 5) - except: - print ('*** Error in argument: %s' % repr(arg)) - return - elif self.lineno is None: - first = max(1, self.curframe.f_lineno - 5) - else: - first = self.lineno + 1 - if last is None: - last = first + 10 - filename = self.curframe.f_code.co_filename - breaklist = self.get_file_breaks(filename) - try: - for lineno in range(first, last+1): - # start 
difference from normal do_line - line = self._getline(filename, lineno) - # end difference from normal do_line - if not line: - print ('[EOF]') - break - else: - s = repr(lineno).rjust(3) - if len(s) < 4: s = s + ' ' - if lineno in breaklist: s = s + 'B' - else: s = s + ' ' - if lineno == self.curframe.f_lineno: - s = s + '->' - sys.stdout.write(s + '\t' + line) - self.lineno = lineno - except KeyboardInterrupt: - pass - do_l = do_list - - def _getline(self, filename, lineno): - if hasattr(filename, "__source__"): - try: - return filename.__source__.lines[lineno - 1] + "\n" - except IndexError: - return None - return linecache.getline(filename, lineno) - - def get_stack(self, f, t): - # Modified from bdb.py to be able to walk the stack beyond generators, - # which does not work in the normal pdb :-( - stack, i = pdb.Pdb.get_stack(self, f, t) - if f is None: - i = max(0, len(stack) - 1) - while i and stack[i][0].f_locals.get("__tracebackhide__", False): - i-=1 - return stack, i - -def post_mortem(t): - p = Pdb() - p.reset() - p.interaction(None, t) - -def set_trace(): - # again, a copy of the version in pdb.py - Pdb().set_trace(sys._getframe().f_back) diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. 
- -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/py/_plugin/pytest_unittest.py b/py/_plugin/pytest_unittest.py deleted file mode 100644 --- a/py/_plugin/pytest_unittest.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -automatically discover and run traditional "unittest.py" style tests. - -Usage ----------------- - -This plugin collects and runs Python `unittest.py style`_ tests. -It will automatically collect ``unittest.TestCase`` subclasses -and their ``test`` methods from the test modules of a project -(usually following the ``test_*.py`` pattern). - -This plugin is enabled by default. - -.. 
_`unittest.py style`: http://docs.python.org/library/unittest.html -""" -import py -import sys - -def pytest_pycollect_makeitem(collector, name, obj): - if 'unittest' not in sys.modules: - return # nobody derived unittest.TestCase - try: - isunit = issubclass(obj, py.std.unittest.TestCase) - except KeyboardInterrupt: - raise - except Exception: - pass - else: - if isunit: - return UnitTestCase(name, parent=collector) - -class UnitTestCase(py.test.collect.Class): - def collect(self): - return [UnitTestCaseInstance("()", self)] - - def setup(self): - pass - - def teardown(self): - pass - -_dummy = object() -class UnitTestCaseInstance(py.test.collect.Instance): - def collect(self): - loader = py.std.unittest.TestLoader() - names = loader.getTestCaseNames(self.obj.__class__) - l = [] - for name in names: - callobj = getattr(self.obj, name) - if py.builtin.callable(callobj): - l.append(UnitTestFunction(name, parent=self)) - return l - - def _getobj(self): - x = self.parent.obj - return self.parent.obj(methodName='run') - -class UnitTestFunction(py.test.collect.Function): - def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None): - super(UnitTestFunction, self).__init__(name, parent) - self._args = args - if obj is not _dummy: - self._obj = obj - self._sort_value = sort_value - if hasattr(self.parent, 'newinstance'): - self.parent.newinstance() - self.obj = self._getobj() - - def runtest(self): - target = self.obj - args = self._args - target(*args) - - def setup(self): - instance = py.builtin._getimself(self.obj) - instance.setUp() - - def teardown(self): - instance = py.builtin._getimself(self.obj) - instance.tearDown() - diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. 
diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py --- a/pypy/jit/codewriter/jtransform.py +++ b/pypy/jit/codewriter/jtransform.py @@ -5,7 +5,7 @@ from pypy.objspace.flow.model import SpaceOperation, Variable, Constant from pypy.objspace.flow.model import Block, Link, c_last_exception from pypy.jit.codewriter.flatten import ListOfKind, IndirectCallTargets -from pypy.jit.codewriter import support, heaptracker +from pypy.jit.codewriter import support, heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.codewriter.policy import log from pypy.jit.metainterp.typesystem import deref, arrayItem @@ -39,7 +39,6 @@ def optimize_block(self, block): if block.operations == (): return - self.remove_longlong_constants(block) self.vable_array_vars = {} self.vable_flags = {} renamings = {} @@ -135,55 +134,6 @@ block.exits = block.exits[:1] block.exitswitch = None - def remove_longlong_constants(self, block): - # remove all Constant({Un}signedLongLong), and replace them with - # cast_int_to_longlong(Constant(Signed)) or - # two_ints_to_longlong(Constant(Signed), Constant(Signed)). 
- operations = [] - all_constants = {} - # - def _get_const_as_var(c): - v = all_constants.get(c) - if v is None: - from pypy.rlib.rarithmetic import intmask - v = varoftype(c.concretetype) - value = int(c.value) - c_hi = Constant(intmask(value >> 32), lltype.Signed) - c_lo = Constant(intmask(value), lltype.Signed) - if c_lo.value == value: - # a long long constant, but it fits in 32 bits - op1 = SpaceOperation('cast_int_to_longlong', [c_lo], v) - else: - # a 64-bit long long constant, requires two ints - op1 = SpaceOperation('two_ints_to_longlong', [c_lo, c_hi], - v) - operations.append(op1) - all_constants[c] = v - return v - # - for op in block.operations: - for i, v in enumerate(op.args): - if (isinstance(v, Constant) and - self._is_longlong(v.concretetype)): - args = op.args[:] - args[i] = _get_const_as_var(v) - op = SpaceOperation(op.opname, args, op.result) - operations.append(op) - # - last_op = None - if block.exitswitch == c_last_exception: - last_op = operations.pop() - for link in block.exits: - for i, v in enumerate(link.args): - if (isinstance(v, Constant) and - self._is_longlong(v.concretetype)): - args = link.args[:] - args[i] = _get_const_as_var(v) - link.args = args - if last_op is not None: - operations.append(last_op) - block.operations = operations - # ---------- def follow_constant_exit(self, block): @@ -849,17 +799,6 @@ # and unsupported ones are turned into a call to a function from # jit.codewriter.support. 
- if lltype.SignedLongLong != lltype.Signed: - @staticmethod - def _is_longlong(TYPE): - return (TYPE == lltype.SignedLongLong or - TYPE == lltype.UnsignedLongLong) - else: - # on 64-bit, _is_longlong() returns always False - @staticmethod - def _is_longlong(TYPE): - return False - for _op, _oopspec in [('llong_invert', 'INVERT'), ('ullong_invert', 'INVERT'), ('llong_lt', 'LT'), @@ -894,8 +833,7 @@ ('truncate_longlong_to_int', 'TO_INT'), ('cast_float_to_longlong', 'FROM_FLOAT'), ('cast_longlong_to_float', 'TO_FLOAT'), - # internal pseuso-operation: - ('two_ints_to_longlong', 'FROM_TWO_INTS'), + ('cast_uint_to_longlong', 'FROM_UINT'), ]: exec py.code.Source(''' def rewrite_op_%s(self, op): @@ -904,8 +842,10 @@ op2 = self._handle_oopspec_call(op1, args, EffectInfo.OS_LLONG_%s, EffectInfo.EF_PURE) + if %r == "TO_INT": + assert op2.result.concretetype == lltype.Signed return op2 - ''' % (_op, _oopspec.lower(), _oopspec)).compile() + ''' % (_op, _oopspec.lower(), _oopspec, _oopspec)).compile() def _normalize(self, oplist): if isinstance(oplist, SpaceOperation): @@ -937,22 +877,30 @@ rewrite_op_ullong_is_true = rewrite_op_llong_is_true def rewrite_op_cast_primitive(self, op): - fromll = self._is_longlong(op.args[0].concretetype) - toll = self._is_longlong(op.result.concretetype) + fromll = longlong.is_longlong(op.args[0].concretetype) + toll = longlong.is_longlong(op.result.concretetype) if fromll != toll: args = op.args if fromll: opname = 'truncate_longlong_to_int' + RESULT = lltype.Signed else: from pypy.rpython.lltypesystem import rffi if rffi.cast(op.args[0].concretetype, -1) < 0: opname = 'cast_int_to_longlong' else: - opname = 'two_ints_to_longlong' - c_hi = Constant(0, lltype.Signed) - args = [args[0], c_hi] - op1 = SpaceOperation(opname, args, op.result) - return self.rewrite_operation(op1) + opname = 'cast_uint_to_longlong' + RESULT = lltype.SignedLongLong + v = varoftype(RESULT) + op1 = SpaceOperation(opname, args, v) + op2 = self.rewrite_operation(op1) + # + 
# force a renaming to put the correct result in place, even though + # it might be slightly mistyped (e.g. Signed versus Unsigned) + assert op2.result is v + op2.result = op.result + # + return op2 # ---------- # Renames, from the _old opname to the _new one. @@ -1303,7 +1251,7 @@ return op1 def _register_extra_helper(self, oopspecindex, oopspec_name, - argtypes, resulttype): + argtypes, resulttype, effectinfo): # a bit hackish if self.callcontrol.callinfocollection.has_oopspec(oopspecindex): return @@ -1313,7 +1261,8 @@ op = SpaceOperation('pseudo_call_cannot_raise', [c_func] + [varoftype(T) for T in argtypes], varoftype(resulttype)) - calldescr = self.callcontrol.getcalldescr(op, oopspecindex) + calldescr = self.callcontrol.getcalldescr(op, oopspecindex, + effectinfo) if isinstance(c_func.value, str): # in tests only func = c_func.value else: @@ -1372,11 +1321,15 @@ if args[0].concretetype.TO == rstr.UNICODE: otherindex += EffectInfo._OS_offset_uni self._register_extra_helper(otherindex, othername, - argtypes, resulttype) + argtypes, resulttype, + EffectInfo.EF_PURE) # - return self._handle_oopspec_call(op, args, dict[oopspec_name]) + return self._handle_oopspec_call(op, args, dict[oopspec_name], + EffectInfo.EF_PURE) def _handle_str2unicode_call(self, op, oopspec_name, args): + # ll_str2unicode is not EF_PURE, because it can raise + # UnicodeDecodeError... return self._handle_oopspec_call(op, args, EffectInfo.OS_STR2UNICODE) # ---------- diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/py/_compat/__init__.py b/py/_compat/__init__.py deleted file mode 100644 --- a/py/_compat/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" compatibility modules (taken from 2.4.4) """ - diff --git a/py/_compat/dep_subprocess.py b/py/_compat/dep_subprocess.py deleted file mode 100644 --- a/py/_compat/dep_subprocess.py +++ /dev/null @@ -1,5 +0,0 @@ - -import py -py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", -stacklevel="apipkg") -subprocess = py.std.subprocess diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . 
Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimisation is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/py/_plugin/pytest_pytester.py b/py/_plugin/pytest_pytester.py deleted file mode 100644 --- a/py/_plugin/pytest_pytester.py +++ /dev/null @@ -1,500 +0,0 @@ -""" -funcargs and support code for testing py.test's own functionality. 
-""" - -import py -import sys, os -import re -import inspect -import time -from py._test.config import Config as pytestConfig -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("pylib") - group.addoption('--tools-on-path', - action="store_true", dest="toolsonpath", default=False, - help=("discover tools on PATH instead of going through py.cmdline.") - ) - -pytest_plugins = '_pytest' - -def pytest_funcarg__linecomp(request): - return LineComp() - -def pytest_funcarg__LineMatcher(request): - return LineMatcher - -def pytest_funcarg__testdir(request): - tmptestdir = TmpTestdir(request) - return tmptestdir - -rex_outcome = re.compile("(\d+) (\w+)") -class RunResult: - def __init__(self, ret, outlines, errlines, duration): - self.ret = ret - self.outlines = outlines - self.errlines = errlines - self.stdout = LineMatcher(outlines) - self.stderr = LineMatcher(errlines) - self.duration = duration - - def parseoutcomes(self): - for line in reversed(self.outlines): - if 'seconds' in line: - outcomes = rex_outcome.findall(line) - if outcomes: - d = {} - for num, cat in outcomes: - d[cat] = int(num) - return d - -class TmpTestdir: - def __init__(self, request): - self.request = request - self._pytest = request.getfuncargvalue("_pytest") - # XXX remove duplication with tmpdir plugin - basetmp = request.config.ensuretemp("testdir") - name = request.function.__name__ - for i in range(100): - try: - tmpdir = basetmp.mkdir(name + str(i)) - except py.error.EEXIST: - continue - break - # we need to create another subdir - # because Directory.collect() currently loads - # conftest.py from sibling directories - self.tmpdir = tmpdir.mkdir(name) - self.plugins = [] - self._syspathremove = [] - self.chdir() # always chdir - self.request.addfinalizer(self.finalize) - - def __repr__(self): - return "" % (self.tmpdir,) - - def Config(self, topdir=None): - if topdir is None: - topdir = self.tmpdir.dirpath() - return pytestConfig(topdir=topdir) - - def 
finalize(self): - for p in self._syspathremove: - py.std.sys.path.remove(p) - if hasattr(self, '_olddir'): - self._olddir.chdir() - # delete modules that have been loaded from tmpdir - for name, mod in list(sys.modules.items()): - if mod: - fn = getattr(mod, '__file__', None) - if fn and fn.startswith(str(self.tmpdir)): - del sys.modules[name] - - def getreportrecorder(self, obj): - if hasattr(obj, 'config'): - obj = obj.config - if hasattr(obj, 'hook'): - obj = obj.hook - assert hasattr(obj, '_hookspecs'), obj - reprec = ReportRecorder(obj) - reprec.hookrecorder = self._pytest.gethookrecorder(obj) - reprec.hook = reprec.hookrecorder.hook - return reprec - - def chdir(self): - old = self.tmpdir.chdir() - if not hasattr(self, '_olddir'): - self._olddir = old - - def _makefile(self, ext, args, kwargs): - items = list(kwargs.items()) - if args: - source = "\n".join(map(str, args)) + "\n" - basename = self.request.function.__name__ - items.insert(0, (basename, source)) - ret = None - for name, value in items: - p = self.tmpdir.join(name).new(ext=ext) - source = str(py.code.Source(value)).lstrip() - p.write(source.encode("utf-8"), "wb") - if ret is None: - ret = p - return ret - - - def makefile(self, ext, *args, **kwargs): - return self._makefile(ext, args, kwargs) - - def makeconftest(self, source): - return self.makepyfile(conftest=source) - - def makepyfile(self, *args, **kwargs): - return self._makefile('.py', args, kwargs) - - def maketxtfile(self, *args, **kwargs): - return self._makefile('.txt', args, kwargs) - - def syspathinsert(self, path=None): - if path is None: - path = self.tmpdir - py.std.sys.path.insert(0, str(path)) - self._syspathremove.append(str(path)) - - def mkdir(self, name): - return self.tmpdir.mkdir(name) - - def mkpydir(self, name): - p = self.mkdir(name) - p.ensure("__init__.py") - return p - - def genitems(self, colitems): - return list(self.session.genitems(colitems)) - - def inline_genitems(self, *args): - #config = 
self.parseconfig(*args) - config = self.parseconfig(*args) - session = config.initsession() - rec = self.getreportrecorder(config) - colitems = [config.getnode(arg) for arg in config.args] - items = list(session.genitems(colitems)) - return items, rec - - def runitem(self, source): - # used from runner functional tests - item = self.getitem(source) - # the test class where we are called from wants to provide the runner - testclassinstance = py.builtin._getimself(self.request.function) - runner = testclassinstance.getrunner() - return runner(item) - - def inline_runsource(self, source, *cmdlineargs): - p = self.makepyfile(source) - l = list(cmdlineargs) + [p] - return self.inline_run(*l) - - def inline_runsource1(self, *args): - args = list(args) - source = args.pop() - p = self.makepyfile(source) - l = list(args) + [p] - reprec = self.inline_run(*l) - reports = reprec.getreports("pytest_runtest_logreport") - assert len(reports) == 1, reports - return reports[0] - - def inline_run(self, *args): - args = ("-s", ) + args # otherwise FD leakage - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - session = config.initsession() - reprec = self.getreportrecorder(config) - colitems = config.getinitialnodes() - session.main(colitems) - config.pluginmanager.do_unconfigure(config) - return reprec - - def config_preparse(self): - config = self.Config() - for plugin in self.plugins: - if isinstance(plugin, str): - config.pluginmanager.import_plugin(plugin) - else: - if isinstance(plugin, dict): - plugin = PseudoPlugin(plugin) - if not config.pluginmanager.isregistered(plugin): - config.pluginmanager.register(plugin) - return config - - def parseconfig(self, *args): - if not args: - args = (self.tmpdir,) - config = self.config_preparse() - args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')] - config.parse(args) - return config - - def reparseconfig(self, args=None): - """ this is used from tests that want to re-invoke parse(). 
""" - if not args: - args = [self.tmpdir] - from py._test import config - oldconfig = config.config_per_process # py.test.config - try: - c = config.config_per_process = py.test.config = pytestConfig() - c.basetemp = oldconfig.mktemp("reparse", numbered=True) - c.parse(args) - return c - finally: - config.config_per_process = py.test.config = oldconfig - - def parseconfigure(self, *args): - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - return config - - def getitem(self, source, funcname="test_func"): - modcol = self.getmodulecol(source) - moditems = modcol.collect() - for item in modcol.collect(): - if item.name == funcname: - return item - else: - assert 0, "%r item not found in module:\n%s" %(funcname, source) - - def getitems(self, source): - modcol = self.getmodulecol(source) - return list(modcol.config.initsession().genitems([modcol])) - #assert item is not None, "%r item not found in module:\n%s" %(funcname, source) - #return item - - def getfscol(self, path, configargs=()): - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - return self.config.getnode(path) - - def getmodulecol(self, source, configargs=(), withinit=False): - kw = {self.request.function.__name__: py.code.Source(source).strip()} - path = self.makepyfile(**kw) - if withinit: - self.makepyfile(__init__ = "#") - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - #self.config.pluginmanager.do_configure(config=self.config) - # XXX - self.config.pluginmanager.import_plugin("runner") - plugin = self.config.pluginmanager.getplugin("runner") - plugin.pytest_configure(config=self.config) - - return self.config.getnode(path) - - def popen(self, cmdargs, stdout, stderr, **kw): - if not hasattr(py.std, 'subprocess'): - py.test.skip("no subprocess module") - env = os.environ.copy() - env['PYTHONPATH'] = ":".join(filter(None, [ - str(os.getcwd()), env.get('PYTHONPATH', '')])) - kw['env'] 
= env - #print "env", env - return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) - - def run(self, *cmdargs): - return self._run(*cmdargs) - - def _run(self, *cmdargs): - cmdargs = [str(x) for x in cmdargs] - p1 = self.tmpdir.join("stdout") - p2 = self.tmpdir.join("stderr") - print_("running", cmdargs, "curdir=", py.path.local()) - f1 = p1.open("wb") - f2 = p2.open("wb") - now = time.time() - popen = self.popen(cmdargs, stdout=f1, stderr=f2, - close_fds=(sys.platform != "win32")) - ret = popen.wait() - f1.close() - f2.close() - out = p1.read("rb") - out = getdecoded(out).splitlines() - err = p2.read("rb") - err = getdecoded(err).splitlines() - def dump_lines(lines, fp): - try: - for line in lines: - py.builtin.print_(line, file=fp) - except UnicodeEncodeError: - print("couldn't print to %s because of encoding" % (fp,)) - dump_lines(out, sys.stdout) - dump_lines(err, sys.stderr) - return RunResult(ret, out, err, time.time()-now) - - def runpybin(self, scriptname, *args): - fullargs = self._getpybinargs(scriptname) + args - return self.run(*fullargs) - - def _getpybinargs(self, scriptname): - if self.request.config.getvalue("toolsonpath"): - script = py.path.local.sysfind(scriptname) - assert script, "script %r not found" % scriptname - return (script,) - else: - cmdlinename = scriptname.replace(".", "") - assert hasattr(py.cmdline, cmdlinename), cmdlinename - source = ("import sys;sys.path.insert(0,%r);" - "import py;py.cmdline.%s()" % - (str(py._pydir.dirpath()), cmdlinename)) - return (sys.executable, "-c", source,) - - def runpython(self, script): - s = self._getsysprepend() - if s: - script.write(s + "\n" + script.read()) - return self.run(sys.executable, script) - - def _getsysprepend(self): - if not self.request.config.getvalue("toolsonpath"): - s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath()) - else: - s = "" - return s - - def runpython_c(self, command): - command = self._getsysprepend() + command - return 
self.run(py.std.sys.executable, "-c", command) - - def runpytest(self, *args): - p = py.path.local.make_numbered_dir(prefix="runpytest-", - keep=None, rootdir=self.tmpdir) - args = ('--basetemp=%s' % p, ) + args - plugins = [x for x in self.plugins if isinstance(x, str)] - if plugins: - args = ('-p', plugins[0]) + args - return self.runpybin("py.test", *args) - - def spawn_pytest(self, string, expect_timeout=10.0): - pexpect = py.test.importorskip("pexpect", "2.4") - if not self.request.config.getvalue("toolsonpath"): - py.test.skip("need --tools-on-path to run py.test script") - basetemp = self.tmpdir.mkdir("pexpect") - invoke = self._getpybinargs("py.test")[0] - cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) - child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w")) - child.timeout = expect_timeout - return child - -def getdecoded(out): - try: - return out.decode("utf-8") - except UnicodeDecodeError: - return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( - py.io.saferepr(out),) - -class PseudoPlugin: - def __init__(self, vars): - self.__dict__.update(vars) - -class ReportRecorder(object): - def __init__(self, hook): - self.hook = hook - self.registry = hook._registry - self.registry.register(self) - - def getcall(self, name): - return self.hookrecorder.getcall(name) - - def popcall(self, name): - return self.hookrecorder.popcall(name) - - def getcalls(self, names): - """ return list of ParsedCall instances matching the given eventname. 
""" - return self.hookrecorder.getcalls(names) - - # functionality for test reports - - def getreports(self, names="pytest_runtest_logreport pytest_collectreport"): - return [x.report for x in self.getcalls(names)] - - def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"): - """ return a testreport whose dotted import path matches """ - l = [] - for rep in self.getreports(names=names): - colitem = rep.getnode() - if not inamepart or inamepart in colitem.listnames(): - l.append(rep) - if not l: - raise ValueError("could not find test report matching %r: no test reports at all!" % - (inamepart,)) - if len(l) > 1: - raise ValueError("found more than one testreport matching %r: %s" %( - inamepart, l)) - return l[0] - - def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'): - return [rep for rep in self.getreports(names) if rep.failed] - - def getfailedcollections(self): - return self.getfailures('pytest_collectreport') - - def listoutcomes(self): - passed = [] - skipped = [] - failed = [] - for rep in self.getreports("pytest_runtest_logreport"): - if rep.passed: - if rep.when == "call": - passed.append(rep) - elif rep.skipped: - skipped.append(rep) - elif rep.failed: - failed.append(rep) - return passed, skipped, failed - - def countoutcomes(self): - return [len(x) for x in self.listoutcomes()] - - def assertoutcome(self, passed=0, skipped=0, failed=0): - realpassed, realskipped, realfailed = self.listoutcomes() - assert passed == len(realpassed) - assert skipped == len(realskipped) - assert failed == len(realfailed) - - def clear(self): - self.hookrecorder.calls[:] = [] - - def unregister(self): - self.registry.unregister(self) - self.hookrecorder.finish_recording() - -class LineComp: - def __init__(self): - self.stringio = py.io.TextIO() - - def assert_contains_lines(self, lines2): - """ assert that lines2 are contained (linearly) in lines1. - return a list of extralines found. 
- """ - __tracebackhide__ = True - val = self.stringio.getvalue() - self.stringio.truncate(0) - self.stringio.seek(0) - lines1 = val.split("\n") - return LineMatcher(lines1).fnmatch_lines(lines2) - -class LineMatcher: - def __init__(self, lines): - self.lines = lines - - def str(self): - return "\n".join(self.lines) - - def fnmatch_lines(self, lines2): - if isinstance(lines2, str): - lines2 = py.code.Source(lines2) - if isinstance(lines2, py.code.Source): - lines2 = lines2.strip().lines - - from fnmatch import fnmatch - lines1 = self.lines[:] - nextline = None - extralines = [] - __tracebackhide__ = True - for line in lines2: - nomatchprinted = False - while lines1: - nextline = lines1.pop(0) - if line == nextline: - print_("exact match:", repr(line)) - break - elif fnmatch(nextline, line): - print_("fnmatch:", repr(line)) - print_(" with:", repr(nextline)) - break - else: - if not nomatchprinted: - print_("nomatch:", repr(line)) - nomatchprinted = True - print_(" and:", repr(nextline)) - extralines.append(nextline) - else: - assert line == nextline diff --git a/py/_plugin/pytest_monkeypatch.py b/py/_plugin/pytest_monkeypatch.py deleted file mode 100644 --- a/py/_plugin/pytest_monkeypatch.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -safely patch object attributes, dicts and environment variables. - -Usage ----------------- - -Use the `monkeypatch funcarg`_ to tweak your global test environment -for running a particular test. You can safely set/del an attribute, -dictionary item or environment variable by respective methods -on the monkeypatch funcarg. If you want e.g. to set an ENV1 variable -and have os.path.expanduser return a particular directory, you can -write it down like this: - -.. sourcecode:: python - - def test_mytest(monkeypatch): - monkeypatch.setenv('ENV1', 'myval') - monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz') - ... 
# your test code that uses those patched values implicitely - -After the test function finished all modifications will be undone, -because the ``monkeypatch.undo()`` method is registered as a finalizer. - -``monkeypatch.setattr/delattr/delitem/delenv()`` all -by default raise an Exception if the target does not exist. -Pass ``raising=False`` if you want to skip this check. - -prepending to PATH or other environment variables ---------------------------------------------------------- - -To prepend a value to an already existing environment parameter: - -.. sourcecode:: python - - def test_mypath_finding(monkeypatch): - monkeypatch.setenv('PATH', 'x/y', prepend=":") - # in bash language: export PATH=x/y:$PATH - -calling "undo" finalization explicitely ------------------------------------------ - -At the end of function execution py.test invokes -a teardown hook which undoes all monkeypatch changes. -If you do not want to wait that long you can call -finalization explicitely:: - - monkeypatch.undo() - -This will undo previous changes. This call consumes the -undo stack. Calling it a second time has no effect unless -you start monkeypatching after the undo call. - -.. _`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/ -""" - -import py, os, sys - -def pytest_funcarg__monkeypatch(request): - """The returned ``monkeypatch`` funcarg provides these - helper methods to modify objects, dictionaries or os.environ:: - - monkeypatch.setattr(obj, name, value, raising=True) - monkeypatch.delattr(obj, name, raising=True) - monkeypatch.setitem(mapping, name, value) - monkeypatch.delitem(obj, name, raising=True) - monkeypatch.setenv(name, value, prepend=False) - monkeypatch.delenv(name, value, raising=True) - monkeypatch.syspath_prepend(path) - - All modifications will be undone when the requesting - test function finished its execution. 
The ``raising`` - parameter determines if a KeyError or AttributeError - will be raised if the set/deletion operation has no target. - """ - monkeypatch = MonkeyPatch() - request.addfinalizer(monkeypatch.undo) - return monkeypatch - -notset = object() - -class MonkeyPatch: - def __init__(self): - self._setattr = [] - self._setitem = [] - - def setattr(self, obj, name, value, raising=True): - oldval = getattr(obj, name, notset) - if raising and oldval is notset: - raise AttributeError("%r has no attribute %r" %(obj, name)) - self._setattr.insert(0, (obj, name, oldval)) - setattr(obj, name, value) - - def delattr(self, obj, name, raising=True): - if not hasattr(obj, name): - if raising: - raise AttributeError(name) - else: - self._setattr.insert(0, (obj, name, getattr(obj, name, notset))) - delattr(obj, name) - - def setitem(self, dic, name, value): - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - dic[name] = value - - def delitem(self, dic, name, raising=True): - if name not in dic: - if raising: - raise KeyError(name) - else: - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - del dic[name] - - def setenv(self, name, value, prepend=None): - value = str(value) - if prepend and name in os.environ: - value = value + prepend + os.environ[name] - self.setitem(os.environ, name, value) - - def delenv(self, name, raising=True): - self.delitem(os.environ, name, raising=raising) - - def syspath_prepend(self, path): - if not hasattr(self, '_savesyspath'): - self._savesyspath = sys.path[:] - sys.path.insert(0, str(path)) - - def undo(self): - for obj, name, value in self._setattr: - if value is not notset: - setattr(obj, name, value) - else: - delattr(obj, name) - self._setattr[:] = [] - for dictionary, name, value in self._setitem: - if value is notset: - del dictionary[name] - else: - dictionary[name] = value - self._setitem[:] = [] - if hasattr(self, '_savesyspath'): - sys.path[:] = self._savesyspath diff --git a/py/_code/oldmagic.py 
b/py/_code/oldmagic.py deleted file mode 100644 --- a/py/_code/oldmagic.py +++ /dev/null @@ -1,62 +0,0 @@ -""" deprecated module for turning on/off some features. """ - -import py - -from py.builtin import builtins as cpy_builtin - -def invoke(assertion=False, compile=False): - """ (deprecated) invoke magic, currently you can specify: - - assertion patches the builtin AssertionError to try to give - more meaningful AssertionErrors, which by means - of deploying a mini-interpreter constructs - a useful error message. - """ - py.log._apiwarn("1.1", - "py.magic.invoke() is deprecated, use py.code.patch_builtins()", - stacklevel=2, - ) - py.code.patch_builtins(assertion=assertion, compile=compile) - -def revoke(assertion=False, compile=False): - """ (deprecated) revoke previously invoked magic (see invoke()).""" - py.log._apiwarn("1.1", - "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()", - stacklevel=2, - ) - py.code.unpatch_builtins(assertion=assertion, compile=compile) - -patched = {} - -def patch(namespace, name, value): - """ (deprecated) rebind the 'name' on the 'namespace' to the 'value', - possibly and remember the original value. Multiple - invocations to the same namespace/name pair will - remember a list of old values. - """ - py.log._apiwarn("1.1", - "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - orig = getattr(namespace, name) - patched.setdefault(nref, []).append(orig) - setattr(namespace, name, value) - return orig - -def revert(namespace, name): - """ (deprecated) revert to the orginal value the last patch modified. - Raise ValueError if no such original value exists. 
- """ - py.log._apiwarn("1.1", - "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - if nref not in patched or not patched[nref]: - raise ValueError("No original value stored for %s.%s" % nref) - current = getattr(namespace, name) - orig = patched[nref].pop() - setattr(namespace, name, orig) - return current - diff --git a/py/bin/win32/py.lookup.cmd b/py/bin/win32/py.lookup.cmd deleted file mode 100644 --- a/py/bin/win32/py.lookup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.lookup" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. 
diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. - -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. 
- -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. 
_`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be speficied: it will -automatically assumed to be ``MyClass``. 
- -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepacy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. - -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracked notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. 
diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. - It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. 
- -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. _`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/py/bin/win32/py.convert_unittest.cmd b/py/bin/win32/py.convert_unittest.cmd deleted file mode 100644 --- a/py/bin/win32/py.convert_unittest.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.convert_unittest" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/py/bin/win32/py.test.cmd b/py/bin/win32/py.test.cmd deleted file mode 100644 --- a/py/bin/win32/py.test.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.test" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. diff --git a/py/_plugin/pytest_hooklog.py b/py/_plugin/pytest_hooklog.py deleted file mode 100644 --- a/py/_plugin/pytest_hooklog.py +++ /dev/null @@ -1,33 +0,0 @@ -""" log invocations of extension hooks to a file. 
""" -import py - -def pytest_addoption(parser): - parser.addoption("--hooklog", dest="hooklog", default=None, - help="write hook calls to the given file.") - -def pytest_configure(config): - hooklog = config.getvalue("hooklog") - if hooklog: - config._hooklogfile = open(hooklog, 'w') - config._hooklog_oldperformcall = config.hook._performcall - config.hook._performcall = (lambda name, multicall: - logged_call(name=name, multicall=multicall, config=config)) - -def logged_call(name, multicall, config): - f = config._hooklogfile - f.write("%s(**%s)\n" % (name, multicall.kwargs)) - try: - res = config._hooklog_oldperformcall(name=name, multicall=multicall) - except: - f.write("-> exception") - raise - f.write("-> %r" % (res,)) - return res - -def pytest_unconfigure(config): - try: - del config.hook.__dict__['_performcall'] - except KeyError: - pass - else: - config._hooklogfile.close() diff --git a/py/_cmdline/pycleanup.py b/py/_cmdline/pycleanup.py deleted file mode 100755 --- a/py/_cmdline/pycleanup.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.cleanup [PATH] ... - -Delete typical python development related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot. Optionally remove setup.py related files and empty -directories. - -""" -import py -import sys, subprocess - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - parser.add_option("-e", metavar="ENDING", - dest="endings", default=[".pyc", "$py.class"], action="append", - help=("(multi) recursively remove files with the given ending." 
- " '.pyc' and '$py.class' are in the default list.")) - parser.add_option("-d", action="store_true", dest="removedir", - help="remove empty directories.") - parser.add_option("-s", action="store_true", dest="setup", - help="remove 'build' and 'dist' directories next to setup.py files") - parser.add_option("-a", action="store_true", dest="all", - help="synonym for '-S -d -e pip-log.txt'") - parser.add_option("-n", "--dryrun", dest="dryrun", default=False, - action="store_true", - help="don't actually delete but display would-be-removed filenames.") - (options, args) = parser.parse_args() - - Cleanup(options, args).main() - -class Cleanup: - def __init__(self, options, args): - if not args: - args = ["."] - self.options = options - self.args = [py.path.local(x) for x in args] - if options.all: - options.setup = True - options.removedir = True - options.endings.append("pip-log.txt") - - def main(self): - if self.options.setup: - for arg in self.args: - self.setupclean(arg) - - for path in self.args: - py.builtin.print_("cleaning path", path, - "of extensions", self.options.endings) - for x in path.visit(self.shouldremove, self.recursedir): - self.remove(x) - if self.options.removedir: - for x in path.visit(lambda x: x.check(dir=1), self.recursedir): - if not x.listdir(): - self.remove(x) - - def shouldremove(self, p): - for ending in self.options.endings: - if p.basename.endswith(ending): - return True - - def recursedir(self, path): - return path.check(dotfile=0, link=0) - - def remove(self, path): - if not path.check(): - return - if self.options.dryrun: - py.builtin.print_("would remove", path) - else: - py.builtin.print_("removing", path) - path.remove() - - def XXXcallsetup(self, setup, *args): - old = setup.dirpath().chdir() - try: - subprocess.call([sys.executable, str(setup)] + list(args)) - finally: - old.chdir() - - def setupclean(self, path): - for x in path.visit("setup.py", self.recursedir): - basepath = x.dirpath() - self.remove(basepath / "build") - 
self.remove(basepath / "dist") diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. 
diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py --- a/pypy/jit/metainterp/blackhole.py +++ b/pypy/jit/metainterp/blackhole.py @@ -7,7 +7,7 @@ from pypy.rpython.lltypesystem.lloperation import llop from pypy.rpython.llinterp import LLException from pypy.jit.codewriter.jitcode import JitCode, SwitchDictDescr -from pypy.jit.codewriter import heaptracker +from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.metainterp.jitexc import JitException, get_llexception, reraise from pypy.jit.metainterp.compile import ResumeAtPositionDescr @@ -204,7 +204,7 @@ assert argcodes[next_argcode] == '>' assert argcodes[next_argcode + 1] == 'f' next_argcode = next_argcode + 2 - assert lltype.typeOf(result) is lltype.Float + assert lltype.typeOf(result) is longlong.FLOATSTORAGE self.registers_f[ord(code[position])] = result position += 1 elif resulttype == 'L': @@ -252,7 +252,7 @@ if we_are_translated(): default_i = 0 default_r = NULL - default_f = 0.0 + default_f = longlong.ZEROF else: default_i = MissingValue() default_r = MissingValue() @@ -281,12 +281,15 @@ self.position = position def setarg_i(self, index, value): + assert lltype.typeOf(value) is lltype.Signed self.registers_i[index] = value def setarg_r(self, index, value): + assert lltype.typeOf(value) == llmemory.GCREF self.registers_r[index] = value def setarg_f(self, index, value): + assert lltype.typeOf(value) is longlong.FLOATSTORAGE self.registers_f[index] = value def run(self): @@ -535,52 +538,82 @@ @arguments("f", returns="f") def bhimpl_float_neg(a): - return -a + a = longlong.getrealfloat(a) + x = -a + return longlong.getfloatstorage(x) @arguments("f", returns="f") def bhimpl_float_abs(a): - return abs(a) + a = longlong.getrealfloat(a) + x = abs(a) + return longlong.getfloatstorage(x) @arguments("f", "f", returns="f") def bhimpl_float_add(a, b): - return a + b + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) + x = a + b + return longlong.getfloatstorage(x) 
@arguments("f", "f", returns="f") def bhimpl_float_sub(a, b): - return a - b + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) + x = a - b + return longlong.getfloatstorage(x) @arguments("f", "f", returns="f") def bhimpl_float_mul(a, b): - return a * b + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) + x = a * b + return longlong.getfloatstorage(x) @arguments("f", "f", returns="f") def bhimpl_float_truediv(a, b): - return a / b + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) + x = a / b + return longlong.getfloatstorage(x) @arguments("f", "f", returns="i") def bhimpl_float_lt(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a < b @arguments("f", "f", returns="i") def bhimpl_float_le(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a <= b @arguments("f", "f", returns="i") def bhimpl_float_eq(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a == b @arguments("f", "f", returns="i") def bhimpl_float_ne(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a != b @arguments("f", "f", returns="i") def bhimpl_float_gt(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a > b @arguments("f", "f", returns="i") def bhimpl_float_ge(a, b): + a = longlong.getrealfloat(a) + b = longlong.getrealfloat(b) return a >= b @arguments("f", returns="i") def bhimpl_cast_float_to_int(a): + a = longlong.getrealfloat(a) # note: we need to call int() twice to care for the fact that # int(-2147483648.0) returns a long :-( return int(int(a)) @arguments("i", returns="f") def bhimpl_cast_int_to_float(a): - return float(a) + x = float(a) + return longlong.getfloatstorage(x) # ---------- # control flow operations @@ -1271,10 +1304,13 @@ # connect the return of values from the called frame to the # 'xxx_call_yyy' instructions from the caller frame def _setup_return_value_i(self, result): + assert lltype.typeOf(result) is lltype.Signed 
self.registers_i[ord(self.jitcode.code[self.position-1])] = result def _setup_return_value_r(self, result): + assert lltype.typeOf(result) == llmemory.GCREF self.registers_r[ord(self.jitcode.code[self.position-1])] = result def _setup_return_value_f(self, result): + assert lltype.typeOf(result) is longlong.FLOATSTORAGE self.registers_f[ord(self.jitcode.code[self.position-1])] = result def _done_with_this_frame(self): @@ -1338,7 +1374,7 @@ for i in range(self.jitcode.num_regs_f()): box = miframe.registers_f[i] if box is not None: - self.setarg_f(i, box.getfloat()) + self.setarg_f(i, box.getfloatstorage()) # ____________________________________________________________ diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/py/_plugin/pytest_assertion.py b/py/_plugin/pytest_assertion.py deleted file mode 100644 --- a/py/_plugin/pytest_assertion.py +++ /dev/null @@ -1,28 +0,0 @@ -import py -import sys - -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group._addoption('--no-assert', action="store_true", default=False, - dest="noassert", - help="disable python assert expression reinterpretation."), - -def pytest_configure(config): - if not config.getvalue("noassert") and not config.getvalue("nomagic"): - warn_about_missing_assertion() - config._oldassertion = py.builtin.builtins.AssertionError - py.builtin.builtins.AssertionError = py.code._AssertionError - -def pytest_unconfigure(config): - if hasattr(config, '_oldassertion'): - py.builtin.builtins.AssertionError = config._oldassertion - del config._oldassertion - -def warn_about_missing_assertion(): - try: - assert False - except AssertionError: - pass - else: - py.std.warnings.warn("Assertions are turned off!" - " (are you using python -O?)") diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. 
internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/py/_plugin/pytest_resultlog.py b/py/_plugin/pytest_resultlog.py deleted file mode 100644 --- a/py/_plugin/pytest_resultlog.py +++ /dev/null @@ -1,98 +0,0 @@ -"""non-xml machine-readable logging of test results. - Useful for buildbot integration code. See the `PyPy-test`_ - web page for post-processing. - -.. _`PyPy-test`: http://codespeak.net:8099/summary - -""" - -import py -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("resultlog", "resultlog plugin options") - group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None, - help="path for machine-readable result log.") - -def pytest_configure(config): - resultlog = config.option.resultlog - if resultlog: - logfile = open(resultlog, 'w', 1) # line buffered - config._resultlog = ResultLog(config, logfile) - config.pluginmanager.register(config._resultlog) - -def pytest_unconfigure(config): - resultlog = getattr(config, '_resultlog', None) - if resultlog: - resultlog.logfile.close() - del config._resultlog - config.pluginmanager.unregister(resultlog) - -def generic_path(item): - chain = item.listchain() - gpath = [chain[0].name] - fspath = chain[0].fspath - fspart = False - for node in chain[1:]: - newfspath = node.fspath - if newfspath == fspath: - if fspart: - gpath.append(':') - fspart = False - else: - gpath.append('.') 
- else: - gpath.append('/') - fspart = True - name = node.name - if name[0] in '([': - gpath.pop() - gpath.append(name) - fspath = newfspath - return ''.join(gpath) - -class ResultLog(object): - def __init__(self, config, logfile): - self.config = config - self.logfile = logfile # preferably line buffered - - def write_log_entry(self, testpath, shortrepr, longrepr): - print_("%s %s" % (shortrepr, testpath), file=self.logfile) - for line in longrepr.splitlines(): - print_(" %s" % line, file=self.logfile) - - def log_outcome(self, node, shortrepr, longrepr): - testpath = generic_path(node) - self.write_log_entry(testpath, shortrepr, longrepr) - - def pytest_runtest_logreport(self, report): - res = self.config.hook.pytest_report_teststatus(report=report) - if res is not None: - code = res[1] - else: - code = report.shortrepr - if code == 'x': - longrepr = str(report.longrepr) - elif code == 'X': - longrepr = '' - elif report.passed: - longrepr = "" - elif report.failed: - longrepr = str(report.longrepr) - elif report.skipped: - longrepr = str(report.longrepr.reprcrash.message) - self.log_outcome(report.item, code, longrepr) - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - code = "F" - else: - assert report.skipped - code = "S" - longrepr = str(report.longrepr.reprcrash) - self.log_outcome(report.collector, code, longrepr) - - def pytest_internalerror(self, excrepr): - path = excrepr.reprcrash.path - self.write_log_entry(path, '!', str(excrepr)) diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/py/_test/pluginmanager.py b/py/_test/pluginmanager.py deleted file mode 100644 --- a/py/_test/pluginmanager.py +++ /dev/null @@ -1,353 +0,0 @@ -""" -managing loading and interacting with pytest plugins. -""" -import py -import inspect -from py._plugin import hookspec - -default_plugins = ( - "default runner capture mark terminal skipping tmpdir monkeypatch " - "recwarn pdb pastebin unittest helpconfig nose assertion genscript " - "junitxml doctest").split() - -def check_old_use(mod, modname): - clsname = modname[len('pytest_'):].capitalize() + "Plugin" - assert not hasattr(mod, clsname), (mod, clsname) - -class PluginManager(object): - def __init__(self): - self.registry = Registry() - self._name2plugin = {} - self._hints = [] - self.hook = HookRelay([hookspec], registry=self.registry) - self.register(self) - for spec in default_plugins: - self.import_plugin(spec) - - def _getpluginname(self, plugin, name): - if name is None: - if hasattr(plugin, '__name__'): - name = plugin.__name__.split(".")[-1] - else: - name = id(plugin) - return name - - def register(self, plugin, name=None): - assert not self.isregistered(plugin), plugin - assert not self.registry.isregistered(plugin), plugin - name = self._getpluginname(plugin, name) - if name in self._name2plugin: - return False - self._name2plugin[name] = plugin - self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self}) - self.hook.pytest_plugin_registered(manager=self, plugin=plugin) - self.registry.register(plugin) - return True - - def unregister(self, plugin): - self.hook.pytest_plugin_unregistered(plugin=plugin) - self.registry.unregister(plugin) - for name, value in list(self._name2plugin.items()): - if value == plugin: - del self._name2plugin[name] - - def isregistered(self, plugin, name=None): - if self._getpluginname(plugin, name) in self._name2plugin: - return True - for val in self._name2plugin.values(): - if plugin == val: - return True - - def addhooks(self, spec): - 
self.hook._addhooks(spec, prefix="pytest_") - - def getplugins(self): - return list(self.registry) - - def skipifmissing(self, name): - if not self.hasplugin(name): - py.test.skip("plugin %r is missing" % name) - - def hasplugin(self, name): - try: - self.getplugin(name) - except KeyError: - return False - else: - return True - - def getplugin(self, name): - try: - return self._name2plugin[name] - except KeyError: - impname = canonical_importname(name) - return self._name2plugin[impname] - - # API for bootstrapping - # - def _envlist(self, varname): - val = py.std.os.environ.get(varname, None) - if val is not None: - return val.split(',') - return () - - def consider_env(self): - for spec in self._envlist("PYTEST_PLUGINS"): - self.import_plugin(spec) - - def consider_setuptools_entrypoints(self): - try: - from pkg_resources import iter_entry_points - except ImportError: - return # XXX issue a warning - for ep in iter_entry_points('pytest11'): - name = canonical_importname(ep.name) - if name in self._name2plugin: - continue - plugin = ep.load() - self.register(plugin, name=name) - - def consider_preparse(self, args): - for opt1,opt2 in zip(args, args[1:]): - if opt1 == "-p": - self.import_plugin(opt2) - - def consider_conftest(self, conftestmodule): - cls = getattr(conftestmodule, 'ConftestPlugin', None) - if cls is not None: - raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, " - "were removed in 1.0.0b2" % (cls,)) - if self.register(conftestmodule, name=conftestmodule.__file__): - self.consider_module(conftestmodule) - - def consider_module(self, mod): - attr = getattr(mod, "pytest_plugins", ()) - if attr: - if not isinstance(attr, (list, tuple)): - attr = (attr,) - for spec in attr: - self.import_plugin(spec) - - def import_plugin(self, spec): - assert isinstance(spec, str) - modname = canonical_importname(spec) - if modname in self._name2plugin: - return - try: - mod = importplugin(modname) - except KeyboardInterrupt: - raise - except 
py.test.skip.Exception: - e = py.std.sys.exc_info()[1] - self._hints.append("skipped plugin %r: %s" %((modname, e.msg))) - else: - check_old_use(mod, modname) - self.register(mod) - self.consider_module(mod) - - def pytest_terminal_summary(self, terminalreporter): - tw = terminalreporter._tw - if terminalreporter.config.option.traceconfig: - for hint in self._hints: - tw.line("hint: %s" % hint) - - # - # - # API for interacting with registered and instantiated plugin objects - # - # - def listattr(self, attrname, plugins=None): - return self.registry.listattr(attrname, plugins=plugins) - - def notify_exception(self, excinfo=None): - if excinfo is None: - excinfo = py.code.ExceptionInfo() - excrepr = excinfo.getrepr(funcargs=True, showlocals=True) - return self.hook.pytest_internalerror(excrepr=excrepr) - - def do_addoption(self, parser): - mname = "pytest_addoption" - methods = self.registry.listattr(mname, reverse=True) - mc = MultiCall(methods, {'parser': parser}) - mc.execute() - - def pytest_plugin_registered(self, plugin): - dic = self.call_plugin(plugin, "pytest_namespace", {}) or {} - for name, value in dic.items(): - setattr(py.test, name, value) - py.test.__all__.append(name) - if hasattr(self, '_config'): - self.call_plugin(plugin, "pytest_addoption", - {'parser': self._config._parser}) - self.call_plugin(plugin, "pytest_configure", - {'config': self._config}) - - def call_plugin(self, plugin, methname, kwargs): - return MultiCall( - methods=self.listattr(methname, plugins=[plugin]), - kwargs=kwargs, firstresult=True).execute() - - def do_configure(self, config): - assert not hasattr(self, '_config') - self._config = config - config.hook.pytest_configure(config=self._config) - - def do_unconfigure(self, config): - config = self._config - del self._config - config.hook.pytest_unconfigure(config=config) - config.pluginmanager.unregister(self) - -def canonical_importname(name): - name = name.lower() - modprefix = "pytest_" - if not 
name.startswith(modprefix): - name = modprefix + name - return name - -def importplugin(importspec): - try: - return __import__(importspec) - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - try: - return __import__("py._plugin.%s" %(importspec), - None, None, '__doc__') - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - # show the original exception, not the failing internal one - return __import__(importspec) - - -class MultiCall: - """ execute a call into multiple python functions/methods. """ - - def __init__(self, methods, kwargs, firstresult=False): - self.methods = methods[:] - self.kwargs = kwargs.copy() - self.kwargs['__multicall__'] = self - self.results = [] - self.firstresult = firstresult - - def __repr__(self): - status = "%d results, %d meths" % (len(self.results), len(self.methods)) - return "" %(status, self.kwargs) - - def execute(self): - while self.methods: - method = self.methods.pop() - kwargs = self.getkwargs(method) - res = method(**kwargs) - if res is not None: - self.results.append(res) - if self.firstresult: - return res - if not self.firstresult: - return self.results - - def getkwargs(self, method): - kwargs = {} - for argname in varnames(method): - try: - kwargs[argname] = self.kwargs[argname] - except KeyError: - pass # might be optional param - return kwargs - -def varnames(func): - ismethod = inspect.ismethod(func) - rawcode = py.code.getrawcode(func) - try: - return rawcode.co_varnames[ismethod:] - except AttributeError: - return () - -class Registry: - """ - Manage Plugins: register/unregister call calls to plugins. 
- """ - def __init__(self, plugins=None): - if plugins is None: - plugins = [] - self._plugins = plugins - - def register(self, plugin): - assert not isinstance(plugin, str) - assert not plugin in self._plugins - self._plugins.append(plugin) - - def unregister(self, plugin): - self._plugins.remove(plugin) - - def isregistered(self, plugin): - return plugin in self._plugins - - def __iter__(self): - return iter(self._plugins) - - def listattr(self, attrname, plugins=None, reverse=False): - l = [] - if plugins is None: - plugins = self._plugins - for plugin in plugins: - try: - l.append(getattr(plugin, attrname)) - except AttributeError: - continue - if reverse: - l.reverse() - return l - -class HookRelay: - def __init__(self, hookspecs, registry, prefix="pytest_"): - if not isinstance(hookspecs, list): - hookspecs = [hookspecs] - self._hookspecs = [] - self._registry = registry - for hookspec in hookspecs: - self._addhooks(hookspec, prefix) - - def _addhooks(self, hookspecs, prefix): - self._hookspecs.append(hookspecs) - added = False - for name, method in vars(hookspecs).items(): - if name.startswith(prefix): - if not method.__doc__: - raise ValueError("docstring required for hook %r, in %r" - % (method, hookspecs)) - firstresult = getattr(method, 'firstresult', False) - hc = HookCaller(self, name, firstresult=firstresult) - setattr(self, name, hc) - added = True - #print ("setting new hook", name) - if not added: - raise ValueError("did not find new %r hooks in %r" %( - prefix, hookspecs,)) - - - def _performcall(self, name, multicall): - return multicall.execute() - -class HookCaller: - def __init__(self, hookrelay, name, firstresult): - self.hookrelay = hookrelay - self.name = name - self.firstresult = firstresult - - def __repr__(self): - return "" %(self.name,) - - def __call__(self, **kwargs): - methods = self.hookrelay._registry.listattr(self.name) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, 
mc) - - def pcall(self, plugins, **kwargs): - methods = self.hookrelay._registry.listattr(self.name, plugins=plugins) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, mc) - diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. diff --git a/py/_compat/dep_optparse.py b/py/_compat/dep_optparse.py deleted file mode 100644 --- a/py/_compat/dep_optparse.py +++ /dev/null @@ -1,4 +0,0 @@ -import py -py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg") - -optparse = py.std.optparse diff --git a/pypy/doc/config/objspace.usemodules.readline.txt b/pypy/doc/config/objspace.usemodules.readline.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.readline.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'readline' module. diff --git a/py/_cmdline/pylookup.py b/py/_cmdline/pylookup.py deleted file mode 100755 --- a/py/_cmdline/pylookup.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.lookup [search_directory] SEARCH_STRING [options] - -Looks recursively at Python files for a SEARCH_STRING, starting from the -present working directory. 
Prints the line, with the filename and line-number -prepended.""" - -import sys, os -import py -from py.io import ansi_print, get_terminal_width -import re - -def rec(p): - return p.check(dotfile=0) - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase", - help="ignore case distinctions") -parser.add_option("-C", "--context", action="store", type="int", dest="context", - default=0, help="How many lines of output to show") - -terminal_width = get_terminal_width() - -def find_indexes(search_line, string): - indexes = [] - before = 0 - while 1: - i = search_line.find(string, before) - if i == -1: - break - indexes.append(i) - before = i + len(string) - return indexes - -def main(): - (options, args) = parser.parse_args() - if len(args) == 2: - search_dir, string = args - search_dir = py.path.local(search_dir) - else: - search_dir = py.path.local() - string = args[0] - if options.ignorecase: - string = string.lower() - for x in search_dir.visit('*.py', rec): - # match filename directly - s = x.relto(search_dir) - if options.ignorecase: - s = s.lower() - if s.find(string) != -1: - sys.stdout.write("%s: filename matches %r" %(x, string) + "\n") - - try: - s = x.read() - except py.error.ENOENT: - pass # whatever, probably broken link (ie emacs lock) - searchs = s - if options.ignorecase: - searchs = s.lower() - if s.find(string) != -1: - lines = s.splitlines() - if options.ignorecase: - searchlines = s.lower().splitlines() - else: - searchlines = lines - for i, (line, searchline) in enumerate(zip(lines, searchlines)): - indexes = find_indexes(searchline, string) - if not indexes: - continue - if not options.context: - sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1)) - last_index = 0 - for index in indexes: - sys.stdout.write(line[last_index: index]) - ansi_print(line[index: index+len(string)], - file=sys.stdout, esc=31, newline=False) - last_index = index + len(string) - 
sys.stdout.write(line[last_index:] + "\n") - else: - context = (options.context)/2 - for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)): - print("%s:%d: %s" %(x.relto(search_dir), count+1, lines[count].rstrip())) - print("-" * terminal_width) diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. diff --git a/py/_test/cmdline.py b/py/_test/cmdline.py deleted file mode 100644 --- a/py/_test/cmdline.py +++ /dev/null @@ -1,24 +0,0 @@ -import py -import sys - -# -# main entry point -# - -def main(args=None): - if args is None: - args = sys.argv[1:] - config = py.test.config - try: - config.parse(args) - config.pluginmanager.do_configure(config) - session = config.initsession() - colitems = config.getinitialnodes() - exitstatus = session.main(colitems) - config.pluginmanager.do_unconfigure(config) - except config.Error: - e = sys.exc_info()[1] - sys.stderr.write("ERROR: %s\n" %(e.args[0],)) - exitstatus = 3 - py.test.config = py.test.config.__class__() - return exitstatus diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimisation is not used by default. 
\ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/py/_cmdline/pywhich.py b/py/_cmdline/pywhich.py deleted file mode 100755 --- a/py/_cmdline/pywhich.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.which [name] - -print the location of the given python module or package name -""" - -import sys - -def main(): - name = sys.argv[1] - try: - mod = __import__(name) - except ImportError: - sys.stderr.write("could not import: " + name + "\n") - else: - try: - location = mod.__file__ - except AttributeError: - sys.stderr.write("module (has no __file__): " + str(mod)) - else: - print(location) diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/conftest.py b/pypy/conftest.py --- a/pypy/conftest.py +++ b/pypy/conftest.py @@ -1,4 +1,4 @@ -import py, sys, os, textwrap, types +import py, pytest, sys, os, textwrap, types from pypy.interpreter.gateway import app2interp_temp from pypy.interpreter.error import OperationError from pypy.interpreter.function import Method @@ -11,13 +11,20 @@ from pypy.tool import leakfinder # pytest settings -pytest_plugins = "resultlog", rsyncdirs = ['.', '../lib-python', '../lib_pypy', '../demo'] rsyncignore = ['_cache'] # PyPy's command line extra options (these are added # to py.test's standard options) # +option = None + +def pytest_report_header(): + return "pytest-%s from %s" %(pytest.__version__, pytest.__file__) + +def pytest_configure(config): + global option + option = config.option def _set_platform(opt, opt_str, value, parser): from pypy.config.translationoption import PLATFORMS @@ -26,8 +33,6 @@ raise ValueError("%s not in %s" % (value, PLATFORMS)) set_platform(value, None) -option = py.test.config.option - def pytest_addoption(parser): group = parser.getgroup("pypy options") group.addoption('--view', action="store_true", dest="view", default=False, @@ -130,7 +135,7 @@ py.test.skip("cannot runappdirect test: space needs %s = %s, "\ "while pypy-c was built with %s" % (key, value, has)) - for name in ('int', 'long', 'str', 'unicode'): + for name in ('int', 'long', 'str', 'unicode', 'None'): setattr(self, 'w_' + name, eval(name)) @@ -154,7 +159,7 @@ def str_w(self, w_str): return w_str - def newdict(self): + def newdict(self, module=None): return {} def newtuple(self, iterable): @@ -229,7 +234,7 @@ at the class) ourselves. 
""" def accept_regular_test(self): - if option.runappdirect: + if self.config.option.runappdirect: # only collect regular tests if we are in an 'app_test' directory, # or in test_lib_pypy names = self.listnames() @@ -261,7 +266,7 @@ if name.startswith('AppTest'): return AppClassCollector(name, parent=self) elif name.startswith('ExpectTest'): - if option.rundirect: + if self.config.option.rundirect: return py.test.collect.Class(name, parent=self) return ExpectClassCollector(name, parent=self) # XXX todo @@ -278,7 +283,7 @@ "generator app level functions? you must be joking" return AppTestFunction(name, parent=self) elif obj.func_code.co_flags & 32: # generator function - return self.Generator(name, parent=self) + return pytest.Generator(name, parent=self) else: return IntTestFunction(name, parent=self) @@ -350,11 +355,9 @@ _pygame_imported = False class IntTestFunction(py.test.collect.Function): - def _haskeyword(self, keyword): - return keyword == 'interplevel' or \ - super(IntTestFunction, self)._haskeyword(keyword) - def _keywords(self): - return super(IntTestFunction, self)._keywords() + ['interplevel'] + def __init__(self, *args, **kwargs): + super(IntTestFunction, self).__init__(*args, **kwargs) + self.keywords['interplevel'] = True def runtest(self): try: @@ -373,16 +376,13 @@ raise class AppTestFunction(py.test.collect.Function): + def __init__(self, *args, **kwargs): + super(AppTestFunction, self).__init__(*args, **kwargs) + self.keywords['applevel'] = True + def _prunetraceback(self, traceback): return traceback - def _haskeyword(self, keyword): - return keyword == 'applevel' or \ - super(AppTestFunction, self)._haskeyword(keyword) - - def _keywords(self): - return ['applevel'] + super(AppTestFunction, self)._keywords() - def execute_appex(self, space, target, *args): try: target(*args) @@ -397,7 +397,7 @@ def runtest(self): target = self.obj - if option.runappdirect: + if self.config.option.runappdirect: return target() space = gettestobjspace() filename 
= self._getdynfilename(target) @@ -422,7 +422,7 @@ space = instance.space for name in dir(instance): if name.startswith('w_'): - if option.runappdirect: + if self.config.option.runappdirect: setattr(instance, name[2:], getattr(instance, name)) else: obj = getattr(instance, name) @@ -440,7 +440,7 @@ def runtest(self): target = self.obj - if option.runappdirect: + if self.config.option.runappdirect: return target() space = target.im_self.space filename = self._getdynfilename(target) @@ -478,7 +478,7 @@ instance = self.obj space = instance.space w_class = self.parent.w_class - if option.runappdirect: + if self.config.option.runappdirect: self.w_instance = instance else: self.w_instance = space.call_function(w_class) @@ -498,7 +498,7 @@ cls = self.obj space = cls.space clsname = cls.__name__ - if option.runappdirect: + if self.config.option.runappdirect: w_class = cls else: w_class = space.call_function(space.w_type, @@ -513,6 +513,7 @@ s = s.replace("()", "paren") s = s.replace(".py", "") s = s.replace(".", "_") + s = s.replace(os.sep, "_") return s safe_name = staticmethod(safe_name) diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/py/_plugin/pytest_runner.py b/py/_plugin/pytest_runner.py deleted file mode 100644 --- a/py/_plugin/pytest_runner.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -collect and run test items and create reports. 
-""" - -import py, sys - -def pytest_namespace(): - return { - 'raises' : raises, - 'skip' : skip, - 'importorskip' : importorskip, - 'fail' : fail, - 'xfail' : xfail, - 'exit' : exit, - } - -# -# pytest plugin hooks - -# XXX move to pytest_sessionstart and fix py.test owns tests -def pytest_configure(config): - config._setupstate = SetupState() - -def pytest_sessionfinish(session, exitstatus): - if hasattr(session.config, '_setupstate'): - hook = session.config.hook - rep = hook.pytest__teardown_final(session=session) - if rep: - hook.pytest__teardown_final_logerror(report=rep) - -def pytest_make_collect_report(collector): - result = excinfo = None - try: - result = collector._memocollect() - except KeyboardInterrupt: - raise - except: - excinfo = py.code.ExceptionInfo() - return CollectReport(collector, result, excinfo) - -def pytest_runtest_protocol(item): - runtestprotocol(item) - return True - -def runtestprotocol(item, log=True): - rep = call_and_report(item, "setup", log) - reports = [rep] - if rep.passed: - reports.append(call_and_report(item, "call", log)) - reports.append(call_and_report(item, "teardown", log)) - return reports - -def pytest_runtest_setup(item): - item.config._setupstate.prepare(item) - -def pytest_runtest_call(item): - if not item._deprecated_testexecution(): - item.runtest() - -def pytest_runtest_makereport(item, call): - return ItemTestReport(item, call.excinfo, call.when) - -def pytest_runtest_teardown(item): - item.config._setupstate.teardown_exact(item) - -def pytest__teardown_final(session): - call = CallInfo(session.config._setupstate.teardown_all, when="teardown") - if call.excinfo: - ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir) - call.excinfo.traceback = ntraceback.filter() - rep = TeardownErrorReport(call.excinfo) - return rep - -def pytest_report_teststatus(report): - if report.when in ("setup", "teardown"): - if report.failed: - # category, shortletter, verbose-word - return "error", "E", "ERROR" - elif 
report.skipped: - return "skipped", "s", "SKIPPED" - else: - return "", "", "" -# -# Implementation - -def call_and_report(item, when, log=True): - call = call_runtest_hook(item, when) - hook = item.ihook - report = hook.pytest_runtest_makereport(item=item, call=call) - if log and (when == "call" or not report.passed): - hook.pytest_runtest_logreport(report=report) - return report - -def call_runtest_hook(item, when): - hookname = "pytest_runtest_" + when - ihook = getattr(item.ihook, hookname) - return CallInfo(lambda: ihook(item=item), when=when) - -class CallInfo: - excinfo = None - def __init__(self, func, when): - self.when = when - try: - self.result = func() - except KeyboardInterrupt: - raise - except: - self.excinfo = py.code.ExceptionInfo() - - def __repr__(self): - if self.excinfo: - status = "exception: %s" % str(self.excinfo.value) - else: - status = "result: %r" % (self.result,) - return "" % (self.when, status) - -class BaseReport(object): - def __repr__(self): - l = ["%s=%s" %(key, value) - for key, value in self.__dict__.items()] - return "<%s %s>" %(self.__class__.__name__, " ".join(l),) - - def toterminal(self, out): - longrepr = self.longrepr - if hasattr(longrepr, 'toterminal'): - longrepr.toterminal(out) - else: - out.line(str(longrepr)) - -class ItemTestReport(BaseReport): - failed = passed = skipped = False - - def __init__(self, item, excinfo=None, when=None): - self.item = item - self.when = when - if item and when != "setup": - self.keywords = item.readkeywords() - else: - # if we fail during setup it might mean - # we are not able to access the underlying object - # this might e.g. happen if we are unpickled - # and our parent collector did not collect us - # (because it e.g. skipped for platform reasons) - self.keywords = {} - if not excinfo: - self.passed = True - self.shortrepr = "." - else: - if not isinstance(excinfo, py.code.ExceptionInfo): - self.failed = True - shortrepr = "?" 
- longrepr = excinfo - elif excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - shortrepr = "s" - longrepr = self.item._repr_failure_py(excinfo) - else: - self.failed = True - shortrepr = self.item.shortfailurerepr - if self.when == "call": - longrepr = self.item.repr_failure(excinfo) - else: # exception in setup or teardown - longrepr = self.item._repr_failure_py(excinfo) - shortrepr = shortrepr.lower() - self.shortrepr = shortrepr - self.longrepr = longrepr - - def __repr__(self): - status = (self.passed and "passed" or - self.skipped and "skipped" or - self.failed and "failed" or - "CORRUPT") - l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,] - if hasattr(self, 'node'): - l.append("txnode=%s" % self.node.gateway.id) - info = " " .join(map(str, l)) - return "" % info - - def getnode(self): - return self.item - -class CollectReport(BaseReport): - skipped = failed = passed = False - - def __init__(self, collector, result, excinfo=None): - self.collector = collector - if not excinfo: - self.passed = True - self.result = result - else: - style = "short" - if collector.config.getvalue("fulltrace"): - style = "long" - self.longrepr = self.collector._repr_failure_py(excinfo, - style=style) - if excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - self.reason = str(excinfo.value) - else: - self.failed = True - - def getnode(self): - return self.collector - -class TeardownErrorReport(BaseReport): - skipped = passed = False - failed = True - when = "teardown" - def __init__(self, excinfo): - self.longrepr = excinfo.getrepr(funcargs=True) - -class SetupState(object): - """ shared state for setting up/tearing down test items or collectors. """ - def __init__(self): - self.stack = [] - self._finalizers = {} - - def addfinalizer(self, finalizer, colitem): - """ attach a finalizer to the given colitem. - if colitem is None, this will add a finalizer that - is called at the end of teardown_all(). 
- """ - assert hasattr(finalizer, '__call__') - #assert colitem in self.stack - self._finalizers.setdefault(colitem, []).append(finalizer) - - def _pop_and_teardown(self): - colitem = self.stack.pop() - self._teardown_with_finalization(colitem) - - def _callfinalizers(self, colitem): - finalizers = self._finalizers.pop(colitem, None) - while finalizers: - fin = finalizers.pop() - fin() - - def _teardown_with_finalization(self, colitem): - self._callfinalizers(colitem) - if colitem: - colitem.teardown() - for colitem in self._finalizers: - assert colitem is None or colitem in self.stack - - def teardown_all(self): - while self.stack: - self._pop_and_teardown() - self._teardown_with_finalization(None) - assert not self._finalizers - - def teardown_exact(self, item): - if self.stack and item == self.stack[-1]: - self._pop_and_teardown() - else: - self._callfinalizers(item) - - def prepare(self, colitem): - """ setup objects along the collector chain to the test-method - and teardown previously setup objects.""" - needed_collectors = colitem.listchain() - while self.stack: - if self.stack == needed_collectors[:len(self.stack)]: - break - self._pop_and_teardown() - # check if the last collection node has raised an error - for col in self.stack: - if hasattr(col, '_prepare_exc'): - py.builtin._reraise(*col._prepare_exc) - for col in needed_collectors[len(self.stack):]: - self.stack.append(col) - try: - col.setup() - except Exception: - col._prepare_exc = sys.exc_info() - raise - -# ============================================================= -# Test OutcomeExceptions and helpers for creating them. - - -class OutcomeException(Exception): - """ OutcomeException and its subclass instances indicate and - contain info about test and collection outcomes. 
- """ - def __init__(self, msg=None, excinfo=None): - self.msg = msg - self.excinfo = excinfo - - def __repr__(self): - if self.msg: - return repr(self.msg) - return "<%s instance>" %(self.__class__.__name__,) - __str__ = __repr__ - -class Skipped(OutcomeException): - # XXX hackish: on 3k we fake to live in the builtins - # in order to have Skipped exception printing shorter/nicer - __module__ = 'builtins' - -class Failed(OutcomeException): - """ raised from an explicit call to py.test.fail() """ - __module__ = 'builtins' - -class XFailed(OutcomeException): - """ raised from an explicit call to py.test.xfail() """ - __module__ = 'builtins' - -class ExceptionFailure(Failed): - """ raised by py.test.raises on an exception-assertion mismatch. """ - def __init__(self, expr, expected, msg=None, excinfo=None): - Failed.__init__(self, msg=msg, excinfo=excinfo) - self.expr = expr - self.expected = expected - -class Exit(KeyboardInterrupt): - """ raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """ - def __init__(self, msg="unknown reason"): - self.msg = msg - KeyboardInterrupt.__init__(self, msg) - -# exposed helper methods - -def exit(msg): - """ exit testing process as if KeyboardInterrupt was triggered. """ - __tracebackhide__ = True - raise Exit(msg) - -exit.Exception = Exit - -def skip(msg=""): - """ skip an executing test with the given message. Note: it's usually - better use the py.test.mark.skipif marker to declare a test to be - skipped under certain conditions like mismatching platforms or - dependencies. See the pytest_skipping plugin for details. - """ - __tracebackhide__ = True - raise Skipped(msg=msg) - -skip.Exception = Skipped - -def fail(msg=""): - """ explicitely fail an currently-executing test with the given Message. """ - __tracebackhide__ = True - raise Failed(msg=msg) - -fail.Exception = Failed - -def xfail(reason=""): - """ xfail an executing test or setup functions, taking an optional - reason string. 
- """ - __tracebackhide__ = True - raise XFailed(reason) -xfail.Exception = XFailed - -def raises(ExpectedException, *args, **kwargs): - """ if args[0] is callable: raise AssertionError if calling it with - the remaining arguments does not raise the expected exception. - if args[0] is a string: raise AssertionError if executing the - the string in the calling scope does not raise expected exception. - for examples: - x = 5 - raises(TypeError, lambda x: x + 'hello', x=x) - raises(TypeError, "x + 'hello'") - """ - __tracebackhide__ = True - assert args - if isinstance(args[0], str): - code, = args - assert isinstance(code, str) - frame = sys._getframe(1) - loc = frame.f_locals.copy() - loc.update(kwargs) - #print "raises frame scope: %r" % frame.f_locals - try: - code = py.code.Source(code).compile() - py.builtin.exec_(code, frame.f_globals, loc) - # XXX didn'T mean f_globals == f_locals something special? - # this is destroyed here ... - except ExpectedException: - return py.code.ExceptionInfo() - else: - func = args[0] - try: - func(*args[1:], **kwargs) - except ExpectedException: - return py.code.ExceptionInfo() - k = ", ".join(["%s=%r" % x for x in kwargs.items()]) - if k: - k = ', ' + k - expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k) - raise ExceptionFailure(msg="DID NOT RAISE", - expr=args, expected=ExpectedException) - -raises.Exception = ExceptionFailure - -def importorskip(modname, minversion=None): - """ return imported module if it has a higher __version__ than the - optionally specified 'minversion' - otherwise call py.test.skip() - with a message detailing the mismatch. 
- """ - compile(modname, '', 'eval') # to catch syntaxerrors - try: - mod = __import__(modname, None, None, ['__doc__']) - except ImportError: - py.test.skip("could not import %r" %(modname,)) - if minversion is None: - return mod - verattr = getattr(mod, '__version__', None) - if isinstance(minversion, str): - minver = minversion.split(".") - else: - minver = list(minversion) - if verattr is None or verattr.split(".") < minver: - py.test.skip("module %r has __version__ %r, required is: %r" %( - modname, verattr, minversion)) - return mod - diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. 
- -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. - -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. 
This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). 
- -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. -That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). 
This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. 
However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessability - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. 
All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compability rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) - -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. 
- -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. 
- -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will checks for some of the -restrictions RPython adds on standard Python code (and uses a -more agressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylin. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will be translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. _astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). 
There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. - - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. 
- -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly a interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... 
- -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and applicationlevel parts -for the implementation. Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. 
-here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. - -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. 
This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. - -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... 
- -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. - -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. 
-(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. 
- -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. _`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. 
They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while you test code -runs at application level. If you need to use modules -you have to import them within the test function. 
- -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b", 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. _`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. 
_`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. diff --git a/py/_compat/dep_doctest.py b/py/_compat/dep_doctest.py deleted file mode 100644 --- a/py/_compat/dep_doctest.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", -stacklevel="apipkg") -doctest = py.std.doctest diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. 
_`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. 
_`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. 
_`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. _`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. 
_`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. 
- -The exact set of optimizations enabled by each level depends -on the backend. Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/py/_plugin/pytest_pastebin.py b/py/_plugin/pytest_pastebin.py deleted file mode 100644 --- a/py/_plugin/pytest_pastebin.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -submit failure or test session information to a pastebin service. 
- -Usage ----------- - -**Creating a URL for each test failure**:: - - py.test --pastebin=failed - -This will submit test run information to a remote Paste service and -provide a URL for each failure. You may select tests as usual or add -for example ``-x`` if you only want to send one particular failure. - -**Creating a URL for a whole test session log**:: - - py.test --pastebin=all - -Currently only pasting to the http://paste.pocoo.org service is implemented. - -""" -import py, sys - -class url: - base = "http://paste.pocoo.org" - xmlrpc = base + "/xmlrpc/" - show = base + "/show/" - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group._addoption('--pastebin', metavar="mode", - action='store', dest="pastebin", default=None, - type="choice", choices=['failed', 'all'], - help="send failed|all info to Pocoo pastebin service.") - -def pytest_configure(__multicall__, config): - import tempfile - __multicall__.execute() - if config.option.pastebin == "all": - config._pastebinfile = tempfile.TemporaryFile('w+') - tr = config.pluginmanager.getplugin('terminalreporter') - oldwrite = tr._tw.write - def tee_write(s, **kwargs): - oldwrite(s, **kwargs) - config._pastebinfile.write(str(s)) - tr._tw.write = tee_write - -def pytest_unconfigure(config): - if hasattr(config, '_pastebinfile'): - config._pastebinfile.seek(0) - sessionlog = config._pastebinfile.read() - config._pastebinfile.close() - del config._pastebinfile - proxyid = getproxy().newPaste("python", sessionlog) - pastebinurl = "%s%s" % (url.show, proxyid) - sys.stderr.write("pastebin session-log: %s\n" % pastebinurl) - tr = config.pluginmanager.getplugin('terminalreporter') - del tr._tw.__dict__['write'] - -def getproxy(): - return py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes - -def pytest_terminal_summary(terminalreporter): - if terminalreporter.config.option.pastebin != "failed": - return - tr = terminalreporter - if 'failed' in tr.stats: - terminalreporter.write_sep("=", 
"Sending information to Paste Service") - if tr.config.option.debug: - terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,)) - serverproxy = getproxy() - for rep in terminalreporter.stats.get('failed'): - try: - msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc - except AttributeError: - msg = tr._getfailureheadline(rep) - tw = py.io.TerminalWriter(stringio=True) - rep.toterminal(tw) - s = tw.stringio.getvalue() - assert len(s) - proxyid = serverproxy.newPaste("python", s) - pastebinurl = "%s%s" % (url.show, proxyid) - tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. 
diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). - -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependecies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -20,8 +20,8 @@ from pypy.rlib.rarithmetic import intmask from pypy.rlib.objectmodel import specialize from pypy.jit.codewriter.jitcode import JitCode, 
SwitchDictDescr, MissingLiveness -from pypy.jit.codewriter import heaptracker -from pypy.jit.metainterp.optimizeutil import RetraceLoop +from pypy.jit.codewriter import heaptracker, longlong +from pypy.jit.metainterp.optimizeutil import RetraceLoop, args_dict_box, args_dict # ____________________________________________________________ @@ -839,7 +839,7 @@ jcposition, redboxes): resumedescr = compile.ResumeAtPositionDescr() self.capture_resumedata(resumedescr, orgpc) - + any_operation = len(self.metainterp.history.operations) > 0 jitdriver_sd = self.metainterp.staticdata.jitdrivers_sd[jdindex] self.verify_green_args(jitdriver_sd, greenboxes) @@ -857,7 +857,7 @@ "found a loop_header for a JitDriver that does not match " "the following jit_merge_point's") self.metainterp.seen_loop_header_for_jdindex = -1 - + # if not self.metainterp.in_recursion: assert jitdriver_sd is self.metainterp.jitdriver_sd @@ -1280,11 +1280,6 @@ self._addr2name_keys = [key for key, value in list_of_addr2name] self._addr2name_values = [value for key, value in list_of_addr2name] - def setup_jitdrivers_sd(self, optimizer): - if optimizer is not None: - for jd in self.jitdrivers_sd: - jd.warmstate.set_param_optimizer(optimizer) - def finish_setup(self, codewriter, optimizer=None): from pypy.jit.metainterp.blackhole import BlackholeInterpBuilder self.blackholeinterpbuilder = BlackholeInterpBuilder(codewriter, self) @@ -1298,7 +1293,6 @@ self.jitdrivers_sd = codewriter.callcontrol.jitdrivers_sd self.virtualref_info = codewriter.callcontrol.virtualref_info self.callinfocollection = codewriter.callcontrol.callinfocollection - self.setup_jitdrivers_sd(optimizer) # # store this information for fastpath of call_assembler # (only the paths that can actually be taken) @@ -1421,6 +1415,7 @@ self.free_frames_list = [] self.last_exc_value_box = None self.retracing_loop_from = None + self.call_pure_results = args_dict_box() def perform_call(self, jitcode, boxes, greenkey=None): # causes the metainterp to enter 
the given subfunction @@ -1428,10 +1423,13 @@ f.setup_call(boxes) raise ChangeFrame + def is_main_jitcode(self, jitcode): + return self.jitdriver_sd is not None and jitcode is self.jitdriver_sd.mainjitcode + def newframe(self, jitcode, greenkey=None): if jitcode.is_portal: self.in_recursion += 1 - if greenkey is not None: + if greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (greenkey, len(self.history.operations))) if len(self.free_frames_list) > 0: @@ -1444,9 +1442,10 @@ def popframe(self): frame = self.framestack.pop() - if frame.jitcode.is_portal: + jitcode = frame.jitcode + if jitcode.is_portal: self.in_recursion -= 1 - if frame.greenkey is not None: + if frame.greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (None, len(self.history.operations))) # we save the freed MIFrames to avoid needing to re-create new @@ -1478,7 +1477,7 @@ elif result_type == history.REF: raise sd.DoneWithThisFrameRef(self.cpu, resultbox.getref_base()) elif result_type == history.FLOAT: - raise sd.DoneWithThisFrameFloat(resultbox.getfloat()) + raise sd.DoneWithThisFrameFloat(resultbox.getfloatstorage()) else: assert False @@ -1637,6 +1636,7 @@ warmrunnerstate = self.jitdriver_sd.warmstate if len(self.history.operations) > warmrunnerstate.trace_limit: greenkey_of_huge_function = self.find_biggest_function() + self.staticdata.stats.record_aborted(greenkey_of_huge_function) self.portal_trace_positions = None if greenkey_of_huge_function is not None: warmrunnerstate.disable_noninlinable_function( @@ -1724,7 +1724,7 @@ dont_change_position = True else: dont_change_position = False - try: + try: self.prepare_resume_from_failure(key.guard_opnum, dont_change_position) if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(ABORT_BRIDGE) @@ -1918,7 +1918,8 @@ self.history.inputargs = original_boxes[num_green_args:] greenkey = original_boxes[:num_green_args] 
self.history.record(rop.JUMP, live_arg_boxes[num_green_args:], None) - loop_token = compile.compile_new_loop(self, [], greenkey, start, start_resumedescr) + loop_token = compile.compile_new_loop(self, [], greenkey, start, + start_resumedescr, False) self.history.operations.pop() # remove the JUMP if loop_token is None: return @@ -1928,12 +1929,12 @@ self.history.inputargs = original_inputargs self.history.operations = self.history.operations[:start] - live_arg_boxes = bridge_arg_boxes - - self.history.record(rop.JUMP, live_arg_boxes[num_green_args:], None) + + self.history.record(rop.JUMP, bridge_arg_boxes[num_green_args:], None) try: target_loop_token = compile.compile_new_bridge(self, - [loop_token], + #[loop_token], + old_loop_tokens, self.resumekey, True) except RetraceLoop: @@ -2283,7 +2284,9 @@ return resbox_as_const # not all constants (so far): turn CALL into CALL_PURE, which might # be either removed later by optimizeopt or turned back into CALL. - newop = op.copy_and_change(rop.CALL_PURE, args=[resbox_as_const]+op.getarglist()) + arg_consts = [a.constbox() for a in op.getarglist()] + self.call_pure_results[arg_consts] = resbox_as_const + newop = op.copy_and_change(rop.CALL_PURE, args=op.getarglist()) self.history.operations[-1] = newop return resbox diff --git a/pypy/jit/metainterp/test/test_ztranslation.py b/pypy/jit/metainterp/test/test_ztranslation.py --- a/pypy/jit/metainterp/test/test_ztranslation.py +++ b/pypy/jit/metainterp/test/test_ztranslation.py @@ -1,7 +1,7 @@ import py from pypy.jit.metainterp.warmspot import rpython_ll_meta_interp, ll_meta_interp from pypy.jit.backend.llgraph import runner -from pypy.rlib.jit import JitDriver, OPTIMIZER_FULL, unroll_parameters +from pypy.rlib.jit import JitDriver, unroll_parameters from pypy.rlib.jit import PARAMETERS, dont_look_inside, hint from pypy.jit.metainterp.jitprof import Profiler from pypy.rpython.lltypesystem import lltype, llmemory @@ -40,6 +40,12 @@ self.i = i self.l = [float(i)] + class 
OtherFrame(object): + _virtualizable2_ = ['i'] + + def __init__(self, i): + self.i = i + class JitCellCache: entry = None jitcellcache = JitCellCache() @@ -56,8 +62,7 @@ set_jitcell_at=set_jitcell_at, get_printable_location=get_printable_location) def f(i): - for param in unroll_parameters: - defl = PARAMETERS[param] + for param, defl in unroll_parameters: jitdriver.set_param(param, defl) jitdriver.set_param("threshold", 3) jitdriver.set_param("trace_eagerness", 2) @@ -72,8 +77,7 @@ frame.l[0] -= 1 return total * 10 # - myjitdriver2 = JitDriver(greens = ['g'], - reds = ['m', 's', 'f', 'float_s'], + myjitdriver2 = JitDriver(greens = ['g'], reds = ['m', 's', 'f'], virtualizables = ['f']) def f2(g, m, x): s = "" @@ -100,7 +104,6 @@ res = rpython_ll_meta_interp(main, [40, 5], CPUClass=self.CPUClass, type_system=self.type_system, - optimizer=OPTIMIZER_FULL, ProfilerClass=Profiler, listops=True) assert res == main(40, 5) @@ -141,7 +144,7 @@ assert res == main(40) res = rpython_ll_meta_interp(main, [40], CPUClass=self.CPUClass, type_system=self.type_system, - optimizer=OPTIMIZER_FULL, + enable_opts='', ProfilerClass=Profiler) assert res == main(40) diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. 
Print statics about the forest of flowgraphs as they -go through the various backend optimisations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. diff --git a/py/_cmdline/pysvnwcrevert.py b/py/_cmdline/pysvnwcrevert.py deleted file mode 100755 --- a/py/_cmdline/pysvnwcrevert.py +++ /dev/null @@ -1,55 +0,0 @@ -#! /usr/bin/env python -"""\ -py.svnwcrevert [options] WCPATH - -Running this script and then 'svn up' puts the working copy WCPATH in a state -as clean as a fresh check-out. - -WARNING: you'll loose all local changes, obviously! - -This script deletes all files that have been modified -or that svn doesn't explicitly know about, including svn:ignored files -(like .pyc files, hint hint). 
- -The goal of this script is to leave the working copy with some files and -directories possibly missing, but - most importantly - in a state where -the following 'svn up' won't just crash. -""" - -import sys, py - -def kill(p, root): - print('< %s' % (p.relto(root),)) - p.remove(rec=1) - -def svnwcrevert(path, root=None, precious=[]): - if root is None: - root = path - wcpath = py.path.svnwc(path) - try: - st = wcpath.status() - except ValueError: # typically, "bad char in wcpath" - kill(path, root) - return - for p in path.listdir(): - if p.basename == '.svn' or p.basename in precious: - continue - wcp = py.path.svnwc(p) - if wcp not in st.unchanged and wcp not in st.external: - kill(p, root) - elif p.check(dir=1): - svnwcrevert(p, root) - -# XXX add a functional test - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-p", "--precious", - action="append", dest="precious", default=[], - help="preserve files with this name") - -def main(): - opts, args = parser.parse_args() - if len(args) != 1: - parser.print_help() - sys.exit(2) - svnwcrevert(py.path.local(args[0]), precious=opts.precious) diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. 
- -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... - -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. 
As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. - -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). 
The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). - -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. 
_`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisities --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. 
If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). - -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. 
- diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/py/apipkg.py b/py/apipkg.py deleted file mode 100644 --- a/py/apipkg.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -apipkg: control the exported namespace of a python package. - -see http://pypi.python.org/pypi/apipkg - -(c) holger krekel, 2009 - MIT license -""" -import sys -from types import ModuleType - -__version__ = "1.0b6" - -def initpkg(pkgname, exportdefs): - """ initialize given package from the export definitions. 
""" - mod = ApiModule(pkgname, exportdefs, implprefix=pkgname) - oldmod = sys.modules[pkgname] - mod.__file__ = getattr(oldmod, '__file__', None) - mod.__version__ = getattr(oldmod, '__version__', '0') - for name in ('__path__', '__loader__'): - if hasattr(oldmod, name): - setattr(mod, name, getattr(oldmod, name)) - sys.modules[pkgname] = mod - -def importobj(modpath, attrname): - module = __import__(modpath, None, None, ['__doc__']) - return getattr(module, attrname) - -class ApiModule(ModuleType): - def __init__(self, name, importspec, implprefix=None): - self.__name__ = name - self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] - self.__map__ = {} - self.__implprefix__ = implprefix or name - for name, importspec in importspec.items(): - if isinstance(importspec, dict): - subname = '%s.%s'%(self.__name__, name) - apimod = ApiModule(subname, importspec, implprefix) - sys.modules[subname] = apimod - setattr(self, name, apimod) - else: - modpath, attrname = importspec.split(':') - if modpath[0] == '.': - modpath = implprefix + modpath - if name == '__doc__': - self.__doc__ = importobj(modpath, attrname) - else: - self.__map__[name] = (modpath, attrname) - - def __repr__(self): - l = [] - if hasattr(self, '__version__'): - l.append("version=" + repr(self.__version__)) - if hasattr(self, '__file__'): - l.append('from ' + repr(self.__file__)) - if l: - return '' % (self.__name__, " ".join(l)) - return '' % (self.__name__,) - - def __makeattr(self, name): - """lazily compute value for name or raise AttributeError if unknown.""" - target = None - if '__onfirstaccess__' in self.__map__: - target = self.__map__.pop('__onfirstaccess__') - importobj(*target)() - try: - modpath, attrname = self.__map__[name] - except KeyError: - if target is not None and name != '__onfirstaccess__': - # retry, onfirstaccess might have set attrs - return getattr(self, name) - raise AttributeError(name) - else: - result = importobj(modpath, attrname) - setattr(self, name, result) 
- try: - del self.__map__[name] - except KeyError: - pass # in a recursive-import situation a double-del can happen - return result - - __getattr__ = __makeattr - - def __dict__(self): - # force all the content of the module to be loaded when __dict__ is read - dictdescr = ModuleType.__dict__['__dict__'] - dict = dictdescr.__get__(self) - if dict is not None: - hasattr(self, 'some') - for name in self.__all__: - try: - self.__makeattr(name) - except AttributeError: - pass - return dict - __dict__ = property(__dict__) diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/py/bin/env.py b/py/bin/env.py deleted file mode 100644 --- a/py/bin/env.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys, os, os.path - -progpath = sys.argv[0] -packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath))) -packagename = os.path.basename(packagedir) -bindir = os.path.join(packagedir, 'bin') -if sys.platform == 'win32': - bindir = os.path.join(bindir, 'win32') -rootdir = os.path.dirname(packagedir) - -def prepend_path(name, value): - sep = os.path.pathsep - curpath = os.environ.get(name, '') - newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ] - return setenv(name, sep.join(newpath)) - -def setenv(name, value): - shell = os.environ.get('SHELL', '') - comspec = os.environ.get('COMSPEC', '') - if shell.endswith('csh'): - cmd = 'setenv %s "%s"' % (name, value) - elif shell.endswith('sh'): - cmd = '%s="%s"; export %s' % (name, value, name) - elif comspec.endswith('cmd.exe'): - cmd = 'set %s=%s' % (name, value) - else: - assert False, 'Shell not supported.' 
- return cmd - -print(prepend_path('PATH', bindir)) -print(prepend_path('PYTHONPATH', rootdir)) diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/py/_test/funcargs.py b/py/_test/funcargs.py deleted file mode 100644 --- a/py/_test/funcargs.py +++ /dev/null @@ -1,176 +0,0 @@ -import py - -def getfuncargnames(function): - argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0] - startindex = py.std.inspect.ismethod(function) and 1 or 0 - defaults = getattr(function, 'func_defaults', - getattr(function, '__defaults__', None)) or () - numdefaults = len(defaults) - if numdefaults: - return argnames[startindex:-numdefaults] - return argnames[startindex:] - -def fillfuncargs(function): - """ fill missing funcargs. 
""" - request = FuncargRequest(pyfuncitem=function) - request._fillfuncargs() - -def getplugins(node, withpy=False): # might by any node - plugins = node.config._getmatchingplugins(node.fspath) - if withpy: - mod = node.getparent(py.test.collect.Module) - if mod is not None: - plugins.append(mod.obj) - inst = node.getparent(py.test.collect.Instance) - if inst is not None: - plugins.append(inst.obj) - return plugins - -_notexists = object() -class CallSpec: - def __init__(self, funcargs, id, param): - self.funcargs = funcargs - self.id = id - if param is not _notexists: - self.param = param - def __repr__(self): - return "" %( - self.id, getattr(self, 'param', '?'), self.funcargs) - -class Metafunc: - def __init__(self, function, config=None, cls=None, module=None): - self.config = config - self.module = module - self.function = function - self.funcargnames = getfuncargnames(function) - self.cls = cls - self.module = module - self._calls = [] - self._ids = py.builtin.set() - - def addcall(self, funcargs=None, id=_notexists, param=_notexists): - assert funcargs is None or isinstance(funcargs, dict) - if id is None: - raise ValueError("id=None not allowed") - if id is _notexists: - id = len(self._calls) - id = str(id) - if id in self._ids: - raise ValueError("duplicate id %r" % id) - self._ids.add(id) - self._calls.append(CallSpec(funcargs, id, param)) - -class FuncargRequest: - _argprefix = "pytest_funcarg__" - _argname = None - - class LookupError(LookupError): - """ error on performing funcarg request. 
""" - - def __init__(self, pyfuncitem): - self._pyfuncitem = pyfuncitem - self.function = pyfuncitem.obj - self.module = pyfuncitem.getparent(py.test.collect.Module).obj - clscol = pyfuncitem.getparent(py.test.collect.Class) - self.cls = clscol and clscol.obj or None - self.instance = py.builtin._getimself(self.function) - self.config = pyfuncitem.config - self.fspath = pyfuncitem.fspath - if hasattr(pyfuncitem, '_requestparam'): - self.param = pyfuncitem._requestparam - self._plugins = getplugins(pyfuncitem, withpy=True) - self._funcargs = self._pyfuncitem.funcargs.copy() - self._name2factory = {} - self._currentarg = None - - def _fillfuncargs(self): - argnames = getfuncargnames(self.function) - if argnames: - assert not getattr(self._pyfuncitem, '_args', None), ( - "yielded functions cannot have funcargs") - for argname in argnames: - if argname not in self._pyfuncitem.funcargs: - self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname) - - def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): - """ cache and return result of calling setup(). - - The requested argument name, the scope and the ``extrakey`` - determine the cache key. The scope also determines when - teardown(result) will be called. valid scopes are: - scope == 'function': when the single test function run finishes. - scope == 'module': when tests in a different module are run - scope == 'session': when tests of the session have run. - """ - if not hasattr(self.config, '_setupcache'): - self.config._setupcache = {} # XXX weakref? 
- cachekey = (self._currentarg, self._getscopeitem(scope), extrakey) - cache = self.config._setupcache - try: - val = cache[cachekey] - except KeyError: - val = setup() - cache[cachekey] = val - if teardown is not None: - def finalizer(): - del cache[cachekey] - teardown(val) - self._addfinalizer(finalizer, scope=scope) - return val - - def getfuncargvalue(self, argname): - try: - return self._funcargs[argname] - except KeyError: - pass - if argname not in self._name2factory: - self._name2factory[argname] = self.config.pluginmanager.listattr( - plugins=self._plugins, - attrname=self._argprefix + str(argname) - ) - #else: we are called recursively - if not self._name2factory[argname]: - self._raiselookupfailed(argname) - funcargfactory = self._name2factory[argname].pop() - oldarg = self._currentarg - self._currentarg = argname - try: - self._funcargs[argname] = res = funcargfactory(request=self) - finally: - self._currentarg = oldarg - return res - - def _getscopeitem(self, scope): - if scope == "function": - return self._pyfuncitem - elif scope == "module": - return self._pyfuncitem.getparent(py.test.collect.Module) - elif scope == "session": - return None - raise ValueError("unknown finalization scope %r" %(scope,)) - - def _addfinalizer(self, finalizer, scope): - colitem = self._getscopeitem(scope) - self.config._setupstate.addfinalizer( - finalizer=finalizer, colitem=colitem) - - def addfinalizer(self, finalizer): - """ call the given finalizer after test function finished execution. 
""" - self._addfinalizer(finalizer, scope="function") - - def __repr__(self): - return "" %(self._pyfuncitem) - - def _raiselookupfailed(self, argname): - available = [] - for plugin in self._plugins: - for name in vars(plugin): - if name.startswith(self._argprefix): - name = name[len(self._argprefix):] - if name not in available: - available.append(name) - fspath, lineno, msg = self._pyfuncitem.reportinfo() - msg = "LookupError: no factory found for function argument %r" % (argname,) - msg += "\n available funcargs: %s" %(", ".join(available),) - msg += "\n use 'py.test --funcargs [testpath]' for help on them." - raise self.LookupError(msg) diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. 
diff --git a/py/_path/gateway/channeltest2.py b/py/_path/gateway/channeltest2.py deleted file mode 100644 --- a/py/_path/gateway/channeltest2.py +++ /dev/null @@ -1,21 +0,0 @@ -import py -from remotepath import RemotePath - - -SRC = open('channeltest.py', 'r').read() - -SRC += ''' -import py -srv = PathServer(channel.receive()) -channel.send(srv.p2c(py.path.local("/tmp"))) -''' - - -#gw = execnet.SshGateway('codespeak.net') -gw = execnet.PopenGateway() -gw.remote_init_threads(5) -c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr) -subchannel = gw._channelfactory.new() -c.send(subchannel) - -p = RemotePath(subchannel, c.receive()) diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -38,6 +38,7 @@ from pypy.jit.backend.x86.jump import remap_frame_layout from pypy.jit.metainterp.history import ConstInt, BoxInt from pypy.jit.codewriter.effectinfo import EffectInfo +from pypy.jit.codewriter import longlong # darwin requires the stack to be 16 bytes aligned on calls. 
Same for gcc 4.5.0, # better safe than sorry @@ -71,7 +72,8 @@ self.malloc_unicode_func_addr = 0 self.fail_boxes_int = values_array(lltype.Signed, failargs_limit) self.fail_boxes_ptr = values_array(llmemory.GCREF, failargs_limit) - self.fail_boxes_float = values_array(lltype.Float, failargs_limit) + self.fail_boxes_float = values_array(longlong.FLOATSTORAGE, + failargs_limit) self.fail_ebp = 0 self.loop_run_counters = [] self.float_const_neg_addr = 0 @@ -696,11 +698,9 @@ else: target = tmp if inputargs[i].type == REF: - # This uses XCHG to put zeroes in fail_boxes_ptr after - # reading them - self.mc.XOR(target, target) adr = self.fail_boxes_ptr.get_addr_for_num(i) - self.mc.XCHG(target, heap(adr)) + self.mc.MOV(target, heap(adr)) + self.mc.MOV(heap(adr), imm0) else: adr = self.fail_boxes_int.get_addr_for_num(i) self.mc.MOV(target, heap(adr)) @@ -1157,37 +1157,24 @@ not_implemented("llong_to_int: %s" % (loc,)) def genop_llong_from_int(self, op, arglocs, resloc): - loc = arglocs[0] - if isinstance(loc, ConstFloatLoc): - self.mc.MOVSD(resloc, loc) - else: - assert loc is eax - assert isinstance(resloc, RegLoc) - loc2 = arglocs[1] - assert isinstance(loc2, RegLoc) - self.mc.CDQ() # eax -> eax:edx - self.mc.MOVD_xr(resloc.value, eax.value) - self.mc.MOVD_xr(loc2.value, edx.value) - self.mc.PUNPCKLDQ_xx(resloc.value, loc2.value) - - def genop_llong_from_two_ints(self, op, arglocs, resloc): - assert isinstance(resloc, RegLoc) - loc1, loc2, loc3 = arglocs - # + loc1, loc2 = arglocs if isinstance(loc1, ConstFloatLoc): + assert loc2 is None self.mc.MOVSD(resloc, loc1) else: assert isinstance(loc1, RegLoc) + assert isinstance(loc2, RegLoc) + assert isinstance(resloc, RegLoc) + self.mc.MOVD_xr(loc2.value, loc1.value) + self.mc.PSRAD_xi(loc2.value, 31) # -> 0 or -1 self.mc.MOVD_xr(resloc.value, loc1.value) - # - if loc2 is not None: - assert isinstance(loc3, RegLoc) - if isinstance(loc2, ConstFloatLoc): - self.mc.MOVSD(loc3, loc2) - else: - assert isinstance(loc2, RegLoc) - 
self.mc.MOVD_xr(loc3.value, loc2.value) - self.mc.PUNPCKLDQ_xx(resloc.value, loc3.value) + self.mc.PUNPCKLDQ_xx(resloc.value, loc2.value) + + def genop_llong_from_uint(self, op, arglocs, resloc): + loc1, = arglocs + assert isinstance(resloc, RegLoc) + assert isinstance(loc1, RegLoc) + self.mc.MOVD_xr(resloc.value, loc1.value) def genop_llong_eq(self, op, arglocs, resloc): loc1, loc2, locxtmp = arglocs @@ -1833,11 +1820,13 @@ if IS_X86_32 and isinstance(resloc, StackLoc) and resloc.width == 8: # a float or a long long return - from pypy.jit.backend.llsupport.descr import LongLongCallDescr - if isinstance(op.getdescr(), LongLongCallDescr): + if op.getdescr().get_return_type() == 'L': self.mc.MOV_br(resloc.value, eax.value) # long long self.mc.MOV_br(resloc.value + 4, edx.value) - # XXX should ideally not move the result on the stack + # XXX should ideally not move the result on the stack, + # but it's a mess to load eax/edx into a xmm register + # and this way is simpler also because the result loc + # can just be always a stack location else: self.mc.FSTP_b(resloc.value) # float return elif size == WORD: @@ -1936,8 +1925,8 @@ self.mc.MOV(eax, heap(adr)) elif kind == REF: adr = self.fail_boxes_ptr.get_addr_for_num(0) - self.mc.XOR_rr(eax.value, eax.value) - self.mc.XCHG(eax, heap(adr)) + self.mc.MOV(eax, heap(adr)) + self.mc.MOV(heap(adr), imm0) else: raise AssertionError(kind) # diff --git a/py/_compat/dep_textwrap.py b/py/_compat/dep_textwrap.py deleted file mode 100644 --- a/py/_compat/dep_textwrap.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", - stacklevel="apipkg") -textwrap = py.std.textwrap diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/py/_plugin/pytest_pylint.py b/py/_plugin/pytest_pylint.py deleted file mode 100644 --- a/py/_plugin/pytest_pylint.py +++ /dev/null @@ -1,36 +0,0 @@ -"""pylint plugin - -XXX: Currently in progress, NOT IN WORKING STATE. 
-""" -import py - -pylint = py.test.importorskip("pylint.lint") - -def pytest_addoption(parser): - group = parser.getgroup('pylint options') - group.addoption('--pylint', action='store_true', - default=False, dest='pylint', - help='run pylint on python files.') - -def pytest_collect_file(path, parent): - if path.ext == ".py": - if parent.config.getvalue('pylint'): - return PylintItem(path, parent) - -#def pytest_terminal_summary(terminalreporter): -# print 'placeholder for pylint output' - -class PylintItem(py.test.collect.Item): - def runtest(self): - capture = py.io.StdCaptureFD() - try: - linter = pylint.lint.PyLinter() - linter.check(str(self.fspath)) - finally: - out, err = capture.reset() - rating = out.strip().split('\n')[-1] - sys.stdout.write(">>>") - print(rating) - assert 0 - - diff --git a/py/_cmdline/pytest.py b/py/_cmdline/pytest.py deleted file mode 100755 --- a/py/_cmdline/pytest.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -import py - -def main(args=None): - raise SystemExit(py.test.cmdline.main(args)) diff --git a/py/_test/config.py b/py/_test/config.py deleted file mode 100644 --- a/py/_test/config.py +++ /dev/null @@ -1,291 +0,0 @@ -import py, os -from py._test.conftesthandle import Conftest -from py._test.pluginmanager import PluginManager -from py._test import parseopt -from py._test.collect import RootCollector - -def ensuretemp(string, dir=1): - """ (deprecated) return temporary directory path with - the given string as the trailing part. It is usually - better to use the 'tmpdir' function argument which will - take care to provide empty unique directories for each - test call even if the test is called multiple times. 
- """ - #py.log._apiwarn(">1.1", "use tmpdir function argument") - return py.test.config.ensuretemp(string, dir=dir) - -class CmdOptions(object): - """ holds cmdline options as attributes.""" - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - def __repr__(self): - return "" %(self.__dict__,) - -class Error(Exception): - """ Test Configuration Error. """ - -class Config(object): - """ access to config values, pluginmanager and plugin hooks. """ - Option = py.std.optparse.Option - Error = Error - basetemp = None - _sessionclass = None - - def __init__(self, topdir=None, option=None): - self.option = option or CmdOptions() - self.topdir = topdir - self._parser = parseopt.Parser( - usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]", - processopt=self._processopt, - ) - self.pluginmanager = PluginManager() - self._conftest = Conftest(onimport=self._onimportconftest) - self.hook = self.pluginmanager.hook - - def _onimportconftest(self, conftestmodule): - self.trace("loaded conftestmodule %r" %(conftestmodule,)) - self.pluginmanager.consider_conftest(conftestmodule) - - def _getmatchingplugins(self, fspath): - allconftests = self._conftest._conftestpath2mod.values() - plugins = [x for x in self.pluginmanager.getplugins() - if x not in allconftests] - plugins += self._conftest.getconftestmodules(fspath) - return plugins - - def trace(self, msg): - if getattr(self.option, 'traceconfig', None): - self.hook.pytest_trace(category="config", msg=msg) - - def _processopt(self, opt): - if hasattr(opt, 'default') and opt.dest: - val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None) - if val is not None: - if opt.type == "int": - val = int(val) - elif opt.type == "long": - val = long(val) - elif opt.type == "float": - val = float(val) - elif not opt.type and opt.action in ("store_true", "store_false"): - val = eval(val) - opt.default = val - else: - name = "option_" + opt.dest - try: - opt.default = self._conftest.rget(name) - except 
(ValueError, KeyError): - pass - if not hasattr(self.option, opt.dest): - setattr(self.option, opt.dest, opt.default) - - def _preparse(self, args): - self.pluginmanager.consider_setuptools_entrypoints() - self.pluginmanager.consider_env() - self.pluginmanager.consider_preparse(args) - self._conftest.setinitial(args) - self.pluginmanager.do_addoption(self._parser) - - def parse(self, args): - """ parse cmdline arguments into this config object. - Note that this can only be called once per testing process. - """ - assert not hasattr(self, 'args'), ( - "can only parse cmdline args at most once per Config object") - self._preparse(args) - self._parser.hints.extend(self.pluginmanager._hints) - args = self._parser.parse_setoption(args, self.option) - if not args: - args.append(py.std.os.getcwd()) - self.topdir = gettopdir(args) - self._rootcol = RootCollector(config=self) - self._setargs(args) - - def _setargs(self, args): - self.args = list(args) - self._argfspaths = [py.path.local(decodearg(x)[0]) for x in args] - - # config objects are usually pickled across system - # barriers but they contain filesystem paths. - # upon getstate/setstate we take care to do everything - # relative to "topdir". 
- def __getstate__(self): - l = [] - for path in self.args: - path = py.path.local(path) - l.append(path.relto(self.topdir)) - return l, self.option.__dict__ - - def __setstate__(self, repr): - # we have to set py.test.config because loading - # of conftest files may use it (deprecated) - # mainly by py.test.config.addoptions() - global config_per_process - py.test.config = config_per_process = self - args, cmdlineopts = repr - cmdlineopts = CmdOptions(**cmdlineopts) - # next line will registers default plugins - self.__init__(topdir=py.path.local(), option=cmdlineopts) - self._rootcol = RootCollector(config=self) - args = [str(self.topdir.join(x)) for x in args] - self._preparse(args) - self._setargs(args) - - def ensuretemp(self, string, dir=True): - return self.getbasetemp().ensure(string, dir=dir) - - def getbasetemp(self): - if self.basetemp is None: - basetemp = self.option.basetemp - if basetemp: - basetemp = py.path.local(basetemp) - if not basetemp.check(dir=1): - basetemp.mkdir() - else: - basetemp = py.path.local.make_numbered_dir(prefix='pytest-') - self.basetemp = basetemp - return self.basetemp - - def mktemp(self, basename, numbered=False): - basetemp = self.getbasetemp() - if not numbered: - return basetemp.mkdir(basename) - else: - return py.path.local.make_numbered_dir(prefix=basename, - keep=0, rootdir=basetemp, lock_timeout=None) - - def getinitialnodes(self): - return [self.getnode(arg) for arg in self.args] - - def getnode(self, arg): - parts = decodearg(arg) - path = py.path.local(parts.pop(0)) - if not path.check(): - raise self.Error("file not found: %s" %(path,)) - topdir = self.topdir - if path != topdir and not path.relto(topdir): - raise self.Error("path %r is not relative to %r" % - (str(path), str(topdir))) - # assumtion: pytest's fs-collector tree follows the filesystem tree - names = list(filter(None, path.relto(topdir).split(path.sep))) - names += parts - try: - return self._rootcol.getbynames(names) - except ValueError: - e = 
py.std.sys.exc_info()[1] - raise self.Error("can't collect: %s\n%s" % (arg, e.args[0])) - - def _getcollectclass(self, name, path): - try: - cls = self._conftest.rget(name, path) - except KeyError: - return getattr(py.test.collect, name) - else: - py.log._apiwarn(">1.1", "%r was found in a conftest.py file, " - "use pytest_collect hooks instead." % (cls,)) - return cls - - def getconftest_pathlist(self, name, path=None): - """ return a matching value, which needs to be sequence - of filenames that will be returned as a list of Path - objects (they can be relative to the location - where they were found). - """ - try: - mod, relroots = self._conftest.rget_with_confmod(name, path) - except KeyError: - return None - modpath = py.path.local(mod.__file__).dirpath() - l = [] - for relroot in relroots: - if not isinstance(relroot, py.path.local): - relroot = relroot.replace("/", py.path.local.sep) - relroot = modpath.join(relroot, abs=True) - l.append(relroot) - return l - - def addoptions(self, groupname, *specs): - """ add a named group of options to the current testing session. - This function gets invoked during testing session initialization. - """ - py.log._apiwarn("1.0", "define pytest_addoptions(parser) to add options", stacklevel=2) - group = self._parser.getgroup(groupname) - for opt in specs: - group._addoption_instance(opt) - return self.option - - def addoption(self, *optnames, **attrs): - return self._parser.addoption(*optnames, **attrs) - - def getvalueorskip(self, name, path=None): - """ return getvalue() or call py.test.skip if no value exists. """ - try: - val = self.getvalue(name, path) - if val is None: - raise KeyError(name) - return val - except KeyError: - py.test.skip("no %r value found" %(name,)) - - def getvalue(self, name, path=None): - """ return 'name' value looked up from the 'options' - and then from the first conftest file found up - the path (including the path itself). 
- if path is None, lookup the value in the initial - conftest modules found during command line parsing. - """ - try: - return getattr(self.option, name) - except AttributeError: - return self._conftest.rget(name, path) - - def setsessionclass(self, cls): - if self._sessionclass is not None: - raise ValueError("sessionclass already set to: %r" %( - self._sessionclass)) - self._sessionclass = cls - - def initsession(self): - """ return an initialized session object. """ - cls = self._sessionclass - if cls is None: - from py._test.session import Session - cls = Session - session = cls(self) - self.trace("instantiated session %r" % session) - return session - -# -# helpers -# - -def gettopdir(args): - """ return the top directory for the given paths. - if the common base dir resides in a python package - parent directory of the root package is returned. - """ - fsargs = [py.path.local(decodearg(arg)[0]) for arg in args] - p = fsargs and fsargs[0] or None - for x in fsargs[1:]: - p = p.common(x) - assert p, "cannot determine common basedir of %s" %(fsargs,) - pkgdir = p.pypkgpath() - if pkgdir is None: - if p.check(file=1): - p = p.dirpath() - return p - else: - return pkgdir.dirpath() - -def decodearg(arg): - arg = str(arg) - return arg.split("::") - -def onpytestaccess(): - # it's enough to have our containing module loaded as - # it initializes a per-process config instance - # which loads default plugins which add to py.test.* - pass - -# a default per-process instance of py.test configuration -config_per_process = Config() diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overriden later). diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/py/_plugin/__init__.py b/py/_plugin/__init__.py deleted file mode 100644 --- a/py/_plugin/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/_test/conftesthandle.py b/py/_test/conftesthandle.py deleted file mode 100644 --- a/py/_test/conftesthandle.py +++ /dev/null @@ -1,113 +0,0 @@ -import py - -class Conftest(object): - """ the single place for accessing values and interacting - towards conftest modules from py.test objects. - - (deprecated) - Note that triggering Conftest instances to import - conftest.py files may result in added cmdline options. - """ - def __init__(self, onimport=None, confcutdir=None): - self._path2confmods = {} - self._onimport = onimport - self._conftestpath2mod = {} - self._confcutdir = confcutdir - - def setinitial(self, args): - """ try to find a first anchor path for looking up global values - from conftests. This function is usually called _before_ - argument parsing. 
conftest files may add command line options - and we thus have no completely safe way of determining - which parts of the arguments are actually related to options - and which are file system paths. We just try here to get - bootstrapped ... - """ - current = py.path.local() - opt = '--confcutdir' - for i in range(len(args)): - opt1 = str(args[i]) - if opt1.startswith(opt): - if opt1 == opt: - if len(args) > i: - p = current.join(args[i+1], abs=True) - elif opt1.startswith(opt + "="): - p = current.join(opt1[len(opt)+1:], abs=1) - self._confcutdir = p - break - for arg in args + [current]: - anchor = current.join(arg, abs=1) - if anchor.check(): # we found some file object - self._path2confmods[None] = self.getconftestmodules(anchor) - # let's also consider test* dirs - if anchor.check(dir=1): - for x in anchor.listdir(lambda x: x.check(dir=1, dotfile=0)): - self.getconftestmodules(x) - break - else: - assert 0, "no root of filesystem?" - - def getconftestmodules(self, path): - """ return a list of imported conftest modules for the given path. 
""" - try: - clist = self._path2confmods[path] - except KeyError: - if path is None: - raise ValueError("missing default confest.") - dp = path.dirpath() - if dp == path: - clist = [] - else: - cutdir = self._confcutdir - clist = self.getconftestmodules(dp) - if cutdir and path != cutdir and not path.relto(cutdir): - pass - else: - conftestpath = path.join("conftest.py") - if conftestpath.check(file=1): - clist.append(self.importconftest(conftestpath)) - self._path2confmods[path] = clist - # be defensive: avoid changes from caller side to - # affect us by always returning a copy of the actual list - return clist[:] - - def rget(self, name, path=None): - mod, value = self.rget_with_confmod(name, path) - return value - - def rget_with_confmod(self, name, path=None): - modules = self.getconftestmodules(path) - modules.reverse() - for mod in modules: - try: - return mod, getattr(mod, name) - except AttributeError: - continue - raise KeyError(name) - - def importconftest(self, conftestpath): - assert conftestpath.check(), conftestpath - try: - return self._conftestpath2mod[conftestpath] - except KeyError: - if not conftestpath.dirpath('__init__.py').check(file=1): - # HACK: we don't want any "globally" imported conftest.py, - # prone to conflicts and subtle problems - modname = str(conftestpath).replace('.', conftestpath.sep) - mod = conftestpath.pyimport(modname=modname) - else: - mod = conftestpath.pyimport() - self._conftestpath2mod[conftestpath] = mod - dirpath = conftestpath.dirpath() - if dirpath in self._path2confmods: - for path, mods in self._path2confmods.items(): - if path and path.relto(dirpath) or path == dirpath: - assert mod not in mods - mods.append(mod) - self._postimport(mod) - return mod - - def _postimport(self, mod): - if self._onimport: - self._onimport(mod) - return mod diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ 
-1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -27,12 +27,13 @@ # --allworkingmodules working_modules = default_modules.copy() working_modules.update(dict.fromkeys( - ["_socket", "unicodedata", "mmap", "fcntl", + ["_socket", "unicodedata", "mmap", "fcntl", "_locale", "rctime" , "select", "zipimport", "_lsprof", "crypt", "signal", "_rawffi", "termios", "zlib", "bz2", "struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", - "_bisect", "binascii", "_multiprocessing", '_warnings', 'micronumpy'] + "_bisect", "binascii", "_multiprocessing", '_warnings', + "_collections", , 'micronumpy'] )) translation_modules = default_modules.copy() @@ -79,8 +80,7 @@ "_rawffi": [("objspace.usemodules.struct", True)], "cpyext": [("translation.secondaryentrypoints", "cpyext"), ("translation.shared", sys.platform == "win32")], - "_ffi": [("translation.jit_ffi", True)], - } +} module_import_dependencies = { # no _rawffi if importing pypy.rlib.clibffi raises ImportError @@ -351,7 +351,7 @@ config.objspace.std.suggest(builtinshortcut=True) config.objspace.std.suggest(optimized_list_getitem=True) config.objspace.std.suggest(getattributeshortcut=True) - config.objspace.std.suggest(newshortcut=True) + config.objspace.std.suggest(newshortcut=True) if not IS_64_BITS: config.objspace.std.suggest(withsmalllong=True) diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- 
a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. From commits-noreply at bitbucket.org Sat Mar 19 00:06:21 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Sat, 19 Mar 2011 00:06:21 +0100 (CET) Subject: [pypy-svn] pypy default: inline testrunner Message-ID: <20110318230621.C299F36C20D@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42799:b0feedf8d4d8 Date: 2011-03-19 00:05 +0100 http://bitbucket.org/pypy/pypy/changeset/b0feedf8d4d8/ Log: inline testrunner diff --git a/testrunner/test/examples/normal/example.py b/testrunner/test/examples/normal/example.py new file mode 100644 --- /dev/null +++ b/testrunner/test/examples/normal/example.py @@ -0,0 +1,18 @@ + +def test_one(): + assert 1 == 10/10 + +def test_two(): + assert 2 == 3 + +def test_three(): + assert "hello" == "world" + +def test_many(): + for i in range(100): + yield test_one, + +class TestStuff: + + def test_final(self): + crash diff --git a/testrunner/test/examples/normal/example_importerror.py b/testrunner/test/examples/normal/example_importerror.py new file mode 100644 --- /dev/null +++ 
b/testrunner/test/examples/normal/example_importerror.py @@ -0,0 +1,1 @@ +print 1/0 diff --git a/.hgsub b/.hgsub --- a/.hgsub +++ b/.hgsub @@ -1,3 +1,2 @@ greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -testrunner = [svn]http://codespeak.net/svn/pypy/build/testrunner lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/testrunner/test/examples/normal/failingsetup.py b/testrunner/test/examples/normal/failingsetup.py new file mode 100644 --- /dev/null +++ b/testrunner/test/examples/normal/failingsetup.py @@ -0,0 +1,6 @@ + +def setup_module(mod): + raise RuntimeError + +def test_bar(self): + assert True diff --git a/testrunner/test/test_runner.py b/testrunner/test/test_runner.py new file mode 100644 --- /dev/null +++ b/testrunner/test/test_runner.py @@ -0,0 +1,423 @@ +import py, sys, os, signal, cStringIO, tempfile + +import runner +import pypy + +pytest_script = py.path.local(pypy.__file__).dirpath('test_all.py') + + +def test_busywait(): + class FakeProcess: + def poll(self): + if timers[0] >= timers[1]: + return 42 + return None + class FakeTime: + def sleep(self, delay): + timers[0] += delay + def time(self): + timers[2] += 1 + return 12345678.9 + timers[0] + p = FakeProcess() + prevtime = runner.time + try: + runner.time = FakeTime() + # + timers = [0.0, 0.0, 0] + returncode = runner.busywait(p, 10) + assert returncode == 42 and 0.0 <= timers[0] <= 1.0 + # + timers = [0.0, 3.0, 0] + returncode = runner.busywait(p, 10) + assert returncode == 42 and 3.0 <= timers[0] <= 5.0 and timers[2] <= 10 + # + timers = [0.0, 500.0, 0] + returncode = runner.busywait(p, 1000) + assert returncode == 42 and 500.0<=timers[0]<=510.0 and timers[2]<=100 + # + timers = [0.0, 500.0, 0] + returncode = runner.busywait(p, 100) # get a timeout + assert returncode == None and 100.0 <= timers[0] <= 110.0 + # + finally: + runner.time = prevtime + +def test_should_report_failure(): + should_report_failure = runner.should_report_failure + assert 
should_report_failure("") + assert should_report_failure(". Abc\n. Def\n") + assert should_report_failure("s Ghi\n") + assert not should_report_failure(". Abc\nF Def\n") + assert not should_report_failure(". Abc\nE Def\n") + assert not should_report_failure(". Abc\nP Def\n") + assert not should_report_failure("F Def\n. Ghi\n. Jkl\n") + + +class TestRunHelper(object): + + def setup_method(self, meth): + h, self.fn = tempfile.mkstemp() + os.close(h) + + def teardown_method(self, meth): + os.unlink(self.fn) + + def test_run(self): + res = runner.run([sys.executable, "-c", "print 42"], '.', + py.path.local(self.fn)) + assert res == 0 + out = py.path.local(self.fn).read('r') + assert out == "42\n" + + def test_error(self): + res = runner.run([sys.executable, "-c", "import sys; sys.exit(3)"], '.', py.path.local(self.fn)) + assert res == 3 + + def test_signal(self): + if sys.platform == 'win32': + py.test.skip("no death by signal on windows") + res = runner.run([sys.executable, "-c", "import os; os.kill(os.getpid(), 9)"], '.', py.path.local(self.fn)) + assert res == -9 + + def test_timeout(self): + res = runner.run([sys.executable, "-c", "while True: pass"], '.', py.path.local(self.fn), timeout=3) + assert res == -999 + + def test_timeout_lock(self): + res = runner.run([sys.executable, "-c", "import threading; l=threading.Lock(); l.acquire(); l.acquire()"], '.', py.path.local(self.fn), timeout=3) + assert res == -999 + + def test_timeout_syscall(self): + res = runner.run([sys.executable, "-c", "import socket; s=s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM); s.bind(('', 0)); s.recv(1000)"], '.', py.path.local(self.fn), timeout=3) + assert res == -999 + + def test_timeout_success(self): + res = runner.run([sys.executable, "-c", "print 42"], '.', + py.path.local(self.fn), timeout=2) + assert res == 0 + out = py.path.local(self.fn).read('r') + assert out == "42\n" + + +class TestExecuteTest(object): + + def setup_class(cls): + cls.real_run = (runner.run,) + cls.called 
= [] + cls.exitcode = [0] + + def fake_run(args, cwd, out, timeout): + cls.called = (args, cwd, out, timeout) + return cls.exitcode[0] + runner.run = fake_run + + def teardown_class(cls): + runner.run = cls.real_run[0] + + def test_explicit(self): + res = runner.execute_test('/wd', 'test_one', 'out', 'LOGFILE', + interp=['INTERP', 'IARG'], + test_driver=['driver', 'darg'], + timeout='secs') + + expected = ['INTERP', 'IARG', + 'driver', 'darg', + '--resultlog=LOGFILE', + 'test_one'] + + assert self.called == (expected, '/wd', 'out', 'secs') + assert res == 0 + + def test_explicit_win32(self): + res = runner.execute_test('/wd', 'test_one', 'out', 'LOGFILE', + interp=['./INTERP', 'IARG'], + test_driver=['driver', 'darg'], + timeout='secs', + _win32=True + ) + + expected = ['/wd' + os.sep + './INTERP', 'IARG', + 'driver', 'darg', + '--resultlog=LOGFILE', + 'test_one'] + + assert self.called == (expected, '/wd', 'out', 'secs') + assert res == 0 + + def test_error(self): + self.exitcode[:] = [1] + res = runner.execute_test('/wd', 'test_one', 'out', 'LOGFILE', + interp=['INTERP', 'IARG'], + test_driver=['driver', 'darg']) + assert res == 1 + + + self.exitcode[:] = [-signal.SIGSEGV] + res = runner.execute_test('/wd', 'test_one', 'out', 'LOGFILE', + interp=['INTERP', 'IARG'], + test_driver=['driver', 'darg']) + assert res == -signal.SIGSEGV + + def test_interpret_exitcode(self): + failure, extralog = runner.interpret_exitcode(0, "test_foo") + assert not failure + assert extralog == "" + + failure, extralog = runner.interpret_exitcode(1, "test_foo", "") + assert failure + assert extralog == """! test_foo + Exit code 1. +""" + + failure, extralog = runner.interpret_exitcode(1, "test_foo", "F Foo\n") + assert failure + assert extralog == "" + + failure, extralog = runner.interpret_exitcode(2, "test_foo") + assert failure + assert extralog == """! test_foo + Exit code 2. 
+""" + + failure, extralog = runner.interpret_exitcode(-signal.SIGSEGV, + "test_foo") + assert failure + assert extralog == """! test_foo + Killed by SIGSEGV. +""" + +class RunnerTests(object): + with_thread = True + + def setup_class(cls): + cls.real_invoke_in_thread = (runner.invoke_in_thread,) + if not cls.with_thread: + runner.invoke_in_thread = lambda func, args: func(*args) + + cls.udir = py.path.local.make_numbered_dir(prefix='usession-runner-', + keep=3) + cls.manydir = cls.udir.join('many').ensure(dir=1) + + cls.udir.join("conftest.py").write("pytest_plugins = 'resultlog'\n") + + def fill_test_dir(test_dir, fromdir='normal'): + for p in py.path.local(__file__).dirpath( + 'examples', fromdir).listdir("*.py"): + p.copy(test_dir.join('test_'+p.basename)) + + + test_normal_dir0 = cls.manydir.join('one', 'test_normal').ensure(dir=1) + cls.one_test_dir = cls.manydir.join('one') + + fill_test_dir(test_normal_dir0) + + + test_normal_dir1 = cls.manydir.join('two', 'test_normal1').ensure(dir=1) + test_normal_dir2 = cls.manydir.join('two', 'pkg', + 'test_normal2').ensure(dir=1) + cls.two_test_dir = cls.manydir.join('two') + + fill_test_dir(test_normal_dir1) + fill_test_dir(test_normal_dir2) + + cls.test_stall_dir = cls.udir.join('stall').ensure(dir=1) + test_stall_dir0 = cls.test_stall_dir.join('zero').ensure(dir=1) + fill_test_dir(test_stall_dir0, 'stall') + + def teardown_class(cls): + runner.invoke_in_thread = cls.real_invoke_in_thread[0] + + def test_one_dir(self): + test_driver = [pytest_script] + + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + run_param = runner.RunParam(self.one_test_dir) + run_param.test_driver = test_driver + run_param.parallel_runs = 3 + + res = runner.execute_tests(run_param, ['test_normal'], log, out) + + assert res + + out = out.getvalue() + assert out + assert '\r\n' not in out + assert '\n' in out + + log = log.getvalue() + assert '\r\n' not in log + assert '\n' in log + log_lines = log.splitlines() + + assert 
log_lines[0] == ". test_normal/test_example.py:test_one" + nfailures = 0 + noutcomes = 0 + for line in log_lines: + if line[0] != ' ': + noutcomes += 1 + if line[0] != '.': + nfailures += 1 + + assert noutcomes == 107 + assert nfailures == 6 + + def test_one_dir_dry_run(self): + test_driver = [pytest_script] + + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + run_param = runner.RunParam(self.one_test_dir) + run_param.test_driver = test_driver + run_param.parallel_runs = 3 + run_param.dry_run = True + + res = runner.execute_tests(run_param, ['test_normal'], log, out) + + assert not res + + assert log.getvalue() == "" + + out_lines = out.getvalue().splitlines() + + assert len(out_lines) == 5 + + assert out_lines[2].startswith("++ starting") + assert out_lines[4].startswith("run [") + for line in out_lines[2:]: + assert "test_normal" in line + + def test_many_dirs(self): + test_driver = [pytest_script] + + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + cleanedup = [] + def cleanup(testdir): + cleanedup.append(testdir) + + run_param = runner.RunParam(self.manydir) + run_param.test_driver = test_driver + run_param.parallel_runs = 3 + run_param.cleanup = cleanup + + testdirs = [] + run_param.collect_testdirs(testdirs) + alltestdirs = testdirs[:] + + res = runner.execute_tests(run_param, testdirs, log, out) + + assert res + + assert out.getvalue() + + log_lines = log.getvalue().splitlines() + + nfailures = 0 + noutcomes = 0 + for line in log_lines: + if line[0] != ' ': + noutcomes += 1 + if line[0] != '.': + nfailures += 1 + + assert noutcomes == 3*107 + assert nfailures == 3*6 + + assert set(cleanedup) == set(alltestdirs) + + def test_timeout(self): + test_driver = [pytest_script] + + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + run_param = runner.RunParam(self.test_stall_dir) + run_param.test_driver = test_driver + run_param.parallel_runs = 3 + run_param.timeout = 3 + + testdirs = [] + run_param.collect_testdirs(testdirs) + res = 
runner.execute_tests(run_param, testdirs, log, out) + assert res + + log_lines = log.getvalue().splitlines() + assert log_lines[1] == ' TIMEOUT' + + def test_run_wrong_interp(self): + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + run_param = runner.RunParam(self.one_test_dir) + run_param.interp = ['wrong-interp'] + run_param.parallel_runs = 3 + + testdirs = [] + run_param.collect_testdirs(testdirs) + res = runner.execute_tests(run_param, testdirs, log, out) + assert res + + log_lines = log.getvalue().splitlines() + assert log_lines[1] == ' Failed to run interp' + + def test_run_bad_get_test_driver(self): + test_driver = [pytest_script] + + log = cStringIO.StringIO() + out = cStringIO.StringIO() + + run_param = runner.RunParam(self.one_test_dir) + run_param.parallel_runs = 3 + def boom(testdir): + raise RuntimeError("Boom") + run_param.get_test_driver = boom + + testdirs = [] + run_param.collect_testdirs(testdirs) + res = runner.execute_tests(run_param, testdirs, log, out) + assert res + + log_lines = log.getvalue().splitlines() + assert log_lines[1] == ' Failed with exception in execute-test' + + +class TestRunnerNoThreads(RunnerTests): + with_thread = False + + def test_collect_testdirs(self): + res = [] + seen = [] + run_param = runner.RunParam(self.one_test_dir) + real_collect_one_testdir = run_param.collect_one_testdir + + def witness_collect_one_testdir(testdirs, reldir, tests): + seen.append((reldir, sorted(map(str, tests)))) + real_collect_one_testdir(testdirs, reldir, tests) + + run_param.collect_one_testdir = witness_collect_one_testdir + + run_param.collect_testdirs(res) + + assert res == ['test_normal'] + assert len(seen) == 1 + reldir, tests = seen[0] + assert reldir == 'test_normal' + for test in tests: + assert test.startswith('test_normal/') + + run_param.collect_one_testdir = real_collect_one_testdir + res = [] + run_param = runner.RunParam(self.two_test_dir) + + run_param.collect_testdirs(res) + + assert sorted(res) == 
['pkg/test_normal2', 'test_normal1'] + + +class TestRunner(RunnerTests): + pass + diff --git a/testrunner/runner.py b/testrunner/runner.py new file mode 100644 --- /dev/null +++ b/testrunner/runner.py @@ -0,0 +1,393 @@ +import sys, os, signal, thread, Queue, time +import py +import subprocess, optparse + +if sys.platform == 'win32': + PROCESS_TERMINATE = 0x1 + try: + import win32api, pywintypes + except ImportError: + def _kill(pid, sig): + import ctypes + winapi = ctypes.windll.kernel32 + proch = winapi.OpenProcess(PROCESS_TERMINATE, 0, pid) + winapi.TerminateProcess(proch, 1) == 1 + winapi.CloseHandle(proch) + else: + def _kill(pid, sig): + try: + proch = win32api.OpenProcess(PROCESS_TERMINATE, 0, pid) + win32api.TerminateProcess(proch, 1) + win32api.CloseHandle(proch) + except pywintypes.error, e: + pass + + SIGKILL = SIGTERM = 0 + READ_MODE = 'rU' + WRITE_MODE = 'wb' +else: + def _kill(pid, sig): + try: + os.kill(pid, sig) + except OSError: + pass + + SIGKILL = signal.SIGKILL + SIGTERM = signal.SIGTERM + READ_MODE = 'r' + WRITE_MODE = 'w' + +EXECUTEFAILED = -1001 +RUNFAILED = -1000 +TIMEDOUT = -999 + +def busywait(p, timeout): + t0 = time.time() + delay = 0.5 + while True: + time.sleep(delay) + returncode = p.poll() + if returncode is not None: + return returncode + tnow = time.time() + if (tnow-t0) >= timeout: + return None + delay = min(delay * 1.15, 7.2) + +def run(args, cwd, out, timeout=None): + f = out.open('w') + try: + try: + p = subprocess.Popen(args, cwd=str(cwd), stdout=f, stderr=f) + except Exception, e: + f.write("Failed to run %s with cwd='%s' timeout=%s:\n" + " %s\n" + % (args, cwd, timeout, e)) + return RUNFAILED + + if timeout is None: + return p.wait() + else: + returncode = busywait(p, timeout) + if returncode is not None: + return returncode + # timeout! 
+ _kill(p.pid, SIGTERM) + if busywait(p, 10) is None: + _kill(p.pid, SIGKILL) + return TIMEDOUT + finally: + f.close() + +def dry_run(args, cwd, out, timeout=None): + f = out.open('w') + try: + f.write("run %s with cwd='%s' timeout=%s\n" % (args, cwd, timeout)) + finally: + f.close() + return 0 + +def getsignalname(n): + for name, value in signal.__dict__.items(): + if value == n and name.startswith('SIG'): + return name + return 'signal %d' % (n,) + +def execute_test(cwd, test, out, logfname, interp, test_driver, + do_dry_run=False, timeout=None, + _win32=(sys.platform=='win32')): + args = interp + test_driver + args += ['-p', 'resultlog', '--resultlog=%s' % logfname, test] + + args = map(str, args) + interp0 = args[0] + if (_win32 and not os.path.isabs(interp0) and + ('\\' in interp0 or '/' in interp0)): + args[0] = os.path.join(str(cwd), interp0) + + if do_dry_run: + runfunc = dry_run + else: + runfunc = run + + exitcode = runfunc(args, cwd, out, timeout=timeout) + + return exitcode + +def should_report_failure(logdata): + # When we have an exitcode of 1, it might be because of failures + # that occurred "regularly", or because of another crash of py.test. + # We decide heuristically based on logdata: if it looks like it + # contains "F", "E" or "P" then it's a regular failure, otherwise + # we have to report it. + for line in logdata.splitlines(): + if (line.startswith('F ') or + line.startswith('E ') or + line.startswith('P ')): + return False + return True + +def interpret_exitcode(exitcode, test, logdata=""): + extralog = "" + if exitcode: + failure = True + if exitcode != 1 or should_report_failure(logdata): + if exitcode > 0: + msg = "Exit code %d." % exitcode + elif exitcode == TIMEDOUT: + msg = "TIMEOUT" + elif exitcode == RUNFAILED: + msg = "Failed to run interp" + elif exitcode == EXECUTEFAILED: + msg = "Failed with exception in execute-test" + else: + msg = "Killed by %s." % getsignalname(-exitcode) + extralog = "! 
%s\n %s\n" % (test, msg) + else: + failure = False + return failure, extralog + +def worker(num, n, run_param, testdirs, result_queue): + sessdir = run_param.sessdir + root = run_param.root + get_test_driver = run_param.get_test_driver + interp = run_param.interp + dry_run = run_param.dry_run + timeout = run_param.timeout + cleanup = run_param.cleanup + # xxx cfg thread start + while 1: + try: + test = testdirs.pop(0) + except IndexError: + result_queue.put(None) # done + return + result_queue.put(('start', test)) + basename = py.path.local(test).purebasename + logfname = sessdir.join("%d-%s-pytest-log" % (num, basename)) + one_output = sessdir.join("%d-%s-output" % (num, basename)) + num += n + + try: + test_driver = get_test_driver(test) + exitcode = execute_test(root, test, one_output, logfname, + interp, test_driver, do_dry_run=dry_run, + timeout=timeout) + + cleanup(test) + except: + print "execute-test for %r failed with:" % test + import traceback + traceback.print_exc() + exitcode = EXECUTEFAILED + + if one_output.check(file=1): + output = one_output.read(READ_MODE) + else: + output = "" + if logfname.check(file=1): + logdata = logfname.read(READ_MODE) + else: + logdata = "" + + failure, extralog = interpret_exitcode(exitcode, test, logdata) + + if extralog: + logdata += extralog + + result_queue.put(('done', test, failure, logdata, output)) + +invoke_in_thread = thread.start_new_thread + +def start_workers(n, run_param, testdirs): + result_queue = Queue.Queue() + for i in range(n): + invoke_in_thread(worker, (i, n, run_param, testdirs, + result_queue)) + return result_queue + + +def execute_tests(run_param, testdirs, logfile, out): + sessdir = py.path.local.make_numbered_dir(prefix='usession-testrunner-', + keep=4) + run_param.sessdir = sessdir + + run_param.startup() + + N = run_param.parallel_runs + failure = False + + for testname in testdirs: + out.write("-- %s\n" % testname) + out.write("-- total: %d to run\n" % len(testdirs)) + + result_queue = 
start_workers(N, run_param, testdirs) + + done = 0 + started = 0 + + worker_done = 0 + while True: + res = result_queue.get() + if res is None: + worker_done += 1 + if worker_done == N: + break + continue + + if res[0] == 'start': + started += 1 + out.write("++ starting %s [%d started in total]\n" % (res[1], + started)) + continue + + testname, somefailed, logdata, output = res[1:] + done += 1 + failure = failure or somefailed + + heading = "__ %s [%d done in total] " % (testname, done) + + out.write(heading + (79-len(heading))*'_'+'\n') + + out.write(output) + if logdata: + logfile.write(logdata) + + run_param.shutdown() + + return failure + + +class RunParam(object): + dry_run = False + interp = [os.path.abspath(sys.executable)] + pytestpath = os.path.abspath(os.path.join('py', 'bin', 'py.test')) + if not os.path.exists(pytestpath): + pytestpath = os.path.abspath(os.path.join('pytest.py')) + assert os.path.exists(pytestpath) + test_driver = [pytestpath] + + parallel_runs = 1 + timeout = None + cherrypick = None + + def __init__(self, root): + self.root = root + self.self = self + + def startup(self): + pass + + def shutdown(self): + pass + + def get_test_driver(self, testdir): + return self.test_driver + + def is_test_py_file(self, p): + name = p.basename + return name.startswith('test_') and name.endswith('.py') + + def reltoroot(self, p): + rel = p.relto(self.root) + return rel.replace(os.sep, '/') + + def collect_one_testdir(self, testdirs, reldir, tests): + testdirs.append(reldir) + return + + def collect_testdirs(self, testdirs, p=None): + if p is None: + p = self.root + + reldir = self.reltoroot(p) + entries = [p1 for p1 in p.listdir() if p1.check(dotfile=0)] + entries.sort() + + if p != self.root: + for p1 in entries: + if self.is_test_py_file(p1): + self.collect_one_testdir(testdirs, reldir, + [self.reltoroot(t) for t in entries + if self.is_test_py_file(t)]) + return + + for p1 in entries: + if p1.check(dir=1, link=0): + self.collect_testdirs(testdirs, 
p1) + + def cleanup(self, testdir): + pass + + +def main(args): + parser = optparse.OptionParser() + parser.add_option("--logfile", dest="logfile", default=None, + help="accumulated machine-readable logfile") + parser.add_option("--output", dest="output", default='-', + help="plain test output (default: stdout)") + parser.add_option("--config", dest="config", default=[], + action="append", + help="configuration python file (optional)") + parser.add_option("--root", dest="root", default=".", + help="root directory for the run") + parser.add_option("--parallel-runs", dest="parallel_runs", default=0, + type="int", + help="number of parallel test runs") + parser.add_option("--dry-run", dest="dry_run", default=False, + action="store_true", + help="dry run"), + parser.add_option("--timeout", dest="timeout", default=None, + type="int", + help="timeout in secs for test processes") + + opts, args = parser.parse_args(args) + + if opts.logfile is None: + print "no logfile specified" + sys.exit(2) + + logfile = open(opts.logfile, WRITE_MODE) + if opts.output == '-': + out = sys.stdout + else: + out = open(opts.output, WRITE_MODE) + + root = py.path.local(opts.root) + + testdirs = [] + + run_param = RunParam(root) + # the config files are python files whose run overrides the content + # of the run_param instance namespace + # in that code function overriding method should not take self + # though a self and self.__class__ are available if needed + for config_py_file in opts.config: + config_py_file = os.path.expanduser(config_py_file) + if py.path.local(config_py_file).check(file=1): + print >>out, "using config", config_py_file + execfile(config_py_file, run_param.__dict__) + + if run_param.cherrypick: + for p in run_param.cherrypick: + run_param.collect_testdirs(testdirs, root.join(p)) + else: + run_param.collect_testdirs(testdirs) + + if opts.parallel_runs: + run_param.parallel_runs = opts.parallel_runs + if opts.timeout: + run_param.timeout = opts.timeout + 
run_param.dry_run = opts.dry_run + + if run_param.dry_run: + print >>out, run_param.__dict__ + + res = execute_tests(run_param, testdirs, logfile, out) + + if res: + sys.exit(1) + + +if __name__ == '__main__': + main(sys.argv) diff --git a/testrunner/scratchbox_runner.py b/testrunner/scratchbox_runner.py new file mode 100644 --- /dev/null +++ b/testrunner/scratchbox_runner.py @@ -0,0 +1,27 @@ + +""" This is a very hackish runner for cross compilation toolchain scratchbox. +Later on we might come out with some general solution +""" + +import os + +def args_for_scratchbox(cwd, args): + return ['/scratchbox/login', '-d', str(cwd)] + args + +def run_scratchbox(args, cwd, out, timeout=None): + return run(args_for_scratchbox(cwd, args), cwd, out, timeout) + +def dry_run_scratchbox(args, cwd, out, timeout=None): + return dry_run(args_for_scratchbox(cwd, args), cwd, out, timeout) + +import runner +# XXX hack hack hack +dry_run = runner.dry_run +run = runner.run + +runner.dry_run = dry_run_scratchbox +runner.run = run_scratchbox + +if __name__ == '__main__': + import sys + runner.main(sys.argv) diff --git a/.hgsubstate b/.hgsubstate --- a/.hgsubstate +++ b/.hgsubstate @@ -1,3 +1,2 @@ 80037 greenlet 80409 lib_pypy/pyrepl -80409 testrunner diff --git a/testrunner/test/__init__.py b/testrunner/test/__init__.py new file mode 100644 diff --git a/testrunner/test/test_scratchbox_runner.py b/testrunner/test/test_scratchbox_runner.py new file mode 100644 --- /dev/null +++ b/testrunner/test/test_scratchbox_runner.py @@ -0,0 +1,5 @@ +import scratchbox_runner + +def test_scratchbox(): + expected = ['/scratchbox/login', '-d', 'x/y', 'a', 'b'] + assert scratchbox_runner.args_for_scratchbox('x/y', ['a', 'b']) == expected diff --git a/testrunner/test/examples/stall/example.py b/testrunner/test/examples/stall/example.py new file mode 100644 --- /dev/null +++ b/testrunner/test/examples/stall/example.py @@ -0,0 +1,5 @@ + + +def test_hanging(): + while True: + pass diff --git 
a/testrunner/test/examples/normal/failingsetup_tricky.py b/testrunner/test/examples/normal/failingsetup_tricky.py new file mode 100644 --- /dev/null +++ b/testrunner/test/examples/normal/failingsetup_tricky.py @@ -0,0 +1,6 @@ + +def setup_module(mod): + raise RuntimeError + +def test_goo(self): + yield (lambda: None) From commits-noreply at bitbucket.org Sat Mar 19 04:42:35 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sat, 19 Mar 2011 04:42:35 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Committing before sync. Message-ID: <20110319034235.7213A282BAA@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42800:84a7ba8b4d4e Date: 2011-03-18 00:30 -0700 http://bitbucket.org/pypy/pypy/changeset/84a7ba8b4d4e/ Log: Committing before sync. diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -8,6 +8,7 @@ from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify from pypy.jit.metainterp.optimizeopt.addition import OptAddition +from pypy.jit.metainterp.optimizeopt.last_setitem import OptLastSetitem from pypy.rlib.jit import PARAMETERS from pypy.rlib.unroll import unrolling_iterable @@ -15,6 +16,7 @@ ('intbounds', OptIntBounds), ('addition', OptAddition), ('rewrite', OptRewrite), + #('lastsetitem', OptLastSetitem), ('virtualize', OptVirtualize), ('string', OptString), ('heap', OptHeap), From commits-noreply at bitbucket.org Sat Mar 19 04:45:10 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sat, 19 Mar 2011 04:45:10 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Merge with default. 
Message-ID: <20110319034510.DB69A282BAA@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42801:06542ec3e557 Date: 2011-03-18 00:36 -0700 http://bitbucket.org/pypy/pypy/changeset/06542ec3e557/ Log: Merge with default. diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. thunk space or - another special space diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. 
_`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. 
diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. -The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. Our aim is of course to make sure that the communities way of working -is disturbed as little as possible and that contributing to PyPy still feels -fun and interesting (;-) but also to try to show to the EU as well as other -funded projects that open source ideas, tools and methods are really good ways -of running development projects. So the way PyPy as a project is being run - -distributed and agile - is something we think might be of use to other open -source development projects and commercial projects. - -Main methods for achieving this is: - - * Sprint driven development - * Sync meetings - -Main tools for achieving this is: - - * py.test - automated testing - * Subversion - version control - * Transparent communication and documentation (mailinglists, IRC, tutorials - etc etc) - - -Sprint driven development: --------------------------- - -What is a sprint and why are we sprinting? - -Originally the sprint methodology used in the Python community grew from -practices within Zope3 development. The definition of a sprint is "two-day or -three-day focused development session, in which developers pair off together -in a room and focus on building a particular subsystem". - -Other typical sprint factors: - - * no more than 10 people (although other projects as well as PyPy haven been - noted to have more than that. 
This is the recommendation and it is - probably based on the idea of having a critical mass of people who can - interact/communicate and work without adding the need for more than just - the absolute necessary coordination time. The sprints during 2005 and 2006 have - been having ca 13-14 people per sprint, the highest number of participants - during a PyPy sprint has been 24 developers) - - * a coach (the coach is the "manager" of the sprint, he/she sets the goals, - prepares, leads and coordinate the work and track progress and makes this - visible for the team. Important to note here - PyPy have never had coaches - in our sprints. Instead we hold short status meetings in the whole group, - decisions are made in the same way. So far this have worked well and we - still have been able to achieve tremendous results under stressed - conditions, releases and such like. What we do have is a local organizer, - often a developer living in the area and one more developer who prepares - and organizes sprint. They do not "manage" the sprint when its started - - their role is more of the logistic nature. This doesn't mean that we wont - have use for the coach technique or something similar in the future). - - * only coding (this is a tough one. There have been projects who have used - the sprinting method to just visionalize och gather input. PyPy have had a - similar brainstorming start up sprint. So far though this is the official - line although again, if you visit a PyPy sprint we are doing quite a lot - of other small activities in subgroups as well - planning sprints, - documentation, coordinating our EU deliverables and evaluation etc. But - don't worry - our main focus is programming ;-) - - * using XP techniques (mainly pairprogramming and unit testing - PyPy is - leaning heavily on these aspects). 
Pairing up core developers with people - with different levels of knowledge of the codebase have had the results - that people can quite quickly get started and join in the development. - Many of our participants (new to the project and the codebase) have - expressed how pairprogramming in combination with working on the automated - tests have been a great way of getting started. This is of course also a - dilemma because our core developers might have to pair up to solve some - extra hairy problems which affects the structure and effect of the other - pairs. - -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. - -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. 
In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. 
Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. A permanent Internet connection is a must - - has the venue were the sprint is planned to be weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue is - being met should therefore have very good local connections or, preferably - live there. - -3. Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content is fully decided a sprint - announcement is being made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcements points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and between to track work. 
After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and updated to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. 
Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. - - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. - - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! 
- -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? -+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden 
- Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo - Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. 
As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt 
b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. - -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. 
The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. - -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. This belongs -to the harder parts. - -What is not meant by pickling? -.............................. - -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state im memory -is not what I consider a real solution. In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend of we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. - -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. 
- -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. - -For other types with are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. - -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ - -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. 
SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return form the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. -Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. - -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. 
- -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. It can be made non-recursive if the map operation - creates its own frame to keep state. - -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. - -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. - -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. 
- -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. -This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. - -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. 
-If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. - -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. - -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. 
For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. - -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. -The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. - -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... - -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which not it not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. 
Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. - -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable lifeness, it does not -track references which are known to be held by other objects. -Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. -The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the sketched problem of above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. 
-Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - -.. sectnum:: -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. - - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. - -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. 
- -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. - -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. - -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. - -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. - - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. 
- -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. - -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. - -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. - -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. _`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. 
_`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. _`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. _`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. _`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) 
- -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. - -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper 
tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. _`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. 
_`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. _Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. 
There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. 
- - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. 
- -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. 
- -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. 
- -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. - -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. 
For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. - -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... 
- except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. 
- -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. - -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. 
- -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. _`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. 
diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). - - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. 
The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. - - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. 
- - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). 
- - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. ``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. 
_pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. 
This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. - -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). 
- -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. 
The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. 
-This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: -.. 
sectnum:: - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. _`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. _`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. 
If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. 
Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. - - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. _`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. 
- And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. _`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. 
- -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified.2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. 
**This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). -As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. - -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. -- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). 
- -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,237 +0,0 @@ -PyPy, like any large project, has developed a jargon of its own. This -document gives brief definition of some of these terms and provides -links to more information. - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. 
- It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. _`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. _llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. 
_lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. _`promotion`: - -**promotion** - JIT_ terminology. 
*promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. _`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. 
_`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. _`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. 
Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. 
The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool objects. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must following the ``then`` or ``else`` branch of the ``if``. 
-So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this with doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulates the secret values needs to be -RPython. Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. 
The compiled -version is checked to satisfy the security constrains, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it. Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. 
This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are another variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. 
It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret". -Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. 
- -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. - -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. 
From the object space' point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. (Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. 
yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,59 +0,0 @@ - -The PyPy project aims at producing a flexible and fast Python_ -implementation. The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? `more...`_ - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. 
You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. _`Release 1.4`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. 
It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.txt deleted file mode 100644 --- a/pypy/doc/jit/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -======================================================================== - JIT documentation -======================================================================== - -:abstract: - - When PyPy is translated into an executable like ``pypy-c``, the - executable contains a full virtual machine that can optionally - include a Just-In-Time compiler. This JIT compiler is **generated - automatically from the interpreter** that we wrote in RPython. - - This JIT Compiler Generator can be applied on interpreters for any - language, as long as the interpreter itself is written in RPython - and contains a few hints to guide the JIT Compiler Generator. - - -Content ------------------------------------------------------------- - -- Overview_: motivating our approach - -- Notes_ about the current work in PyPy - - -.. _Overview: overview.html -.. _Notes: pyjitpl5.html diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. 
- -The other approach ------------------- - -The general idea to solve handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. - - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes an usual -ootyped flowgraph and split the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). - - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). 
- -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). - -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. 
- - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. 
In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only one kind of ``Pair``, whose ``value`` and -``behaviour`` types are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. - -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performance. - - diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. _`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. 
-Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our host was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. _`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were held at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. 
_`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There were over a hundred -attendees. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. _report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -.. _`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. 
Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. _`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. _`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. 
-Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. -Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. 
_`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). -Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. _`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. 
-Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. _`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. 
_`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. 
_`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. _page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 7246.38 pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 
4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - real 0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. 
_`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. 
If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. - -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,123 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. contents:: -.. sectnum:: - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. 
Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. _`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. _`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. -We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. 
- -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. - - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. _`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. 
include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). 
- -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. 
- -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. _`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. 
-By default, this is the best backend for the current platform. diff --git a/pypy/doc/jit/overview.txt b/pypy/doc/jit/overview.txt deleted file mode 100644 --- a/pypy/doc/jit/overview.txt +++ /dev/null @@ -1,195 +0,0 @@ ------------------------------------------------------------------------- - Motivating JIT Compiler Generation ------------------------------------------------------------------------- - -.. contents:: -.. sectnum:: - -This is a non-technical introduction and motivation for PyPy's approach -to Just-In-Time compiler generation. - - -Motivation -======================================================================== - -Overview --------- - -Writing an interpreter for a complex dynamic language like Python is not -a small task, especially if, for performance goals, we want to write a -Just-in-Time (JIT) compiler too. - -The good news is that it's not what we did. We indeed wrote an -interpreter for Python, but we never wrote any JIT compiler for Python -in PyPy. Instead, we use the fact that our interpreter for Python is -written in RPython, which is a nice, high-level language -- and we turn -it *automatically* into a JIT compiler for Python. - -This transformation is of course completely transparent to the user, -i.e. the programmer writing Python programs. The goal (which we -achieved) is to support *all* Python features -- including, for example, -random frame access and debuggers. But it is also mostly transparent to -the language implementor, i.e. to the source code of the Python -interpreter. It only needs a bit of guidance: we had to put a small -number of hints in the source code of our interpreter. Based on these -hints, the *JIT compiler generator* produces a JIT compiler which has -the same language semantics as the original interpreter by construction. -This JIT compiler itself generates machine code at runtime, aggressively -optimizing the user's program and leading to a big performance boost, -while keeping the semantics unmodified. 
Of course, the interesting bit -is that our Python language interpreter can evolve over time without -getting out of sync with the JIT compiler. - - -The path we followed --------------------- - -Our previous incarnations of PyPy's JIT generator were based on partial -evaluation. This is a well-known and much-researched topic, considered -to be very promising. There have been many attempts to use it to -automatically transform an interpreter into a compiler. However, none of -them have lead to substantial speedups for real-world languages. We -believe that the missing key insight is to use partial evaluation to -produce just-in-time compilers, rather than classical ahead-of-time -compilers. If this turns out to be correct, the practical speed of -dynamic languages could be vastly improved. - -All these previous JIT compiler generators were producing JIT compilers -similar to the hand-written Psyco. But today, starting from 2009, our -prototype is no longer using partial evaluation -- at least not in a way -that would convince paper reviewers. It is instead based on the notion -of *tracing JIT,* recently studied for Java and JavaScript. When -compared to all existing tracing JITs so far, however, partial -evaluation gives us some extra techniques that we already had in our -previous JIT generators, notably how to optimize structures by removing -allocations. - -The closest comparison to our current JIT is Tamarin's TraceMonkey. -However, this JIT compiler is written manually, which is quite some -effort. In PyPy, we write a JIT generator at the level of RPython, -which means that our final JIT does not have to -- indeed, cannot -- be -written to encode all the details of the full Python language. These -details are automatically supplied by the fact that we have an -interpreter for full Python. 
- - -Practical results ------------------ - -The JIT compilers that we generate use some techniques that are not in -widespread use so far, but they are not exactly new either. The point -we want to make here is not that we are pushing the theoretical limits -of how fast a given dynamic language can be run. Our point is: we are -making it **practical** to have reasonably good Just-In-Time compilers -for all dynamic languages, no matter how complicated or non-widespread -(e.g. Open Source dynamic languages without large industry or academic -support, or internal domain-specific languages). By practical we mean -that this should be: - -* Easy: requires little more efforts than writing the interpreter in the - first place. - -* Maintainable: our generated JIT compilers are not separate projects - (we do not generate separate source code, but only throw-away C code - that is compiled into the generated VM). In other words, the whole - JIT compiler is regenerated anew every time the high-level interpreter - is modified, so that they cannot get out of sync no matter how fast - the language evolves. - -* Fast enough: we can get some rather good performance out of the - generated JIT compilers. That's the whole point, of course. - - -Alternative approaches to improve speed -======================================================================== - -+----------------------------------------------------------------------+ -| :NOTE: | -| | -| Please take the following section as just a statement of opinion. | -| In order to be debated over, the summaries should first be | -| expanded into full arguments. 
We include them here as links; | -| we are aware of them, even if sometimes pessimistic about them | -| ``:-)`` | -+----------------------------------------------------------------------+ - -There are a large number of approaches to improving the execution speed of -dynamic programming languages, most of which only produce small improvements -and none offer the flexibility and customisability provided by our approach. -Over the last 6 years of tweaking, the speed of CPython has only improved by a -factor of 1.3 or 1.4 (depending on benchmarks). Many tweaks are applicable to -PyPy as well. Indeed, some of the CPython tweaks originated as tweaks for PyPy. - -IronPython initially achieved a speed of about 1.8 times that of CPython by -leaving out some details of the language and by leveraging the large investment -that Microsoft has put into making the .NET platform fast; the current, more -complete implementation has roughly the same speed as CPython. In general, the -existing approaches have reached the end of the road, speed-wise. Microsoft's -Dynamic Language Runtime (DLR), often cited in this context, is essentially -only an API to make the techniques pioneered in IronPython official. At best, -it will give another small improvement. - -Another technique regularly mentioned is adding types to the language in order -to speed it up: either explicit optional typing or soft typing (i.e., inferred -"likely" types). For Python, all projects in this area have started with a -simplified subset of the language; no project has scaled up to anything close -to the complete language. This would be a major effort and be platform- and -language-specific. Moreover maintenance would be a headache: we believe that -many changes that are trivial to implement in CPython, are likely to invalidate -previous carefully-tuned optimizations. - -For major improvements in speed, JIT techniques are necessary. 
For Python, -Psyco gives typical speedups of 2 to 4 times - up to 100 times in algorithmic -examples. It has come to a dead end because of the difficulty and huge costs -associated with developing and maintaining it. It has a relatively poor -encoding of language semantics - knowledge about Python behavior needs to be -encoded by hand and kept up-to-date. At least, Psyco works correctly even when -encountering one of the numerous Python constructs it does not support, by -falling back to CPython. The PyPy JIT started out as a metaprogrammatic, -non-language-specific equivalent of Psyco. - -A different kind of prior art are self-hosting JIT compilers such as Jikes. -Jikes is a JIT compiler for Java written in Java. It has a poor encoding of -language semantics; it would take an enormous amount of work to encode all the -details of a Python-like language directly into a JIT compiler. It also has -limited portability, which is an issue for Python; it is likely that large -parts of the JIT compiler would need retargetting in order to run in a -different environment than the intended low-level one. - -Simply reusing an existing well-tuned JIT like that of the JVM does not -really work, because of concept mismatches between the implementor's -language and the host VM language: the former needs to be compiled to -the target environment in such a way that the JIT is able to speed it up -significantly - an approach which essentially has failed in Python so -far: even though CPython is a simple interpreter, its Java and .NET -re-implementations are not significantly faster. - -More recently, several larger projects have started in the JIT area. For -instance, Sun Microsystems is investing in JRuby, which aims to use the Java -Hotspot JIT to improve the performance of Ruby. However, this requires a lot of -hand crafting and will only provide speedups for one language on one platform. 
-Some issues are delicate, e.g., how to remove the overhead of constantly boxing -and unboxing, typical in dynamic languages. An advantage compared to PyPy is -that there are some hand optimizations that can be performed, that do not fit -in the metaprogramming approach. But metaprogramming makes the PyPy JIT -reusable for many different languages on many different execution platforms. -It is also possible to combine the approaches - we can get substantial speedups -using our JIT and then feed the result to Java's Hotspot JIT for further -improvement. One of us is even a member of the `JSR 292`_ Expert Group -to define additions to the JVM to better support dynamic languages, and -is contributing insights from our JIT research, in ways that will also -benefit PyPy. - -Finally, tracing JITs are now emerging for dynamic languages like -JavaScript with TraceMonkey. The code generated by PyPy is very similar -(but not hand-written) to the concepts of tracing JITs. - - -Further reading -======================================================================== - -The description of the current PyPy JIT generator is given in PyJitPl5_ -(draft). - -.. _`JSR 292`: http://jcp.org/en/jsr/detail?id=292 -.. _PyJitPl5: pyjitpl5.html diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. 
They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. 
diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. 
- -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). - -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. - - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. - -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. 
- - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. - - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? - - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. 
The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Upto Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. 
-The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. - -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. 
In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. 
I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. 
- Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. -:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern 
= W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. 
This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. 
- -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. - -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. - -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. 
Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. _rtyping: ../rtyper.html diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. 
So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. 
_`geninterp`: ../geninterp.html diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. _`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. 
W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). - -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). - -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. 
- -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. _`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. 
-There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. 
One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. 
- -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. - -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. 
_`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. - -The steps of the translation process can be summarized as follows: - -* The code object of each source function is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. 
- -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. _`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. 
- -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. _`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. - - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. 
_`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. - - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). - -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. 
Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. - -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. 
The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... - ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) - -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. 
- -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: -.. 
sectnum:: - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. - -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initialize appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows to view -the Virtual's machine bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. 
-they don't have registers but put objects to and pull objects -from a stack. The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer an -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to a bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. -See `application level exceptions`_ for some more information -on ``OperationErrors``. - -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code.
Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettingers -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). - -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. _`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. 
Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls object - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. _Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. 
Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names passed to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the ``create_frame()`` method. With proper parser and compiler support this would -allow creating custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function.
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). 
diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on a heuristic, the default one considers -essentially a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - *
experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. - throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. 
_`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. 
- -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length.
- It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. 
_`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? - [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? 
- [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? - [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? 
that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - 
[10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. 
diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. 
Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. 
_`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. 
- -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. 
- -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. - -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. 
_`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. _`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. 
_`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. _`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. 
- -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. - -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. 
_feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. _`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. 
diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. 
You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. 
- -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. 
- -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: -.. sectnum:: - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. 
_`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. - -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). 
The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. - -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. 
- -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. include:: _ref.txt diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. _mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. 
PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. -It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. 
- -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. 
We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. 
- -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. 
It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. 
-That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. 
It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. 
Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) 
- -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. 
Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will checks for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will be translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. 
_astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. 
- - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly a interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. 
In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. 
Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. -here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. 
- -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. 
- -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... - -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. 
- -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. -(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. 
For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. - -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. 
_`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. 
If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while you test code -runs at application level. If you need to use modules -you have to import them within the test function. - -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b", 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. 
_`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. _`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. 
diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. 
possible to test RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as far as I know.
- -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... } - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... 
must be Python classes themselves, and that classes representing
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. 
_`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. 
_`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. 
_`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. 
Using this object space results in the dump of all operations
The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. 
This allowed us to provide a working implementation with
PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. 
Refer to their doc strings for details about how they should behave.
- -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. _`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -14,6 +14,7 @@ from pypy.jit.metainterp.resoperation import rop, opname, ResOperation from pypy.jit.tool.oparser import pure_parse from pypy.jit.metainterp.test.test_optimizebasic import equaloplists +from pypy.jit.metainterp.optimizeutil import args_dict class Fake(object): failargs_limit = 1000 @@ -161,7 +162,8 @@ assert equaloplists(optimized.operations, expected.operations, False, remap, text_right) - def optimize_loop(self, ops, optops, expected_preamble=None): + def optimize_loop(self, ops, optops, expected_preamble=None, + call_pure_results=None): loop = self.parse(ops) if optops != "crash!": expected = self.parse(optops) @@ -171,6 +173,10 @@ expected_preamble = self.parse(expected_preamble) # self.loop = loop + loop.call_pure_results = args_dict() + if call_pure_results is not None: + for k, v in call_pure_results.items(): + loop.call_pure_results[list(k)] = v loop.preamble = TreeLoop('preamble') loop.preamble.inputargs = loop.inputargs loop.preamble.token = LoopToken() @@ -2893,7 +2899,7 @@ ops = ''' [p1, i1, i4] setfield_gc(p1, i1, descr=valuedescr) - i3 = call_pure(42, p1, descr=plaincalldescr) + i3 = call_pure(p1, 
descr=plaincalldescr) setfield_gc(p1, i3, descr=valuedescr) jump(p1, i4, i3) ''' @@ -2911,7 +2917,7 @@ ops = ''' [p1, i1, i4] setfield_gc(p1, i1, descr=valuedescr) - i3 = call_pure(42, p1, descr=plaincalldescr) + i3 = call_pure(p1, descr=plaincalldescr) setfield_gc(p1, i1, descr=valuedescr) jump(p1, i4, i3) ''' @@ -2931,12 +2937,14 @@ # the result of the call, recorded as the first arg), or turned into # a regular CALL. # XXX can this test be improved with unrolling? + arg_consts = [ConstInt(i) for i in (123456, 4, 5, 6)] + call_pure_results = {tuple(arg_consts): ConstInt(42)} ops = ''' [i0, i1, i2] escape(i1) escape(i2) - i3 = call_pure(42, 123456, 4, 5, 6, descr=plaincalldescr) - i4 = call_pure(43, 123456, 4, i0, 6, descr=plaincalldescr) + i3 = call_pure(123456, 4, 5, 6, descr=plaincalldescr) + i4 = call_pure(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, i3, i4) ''' preamble = ''' @@ -2953,7 +2961,7 @@ i4 = call(123456, 4, i0, 6, descr=plaincalldescr) jump(i0, i4) ''' - self.optimize_loop(ops, expected, preamble) + self.optimize_loop(ops, expected, preamble, call_pure_results) # ---------- diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. _`stackless transform`: ../stackless.html diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. 
diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. 
diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. - -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? 
-------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! - -The mostly likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. - -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. 
PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). - -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. 
_`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. - -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. 
_`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. - -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. 
_`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! - - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. 
__: http://llvm.org/ - -What PyPy contains is, on the one hand, an non-soft static type -inferencer for RPython, which is a sublanguage that we defined just so -that it's possible and not too hard to do that; and on the other hand, -for the full Python language, we have an interpreter, and a JIT -generator which can produce a Just-In-Time Compiler from the -interpreter. The resulting JIT works for the full Python language in a -way that doesn't need type inference at all. - -For more motivation and details about our approach see also [D05.1]_, -section 3. - -.. [BRETT] Brett Cannon, - Localized Type Inference of Atomic Types in Python, - http://www.ocf.berkeley.edu/~bac/thesis.pdf - -.. [D05.1] Compiling Dynamic Language Implementations, - Report from the PyPy project to the E.U., - http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - -.. _`PyPy's RPython`: - ------------------------------- -What is this RPython language? ------------------------------- - -RPython is a restricted subset of the Python language. It is used for -implementing dynamic language interpreters within the PyPy framework. The -restrictions are to ensure that type inference (and so, ultimately, translation -to other languages) of RPython programs is possible. These restrictions only -apply after the full import happens, so at import time arbitrary Python code can -be executed. - -The property of "being RPython" always applies to a full program, not to single -functions or modules (the translation tool chain does a full program analysis). -"Full program" in the context of "being RPython" is all the code reachable from -an "entry point" function. The translation toolchain follows all calls -recursively and discovers what belongs to the program and what not. - -The restrictions that apply to programs to be RPython mostly limit the ability -of mixing types in arbitrary ways. 
RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. -To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. `Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. 
Early attempts were dropped because they -conflicted with refactorings that we needed in order to progress on the -rest of PyPy; the currently active developers of PyPy have different -priorities. If someone wants to start working in that direction I -imagine that he might get a (very little) bit of support from us, -though. - -Alternatively, it's possible to write a mixed-module, i.e. an extension -module for PyPy in RPython, which you can then import from your Python -program when it runs on top of PyPy. This is similar to writing a C -extension module for CPython in term of investment of effort (without -all the INCREF/DECREF mess, though). - ------------------------------------------------------- -What's the ``"NOT_RPYTHON"`` I see in some docstrings? ------------------------------------------------------- - -If you put "NOT_RPYTHON" into the docstring of a function and that function is -found while trying to translate an RPython program, the translation process -stops and reports this as an error. You can therefore mark functions as -"NOT_RPYTHON" to make sure that they are never analyzed. - - -------------------------------------------------------------------- -Couldn't we simply take a Python syntax tree and turn it into Lisp? -------------------------------------------------------------------- - -It's not necessarily nonsense, but it's not really The PyPy Way. It's -pretty hard, without some kind of type inference, to translate, say this -Python:: - - a + b - -into anything significantly more efficient than this Common Lisp:: - - (py:add a b) - -And making type inference possible is what RPython is all about. - -You could make ``#'py:add`` a generic function and see if a given CLOS -implementation is fast enough to give a useful speed (but I think the -coercion rules would probably drive you insane first). -- mwh - --------------------------------------------- -Do I have to rewrite my programs in RPython? --------------------------------------------- - -No. 
PyPy always runs your code in its own interpreter, which is a -full and compliant Python 2.5 interpreter. RPython_ is only the -language in which parts of PyPy itself are written and extension -modules for it. The answer to whether something needs to be written as -an extension module, apart from the "gluing to external libraries" reason, will -change over time as speed for normal Python code improves. - -------------------------- -Which backends are there? -------------------------- - -Currently, there are backends for C_, the CLI_, and the JVM_. -All of these can translate the entire PyPy interpreter. -To learn more about backends take a look at the `translation document`_. - -.. _C: translation.html#the-c-back-end -.. _CLI: cli-backend.html -.. _JVM: translation.html#genjvm -.. _`translation document`: translation.html - ----------------------- -How do I compile PyPy? ----------------------- - -See the `getting-started`_ guide. - -.. _`how do I compile my own interpreters`: - -------------------------------------- -How do I compile my own interpreters? -------------------------------------- - -Start from the example of -`pypy/translator/goal/targetnopstandalone.py`_, which you compile by -typing:: - - python translate.py targetnopstandalone - -You can have a look at intermediate C source code, which is (at the -moment) put in ``/tmp/usession-*/testing_1/testing_1.c``. Of course, -all the functions and stuff used directly and indirectly by your -``entry_point()`` function has to be RPython_. - - -.. _`RPython`: coding-guide.html#rpython -.. _`getting-started`: getting-started.html - -.. include:: _ref.txt - ----------------------------------------------------------- -Why does PyPy draw a Mandelbrot fractal while translating? ----------------------------------------------------------- - -Because it's fun. 
diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.txt deleted file mode 100644 --- a/pypy/doc/discussion/gc.txt +++ /dev/null @@ -1,77 +0,0 @@ - -*Note: these things are experimental and are being implemented on the -`io-improvements`_ branch* - -.. _`io-improvements`: http://codespeak.net/svn/pypy/branch/io-improvements - -============= -GC operations -============= - -This document tries to gather gc-related issues which are very recent -or in-development. Also, it tries to document needed gc refactorings -and expected performance of certain gc-related operations. - -Problem area -============ - -Since some of our gcs are moving, we at some point decided to simplify -the issue of taking care of it by always copying the contents of -data that goes to C level. This yields a performance penalty, also -because some gcs do not move data around anyway. - -So we decided to introduce new operations which will simplify issues -regarding this. - -Pure gc operations -================== - -(All available from rlib.rgc) - -* can_move(p) - returns a flag telling whether pointer p will move. - useful for example when you want to know whether memcopy is safe. - -* malloc_nonmovable(TP, n=None) - tries to allocate a non-moving object. - if it succeeds, it returns an object, otherwise (for whatever reasons) - returns null pointer. Does not raise! (never) - -Usage patterns -============== - -Usually those functions are used via helpers located in rffi. For things like -os.write - first get_nonmovingbuffer(data) that will give you a pointer -suitable for passing to C and finally free_nonmovingbuffer.
- -For os.read like usage - you first call alloc_buffer (that will allocate a -buffer of desired size passable to C) and afterwards create str_from_buffer, -finally calling keep_buffer_alive_until_here. - -String builder -============== - -In Python strings are immutable by design. In RPython this still yields true, -but since we cooperate with lower (C/POSIX) level, which has no notion of -strings, we use buffers. Typical use case is to use list of characters l and -than ''.join(l) in order to get string. This requires a lot of unnecessary -copying, which yields performance penalty for such operations as string -formatting. Hence the idea of string builder. String builder would be an -object to which you can append strings or characters and afterwards build it -to a string. Ideally, this set of operations would not contain any copying -whatsoever. - -Low level gc operations for string builder ------------------------------------------- - -* alloc_buffer(T, size) - allocates Array(nolength=True) with possibility - of later becoming of shape T - -* realloc_buffer(buf, newsize) - tries to shrink or enlarge buffer buf. Returns - new pointer (since it might involve copying) - -* build_buffer(T, buf) - creates a type T (previously passed to alloc_buffer) - from buffer. - -Depending on a gc, those might be implemented dumb (realloc always copies) -or using C-level realloc. Might be implemented also in whatever clever way -comes to mind. - diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.txt deleted file mode 100644 --- a/pypy/doc/config/translation.taggedpointers.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable tagged pointers. This option is mostly useful for the Smalltalk and -Prolog interpreters. For the Python interpreter the option -:config:`objspace.std.withsmallint` should be used. 
diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of the ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/pypy/doc/jit/_ref.txt b/pypy/doc/jit/_ref.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.profopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use GCC's profile-guided optimizations. This option specifies the -arguments with which to call pypy-c (and in general the translated -RPython program) to gather profile data. Example for pypy-c: "-c 'from -richards import main;main(); from test import pystone; -pystone.main()'" diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*.
diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: -.. sectnum:: - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. - - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. 
To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... - >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. 
You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". - -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. 
-You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. 
- -* `pypy/annotation`_ contains the data model for the type annotation that - can be inferred about a graph. The graph "walker" that uses this is in - `pypy/annotation/annrpython.py`_. - -* `pypy/rpython`_ contains the code of the RPython typer. The typer transforms - annotated flow graphs in a way that makes them very similar to C code so - that they can be easily translated. The graph transformations are controlled - by the stuff in `pypy/rpython/rtyper.py`_. The object model that is used can - be found in `pypy/rpython/lltypesystem/lltype.py`_. For each RPython type - there is a file rxxxx.py that contains the low level functions needed for - this type. - -* `pypy/rlib`_ contains the RPython standard library, things that you can - use from rpython. - -.. _optionaltool: - - -Running PyPy's unit tests -------------------------- - -PyPy development always was and is still thoroughly test-driven. -We use the flexible `py.test testing tool`_ which you can `install independently -`_ and use independently -from PyPy for other projects. - -The PyPy source tree comes with an inlined version of ``py.test`` -which you can invoke by typing:: - - python pytest.py -h - -This is usually equivalent to using an installed version:: - - py.test -h - -If you encounter problems with the installed version -make sure you have the correct version installed which -you can find out with the ``--version`` switch. - -Now on to running some tests. PyPy has many different test directories -and you can use shell completion to point at directories or files:: - - py.test pypy/interpreter/test/test_pyframe.py - - # or for running tests of a whole subdirectory - py.test pypy/interpreter/ - -See `py.test usage and invocations`_ for some more generic info -on how you can run tests. - -Beware trying to run "all" pypy tests by pointing to the root -directory or even the top level subdirectory ``pypy``. It takes -hours and uses huge amounts of RAM and is not recommended.
- -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. 
_`interpreter-level and app-level`: coding-guide.html#interpreter-level - -.. _`trace example`: - -Tracing bytecode and operations on objects -++++++++++++++++++++++++++++++++++++++++++ - -You can use the trace object space to monitor the interpretation -of bytecodes in connection with object space operations. To enable -it, set ``__pytrace__=1`` on the interactive PyPy console:: - - >>>> __pytrace__ = 1 - Tracing enabled - >>>> a = 1 + 2 - |- <<<< enter a = 1 + 2 @ 1 >>>> - |- 0 LOAD_CONST 0 (W_IntObject(1)) - |- 3 LOAD_CONST 1 (W_IntObject(2)) - |- 6 BINARY_ADD - |- add(W_IntObject(1), W_IntObject(2)) -> W_IntObject(3) - |- 7 STORE_NAME 0 (a) - |- hash(W_StringObject('a')) -> W_IntObject(-468864544) - |- int_w(W_IntObject(-468864544)) -> -468864544 - |-10 LOAD_CONST 2 () - |-13 RETURN_VALUE - |- <<<< leave a = 1 + 2 @ 1 >>>> - -Demos -------- - -The `demo/`_ directory contains examples of various aspects of PyPy, -ranging from running regular Python programs (that we used as compliance goals) -over experimental distribution mechanisms to examples translating -sufficiently static programs into low level code. - -Additional Tools for running (and hacking) PyPy ------------------------------------------------ - -We use some optional tools for developing PyPy. They are not required to run -the basic tests or to get an interactive PyPy prompt but they help to -understand and debug PyPy especially for the translation process. - -graphviz & pygame for flow graph viewing (highly recommended) -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -graphviz and pygame are both necessary if you -want to look at generated flow graphs: - - graphviz: http://www.graphviz.org/Download.php - - pygame: http://www.pygame.org/download.shtml - -CTypes on Python 2.4 -++++++++++++++++++++++++++++ - -`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users needs to -install it if they want to run low-level tests. See -the `download page of ctypes`_. - -.. 
_`download page of ctypes`: http://sourceforge.net/project/showfiles.php?group_id=71702 -.. _`ctypes`: http://starship.python.net/crew/theller/ctypes/ - -.. _`py.test`: - -py.test and the py lib -+++++++++++++++++++++++ - -The `py.test testing tool`_ drives all our testing needs. - -We use the `py library`_ for filesystem path manipulations, terminal -writing, logging and some other support functionality. - -You don't neccessarily need to install these two libraries because -we also ship them inlined in the PyPy source tree. - -Getting involved ------------------ - -PyPy employs an open development process. You are invited to join our -`pypy-dev mailing list`_ or look at the other `contact -possibilities`_. Usually we give out commit rights fairly liberally, so if you -want to do something with PyPy, you can become a committer. We are also doing -coding Sprints which are -separately announced and often happen around Python conferences such -as EuroPython or Pycon. Upcoming events are usually announced on `the blog`_. - -.. _`full Python interpreter`: getting-started-python.html -.. _`the blog`: http://morepypy.blogspot.com -.. _`pypy-dev mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`contact possibilities`: index.html - -.. _`py library`: http://pylib.org - -.. _`Spidermonkey`: http://www.mozilla.org/js/spidermonkey/ - -.. _`.NET Framework SDK 2.0`: http://msdn.microsoft.com/netframework/downloads/updates/default.aspx -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _clr: clr-module.html - -.. _`Dot Graphviz`: http://www.graphviz.org/ -.. _Pygame: http://www.pygame.org/ -.. _pyopcode.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyopcode.py -.. _eval.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/eval.py -.. _pyframe.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyframe.py -.. _function.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/function.py -.. 
_argument.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/argument.py -.. _baseobjspace.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/baseobjspace.py -.. _module.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/module.py -.. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py -.. _typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py -.. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py -.. _mailing lists: index.html -.. _documentation: docindex.html -.. _unit tests: coding-guide.html#test-design - -.. _`directory reference`: docindex.html#directory-reference - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.txt deleted file mode 100644 --- a/pypy/doc/discussion/finalizer-order.txt +++ /dev/null @@ -1,166 +0,0 @@ -Ordering finalizers in the SemiSpace GC -======================================= - -Goal ----- - -After a collection, the SemiSpace GC should call the finalizers on -*some* of the objects that have one and that have become unreachable. -Basically, if there is a reference chain from an object a to an object b -then it should not call the finalizer for b immediately, but just keep b -alive and try again to call its finalizer after the next collection. - -This basic idea fails when there are cycles. It's not a good idea to -keep the objects alive forever or to never call any of the finalizers. -The model we came up with is that in this case, we could just call the -finalizer of one of the objects in the cycle -- but only, of course, if -there are no other objects outside the cycle that has a finalizer and a -reference to the cycle. 
- -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. 
First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
- -In the MiniMark GC, the objects don't move (apart from when they are -copied out of the nursery), but we use the flag GCFLAG_VISITED to mark -objects that survive, so we can also have a single extra bit for -finalizers: - - ===== ============== ============================ - state GCFLAG_VISITED GCFLAG_FINALIZATION_ORDERING - ===== ============== ============================ - 0 no no - 1 no yes - 2 yes yes - 3 yes no - ===== ============== ============================ diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.txt deleted file mode 100644 --- a/pypy/doc/how-to-release.txt +++ /dev/null @@ -1,54 +0,0 @@ -Making a PyPy Release -======================= - -Overview ---------- - -As a meta rule, setting up issues in the tracker for items here may help not -forgetting things. A set of todo files may also work. - -Check and prioritize all issues for the release, postpone some if necessary, -create new issues also as necessary. A meeting (or meetings) should be -organized to decide what things are priorities, should go in and work for -the release. - -An important thing is to get the documentation into an up-to-date state! - -Release Steps ----------------- - -* at code freeze make a release branch under - http://codespeak.net/svn/pypy/release/x.y(.z).
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backend are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? - -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. 
The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. 
The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. - -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). 
This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. - - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. 
- -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. - - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. 
_Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. 
The __del__ methods are -called in "the right" order if they are on objects pointing to each -other, as in CPython, but unlike CPython, if there is a dead cycle of -objects referencing each other, their __del__ methods are called anyway; -CPython would instead put them into the list ``garbage`` of the ``gc`` -module. More information is available on the blog `[1]`__ `[2]`__. - -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html - -Using the default GC called ``minimark``, the built-in function ``id()`` -works like it does in CPython. With other GCs it returns numbers that -are not real addresses (because an object can move around several times) -and calling it a lot can lead to performance problem. - -Note that if you have a long chain of objects, each with a reference to -the next one, and each with a __del__, PyPy's GC will perform badly. On -the bright side, in most other cases, benchmarks have shown that PyPy's -GCs perform much better than CPython's. - -Another difference is that if you add a ``__del__`` to an existing class it will -not be called:: - - >>>> class A(object): - .... pass - .... - >>>> A.__del__ = lambda self: None - __main__:1: RuntimeWarning: a __del__ method added to an existing type will not be called - - -Subclasses of built-in types ----------------------------- - -Officially, CPython has no rule at all for when exactly -overridden method of subclasses of built-in types get -implicitly called or not. As an approximation, these methods -are never called by other built-in methods of the same object. -For example, an overridden ``__getitem__()`` in a subclass of -``dict`` will not be called by e.g. the built-in ``get()`` -method. - -The above is true both in CPython and in PyPy. Differences -can occur about whether a built-in function or method will -call an overridden method of *another* object than ``self``. 
-In PyPy, they are generally always called, whereas not in -CPython. For example, in PyPy, ``dict1.update(dict2)`` -considers that ``dict2`` is just a general mapping object, and -will thus call overridden ``keys()`` and ``__getitem__()`` -methods on it. So the following code prints ``42`` on PyPy -but ``foo`` on CPython:: - - >>>> class D(dict): - .... def __getitem__(self, key): - .... return 42 - .... - >>>> - >>>> d1 = {} - >>>> d2 = D(a='foo') - >>>> d1.update(d2) - >>>> print d1['a'] - 42 - - -Ignored exceptions ------------------------ - -In many corner cases, CPython can silently swallow exceptions. -The precise list of when this occurs is rather long, even -though most cases are very uncommon. The most well-known -places are custom rich comparison methods (like \_\_eq\_\_); -dictionary lookup; calls to some built-in functions like -isinstance(). - -Unless this behavior is clearly present by design and -documented as such (as e.g. for hasattr()), in most cases PyPy -lets the exception propagate instead. - - -Miscellaneous -------------- - -* ``sys.setrecursionlimit()`` is ignored (and not needed) on - PyPy. On CPython it would set the maximum number of nested - calls that can occur before a RuntimeError is raised; on PyPy - overflowing the stack also causes RuntimeErrors, but the limit - is checked at a lower level. (The limit is currently hard-coded - at 768 KB, corresponding to roughly 1480 Python calls on - Linux.) - -* assignment to ``__class__`` is limited to the cases where it - works on CPython 2.5. On CPython 2.6 and 2.7 it works in a bit - more cases, which are not supported by PyPy so far. (If needed, - it could be supported, but then it will likely work in many - *more* case on PyPy than on CPython 2.6/2.7.) - - -.. 
include:: _ref.txt diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... 
- -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. 
- -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). 
- -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... 
A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). 
- -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. 
diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. 
- -Extensible type system for llexternal -------------------------------------- - -llexternal allows the description of a C function, and conveys the same -information about the arguments as a C header. But this is often not enough. -For example, a parameter of type `int*` is converted to -`rffi.CArrayPtr(rffi.INT)`, but this information is not enough to use the -function. The parameter could be an array of int, a reference to a single value, -for input or output... - -A "type system" could hold this additional information, and automatically -generate some conversion code to ease the usage of the function from -rpython. For example:: - - # double frexp(double x, int *exp); - frexp = llexternal("frexp", [rffi.DOUBLE, OutPtr(rffi.int)], rffi.DOUBLE) - -`OutPtr` indicates that the parameter is output-only, which need not to be -initialized, and which *value* is returned to the caller. In rpython the call -becomes:: - - fraction, exponent = frexp(value) - -Also, we could imagine that one item in the llexternal argument list corresponds -to two parameters in C. Here, OutCharBufferN indicates that the caller will pass -a rpython string; the framework will pass buffer and length to the function:: - - # ssize_t write(int fd, const void *buf, size_t count); - write = llexternal("write", [rffi.INT, CharBufferAndSize], rffi.SSIZE_T) - -The rpython code that calls this function is very simple:: - - written = write(fd, data) - -compared with the present:: - - count = len(data) - buf = rffi.get_nonmovingbuffer(data) - try: - written = rffi.cast(lltype.Signed, os_write( - rffi.cast(rffi.INT, fd), - buf, rffi.cast(rffi.SIZE_T, count))) - finally: - rffi.free_nonmovingbuffer(data, buf) - -Typemaps are very useful for large APIs where the same conversions are needed in -many places. 
XXX example diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== -Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! 
- -Object Optimizations -==================== - -String Optimizations --------------------- - -String-Join Objects -+++++++++++++++++++ - -String-join objects are a different implementation of the Python ``str`` type, -They represent the lazy addition of several strings without actually performing -the addition (which involves copying etc.). When the actual value of the string -join object is needed, the addition is performed. This makes it possible to -perform repeated string additions in a loop without using the -``"".join(list_of_strings)`` pattern. - -You can enable this feature enable with the :config:`objspace.std.withstrjoin` -option. - -String-Slice Objects -++++++++++++++++++++ - -String-slice objects are another implementation of the Python ``str`` type. -They represent the lazy slicing of a string without actually performing the -slicing (which would involve copying). This is only done for slices of step -one. When the actual value of the string slice object is needed, the slicing -is done (although a lot of string methods don't make this necessary). This -makes string slicing a very efficient operation. It also saves memory in some -cases but can also lead to memory leaks, since the string slice retains a -reference to the original string (to make this a bit less likely, we don't -use lazy slicing when the slice would be much shorter than the original -string. There is also a minimum number of characters below which being lazy -is not saving any time over making the copy). - -You can enable this feature with the :config:`objspace.std.withstrslice` option. - -Ropes -+++++ - -Ropes are a general flexible string implementation, following the paper `"Ropes: -An alternative to Strings."`_ by Boehm, Atkinson and Plass. Strings are -represented as balanced concatenation trees, which makes slicing and -concatenation of huge strings efficient. 
- -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. _`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. 
Multi-dicts are a general way to do that for dictionaries: they -provide generic support for the switching of internal representations for -dicts. - -If you just enable multi-dicts, special representations for empty dictionaries, -for string-keyed dictionaries. In addition there are more specialized dictionary -implementations for various purposes (see below). - -This is now the default implementation of dictionaries in the Python interpreter. -option. - -Sharing Dicts -+++++++++++++ - -Sharing dictionaries are a special representation used together with multidicts. -This dict representation is used only for instance dictionaries and tries to -make instance dictionaries use less memory (in fact, in the ideal case the -memory behaviour should be mostly like that of using __slots__). - -The idea is the following: Most instances of the same class have very similar -attributes, and are even adding these keys to the dictionary in the same order -while ``__init__()`` is being executed. That means that all the dictionaries of -these instances look very similar: they have the same set of keys with different -values per instance. What sharing dicts do is store these common keys into a -common structure object and thus save the space in the individual instance -dicts: -the representation of the instance dict contains only a list of values. - -A more advanced version of sharing dicts, called *map dicts,* is available -with the :config:`objspace.std.withmapdict` option. - -Builtin-Shadowing -+++++++++++++++++ - -Usually the calling of builtins in Python requires two dictionary lookups: first -to see whether the current global dictionary contains an object with the same -name, then a lookup in the ``__builtin__`` dictionary. This is somehow -circumvented by storing an often used builtin into a local variable to get -the fast local lookup (which is a rather strange and ugly hack). - -The same problem is solved in a different way by "wary" dictionaries. 
They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). 
For this a special dict -representation is used together with multidicts. This dict representation is -used only for instance dictionaries. The instance dictionary tracks whether an -instance attribute shadows an attribute of its class. This makes method calls -slightly faster in the following way: When calling a method the first thing that -is checked is the class dictionary to find descriptors. Normally, when a method -is found, the instance dictionary is then checked for instance attributes -shadowing the class attribute. If we know that there is no shadowing (since -instance dict tells us that) we can save this lookup on the instance dictionary. - -*This was deprecated and is no longer available.* - - -Method Caching -++++++++++++++ - -Shadow tracking is also an important building block for the method caching -optimization. A method cache is introduced where the result of a method lookup -is stored (which involves potentially many lookups in the base classes of a -class). Entries in the method cache are stored using a hash computed from -the name being looked up, the call site (i.e. the bytecode object and -the current program counter), and a special "version" of the type where the -lookup happens (this version is incremented every time the type or one of its -base classes is changed). On subsequent lookups the cached version can be used, -as long as the instance did not shadow any of its classes attributes. - -You can enable this feature with the :config:`objspace.std.withmethodcache` -option. - -Interpreter Optimizations -========================= - -Special Bytecodes ------------------ - -.. _`lookup method call method`: - -LOOKUP_METHOD & CALL_METHOD -+++++++++++++++++++++++++++ - -An unusual feature of Python's version of object oriented programming is the -concept of a "bound method". While the concept is clean and powerful, the -allocation and initialization of the object is not without its performance cost. 
-We have implemented a pair of bytecodes that alleviate this cost. - -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
- -After pushing the arguments, the layout of the stack in the above -example is as follows (the stack grows upwards): - -+---------------------------------+ -| ``y`` *(2nd arg)* | -+---------------------------------+ -| ``x`` *(1st arg)* | -+---------------------------------+ -| ``obj`` *(im_self)* | -+---------------------------------+ -| ``function object`` *(im_func)* | -+---------------------------------+ - -The ``CALL_METHOD N`` bytecode emulates a bound method call by -inspecting the *im_self* entry in the stack below the ``N`` arguments: -if it is not None, then it is considered to be an additional first -argument in the call to the *im_func* object from the stack. - -You can enable this feature with the :config:`objspace.opcodes.CALL_METHOD` -option. - -.. _`call likely builtin`: - -CALL_LIKELY_BUILTIN -+++++++++++++++++++ - -A often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the the builtins dictionary. PyPy approaches this problem at the -implementation level, with the introduction of the new ``CALL_LIKELY_BUILTIN`` -bytecode. This bytecode is produced by the compiler for a call whose target is -the name of a builtin. Since such a syntactic construct is very often actually -invoking the expected builtin at run-time, this information can be used to make -the call to the builtin directly, without going through any dictionary lookup. - -However, it can occur that the name is shadowed by a global name from the -current module. To catch this case, a special dictionary implementation for -multidicts is introduced, which is used for the dictionaries of modules. This -implementation keeps track which builtin name is shadowed by it. 
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. 
From commits-noreply at bitbucket.org Sat Mar 19 04:45:11 2011 From: commits-noreply at bitbucket.org (ademan) Date: Sat, 19 Mar 2011 04:45:11 +0100 (CET) Subject: [pypy-svn] pypy fold_intadd: Changed test to reflect my optimizeopt's decision to emit int_sub(iX, -x) when x < 0 Message-ID: <20110319034511.8510A282BAA@codespeak.net> Author: Daniel Roberts Branch: fold_intadd Changeset: r42802:386510d0fb45 Date: 2011-03-18 20:41 -0700 http://bitbucket.org/pypy/pypy/changeset/386510d0fb45/ Log: Changed test to reflect my optimizeopt's decision to emit int_sub(iX, -x) when x < 0 diff --git a/pypy/jit/metainterp/test/test_jitdriver.py b/pypy/jit/metainterp/test/test_jitdriver.py --- a/pypy/jit/metainterp/test/test_jitdriver.py +++ b/pypy/jit/metainterp/test/test_jitdriver.py @@ -37,7 +37,8 @@ assert res == loop2(4, 40) # we expect only one int_sub, corresponding to the single # compiled instance of loop1() - self.check_loops(int_sub=1) + # XXX: OptAddition turns + (-1) into - 1 + self.check_loops(int_sub=2) # the following numbers are not really expectations of the test # itself, but just the numbers that we got after looking carefully # at the generated machine code @@ -97,8 +98,8 @@ # res = self.meta_interp(loop2, [4, 40], repeat=7, inline=True) assert res == loop2(4, 40) - # we expect no int_sub, but a residual call - self.check_loops(int_sub=0, call=1) + # one int_sub, a residual call, and no int_add + self.check_loops(int_add=1, int_sub=1, call=1) def test_multiple_jits_trace_too_long(self): myjitdriver1 = JitDriver(greens=["n"], reds=["i", "box"]) From commits-noreply at bitbucket.org Sat Mar 19 20:16:15 2011 From: commits-noreply at bitbucket.org (hpk42) Date: Sat, 19 Mar 2011 20:16:15 +0100 (CET) Subject: [pypy-svn] pypy default: update to pytest-2.0.3.dev3, containing few fixes/speedups Message-ID: <20110319191615.CDDEC282BAD@codespeak.net> Author: holger krekel Branch: Changeset: r42803:143c77a3550f Date: 2011-03-19 20:15 +0100 
http://bitbucket.org/pypy/pypy/changeset/143c77a3550f/ Log: update to pytest-2.0.3.dev3, containing few fixes/speedups diff --git a/_pytest/__init__.py b/_pytest/__init__.py --- a/_pytest/__init__.py +++ b/_pytest/__init__.py @@ -1,1 +1,2 @@ # +__version__ = '2.0.3.dev3' diff --git a/_pytest/python.py b/_pytest/python.py --- a/_pytest/python.py +++ b/_pytest/python.py @@ -70,11 +70,13 @@ res = __multicall__.execute() if res is not None: return res - if collector._istestclasscandidate(name, obj): + if inspect.isclass(obj): #if hasattr(collector.obj, 'unittest'): # return # we assume it's a mixin class for a TestCase derived one - Class = collector._getcustomclass("Class") - return Class(name, parent=collector) + if collector.classnamefilter(name): + if not hasinit(obj): + Class = collector._getcustomclass("Class") + return Class(name, parent=collector) elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): if is_generator(obj): return Generator(name, parent=collector) @@ -194,14 +196,6 @@ return self.ihook.pytest_pycollect_makeitem( collector=self, name=name, obj=obj) - def _istestclasscandidate(self, name, obj): - if self.classnamefilter(name) and \ - inspect.isclass(obj): - if hasinit(obj): - # XXX WARN - return False - return True - def _genfunctions(self, name, funcobj): module = self.getparent(Module).obj clscol = self.getparent(Class) diff --git a/_pytest/core.py b/_pytest/core.py --- a/_pytest/core.py +++ b/_pytest/core.py @@ -164,14 +164,17 @@ def consider_preparse(self, args): for opt1,opt2 in zip(args, args[1:]): if opt1 == "-p": - if opt2.startswith("no:"): - name = opt2[3:] - if self.getplugin(name) is not None: - self.unregister(None, name=name) - self._name2plugin[name] = -1 - else: - if self.getplugin(opt2) is None: - self.import_plugin(opt2) + self.consider_pluginarg(opt2) + + def consider_pluginarg(self, arg): + if arg.startswith("no:"): + name = arg[3:] + if self.getplugin(name) is not None: + self.unregister(None, name=name) + 
self._name2plugin[name] = -1 + else: + if self.getplugin(arg) is None: + self.import_plugin(arg) def consider_conftest(self, conftestmodule): if self.register(conftestmodule, name=conftestmodule.__file__): diff --git a/_pytest/config.py b/_pytest/config.py --- a/_pytest/config.py +++ b/_pytest/config.py @@ -252,6 +252,16 @@ self.hook = self.pluginmanager.hook self._inicache = {} + @classmethod + def fromdictargs(cls, option_dict, args): + """ constructor useable for subprocesses. """ + config = cls() + config._preparse(args, addopts=False) + config.option.__dict__.update(option_dict) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + def _onimportconftest(self, conftestmodule): self.trace("loaded conftestmodule %r" %(conftestmodule,)) self.pluginmanager.consider_conftest(conftestmodule) diff --git a/pytest.py b/pytest.py --- a/pytest.py +++ b/pytest.py @@ -3,11 +3,11 @@ (pypy version of startup script) see http://pytest.org for details. """ -__version__ = '2.0.3.dev1' # base pytest version __all__ = ['main'] from _pytest.core import main, UsageError, _preloadplugins from _pytest import core as cmdline +from _pytest import __version__ # This pytest.py script is located in the pypy source tree # which has a copy of pytest and py within its source tree. 
diff --git a/_pytest/junitxml.py b/_pytest/junitxml.py --- a/_pytest/junitxml.py +++ b/_pytest/junitxml.py @@ -106,7 +106,13 @@ '%s', report.keywords['xfail']) else: - self.appendlog("") + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = skipreason[9:] + self.appendlog('%s', + skipreason, "%s:%s: %s" % report.longrepr, + ) self._closetestcase() self.skipped += 1 From commits-noreply at bitbucket.org Sun Mar 20 16:17:30 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Sun, 20 Mar 2011 16:17:30 +0100 (CET) Subject: [pypy-svn] pypy default: remove some unised imports/aliases in the translator driver Message-ID: <20110320151730.266EC282B90@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42804:c2c975392d7a Date: 2011-03-20 16:16 +0100 http://bitbucket.org/pypy/pypy/changeset/c2c975392d7a/ Log: remove some unised imports/aliases in the translator driver diff --git a/pypy/translator/driver.py b/pypy/translator/driver.py --- a/pypy/translator/driver.py +++ b/pypy/translator/driver.py @@ -2,14 +2,13 @@ import os.path import shutil -from pypy.translator.translator import TranslationContext, graphof +from pypy.translator.translator import TranslationContext from pypy.translator.tool.taskengine import SimpleTaskEngine from pypy.translator.goal import query from pypy.translator.goal.timing import Timer from pypy.annotation import model as annmodel from pypy.annotation.listdef import s_list_of_strings from pypy.annotation import policy as annpolicy -import optparse from pypy.tool.udir import udir from pypy.tool.debug_print import debug_start, debug_print, debug_stop from pypy.rlib.entrypoint import secondary_entrypoints @@ -229,8 +228,7 @@ if backend != 'c' or sys.platform == 'win32': raise Exception("instrumentation requires the c backend" " and unix for now") - from pypy.tool.udir import udir - + datafile = udir.join('_instrument_counters') makeProfInstrument = lambda compiler: 
ProfInstrument(datafile, compiler) @@ -518,7 +516,6 @@ def task_source_c(self): """ Create C source files from the generated database """ - translator = self.translator cbuilder = self.cbuilder database = self.database if self._backend_extra_options.get('c_debug_defines', False): From commits-noreply at bitbucket.org Sun Mar 20 17:45:41 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 20 Mar 2011 17:45:41 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Replace 'call_may_force' with 'call_release_gil' in order to call Message-ID: <20110320164541.6552D282B90@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42805:15b1142a750c Date: 2011-03-20 17:45 +0100 http://bitbucket.org/pypy/pypy/changeset/15b1142a750c/ Log: Replace 'call_may_force' with 'call_release_gil' in order to call external C functions. The idea is that 'call_release_gil' should release the GIL and, for the GC, close the stack in the real backend. diff --git a/pypy/jit/metainterp/optimizeopt/fficall.py b/pypy/jit/metainterp/optimizeopt/fficall.py --- a/pypy/jit/metainterp/optimizeopt/fficall.py +++ b/pypy/jit/metainterp/optimizeopt/fficall.py @@ -73,7 +73,7 @@ def setup(self): self.funcinfo = None - self.logger = self.optimizer.metainterp_sd.logger_noopt + self.logger = self.optimizer.metainterp_sd.logger_ops def propagate_begin_forward(self): debug_start('jit-log-ffiopt') @@ -188,7 +188,7 @@ for push_op in funcinfo.opargs: argval = self.getvalue(push_op.getarg(2)) arglist.append(argval.force_box()) - newop = ResOperation(rop.CALL_MAY_FORCE, arglist, op.result, + newop = ResOperation(rop.CALL_RELEASE_GIL, arglist, op.result, descr=funcinfo.descr) self.commit_optimization() ops = [] diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -161,6 +161,7 @@ assert opnum != rop.CALL_PURE if (opnum == rop.CALL or opnum == rop.CALL_MAY_FORCE or + opnum 
== rop.CALL_RELEASE_GIL or opnum == rop.CALL_ASSEMBLER): if opnum == rop.CALL_ASSEMBLER: effectinfo = None @@ -235,7 +236,7 @@ opnum = prevop.getopnum() lastop_args = lastop.getarglist() if ((prevop.is_comparison() or opnum == rop.CALL_MAY_FORCE - or prevop.is_ovf()) + or opnum == rop.CALL_RELEASE_GIL or prevop.is_ovf()) and prevop.result not in lastop_args): newoperations[-2] = lastop newoperations[-1] = prevop diff --git a/pypy/jit/metainterp/executor.py b/pypy/jit/metainterp/executor.py --- a/pypy/jit/metainterp/executor.py +++ b/pypy/jit/metainterp/executor.py @@ -80,9 +80,6 @@ do_call_loopinvariant = do_call do_call_may_force = do_call -def do_call_c(cpu, metainterp, argboxes, descr): - raise NotImplementedError("Should never be called directly") - def do_getarrayitem_gc(cpu, _, arraybox, indexbox, arraydescr): array = arraybox.getref_base() index = indexbox.getint() @@ -309,6 +306,7 @@ rop.DEBUG_MERGE_POINT, rop.JIT_DEBUG, rop.SETARRAYITEM_RAW, + rop.CALL_RELEASE_GIL, ): # list of opcodes never executed by pyjitpl continue raise AssertionError("missing %r" % (key,)) diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -534,7 +534,7 @@ func_adr = llmemory.cast_ptr_to_adr(c_tolower.funcsym) funcbox = ConstInt(heaptracker.adr2int(func_adr)) calldescr = self.cpu.calldescrof_dynamic([types.uchar], types.sint) - res = self.execute_operation(rop.CALL, + res = self.execute_operation(rop.CALL_RELEASE_GIL, [funcbox, BoxInt(ord('A'))], 'int', descr=calldescr) diff --git a/pypy/rlib/test/test_libffi.py b/pypy/rlib/test/test_libffi.py --- a/pypy/rlib/test/test_libffi.py +++ b/pypy/rlib/test/test_libffi.py @@ -159,7 +159,7 @@ res = self.call(func, [38, 4.2], rffi.LONG) assert res == 42 self.check_loops({ - 'call_may_force': 1, + 'call_release_gil': 1, 'guard_no_exception': 1, 'guard_not_forced': 1, 'int_add': 1, @@ -172,7 +172,7 @@ func = (libm, 
'pow', [types.double, types.double], types.double) res = self.call(func, [2.0, 3.0], rffi.DOUBLE, init_result=0.0) assert res == 8.0 - self.check_loops(call_may_force=1, guard_no_exception=1, guard_not_forced=1) + self.check_loops(call_release_gil=1, guard_no_exception=1, guard_not_forced=1) def test_cast_result(self): """ @@ -185,7 +185,7 @@ func = (libfoo, 'cast_to_uchar_and_ovf', [types.sint], types.uchar) res = self.call(func, [0], rffi.UCHAR) assert res == 200 - self.check_loops(call_may_force=1, guard_no_exception=1, guard_not_forced=1) + self.check_loops(call_release_gil=1, guard_no_exception=1, guard_not_forced=1) def test_cast_argument(self): """ diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -807,6 +807,12 @@ raise NotImplementedError def op_call(self, calldescr, func, *args): + return self._do_call(calldescr, func, args, call_with_llptr=False) + + def op_call_release_gil(self, calldescr, func, *args): + return self._do_call(calldescr, func, args, call_with_llptr=True) + + def _do_call(self, calldescr, func, args, call_with_llptr): global _last_exception assert _last_exception is None, "exception left behind" assert _call_args_i == _call_args_r == _call_args_f == [] @@ -825,7 +831,8 @@ else: raise TypeError(x) try: - return _do_call_common(func, args_in_order, calldescr) + return _do_call_common(func, args_in_order, calldescr, + call_with_llptr) except LLException, lle: _last_exception = lle d = {'v': None, @@ -1452,17 +1459,20 @@ 'v': lltype.Void, } -def _do_call_common(f, args_in_order=None, calldescr=None): +def _do_call_common(f, args_in_order=None, calldescr=None, + call_with_llptr=False): ptr = llmemory.cast_int_to_adr(f).ptr PTR = lltype.typeOf(ptr) if PTR == rffi.VOIDP: # it's a pointer to a C function, so we don't have a precise # signature: create one from the descr + assert call_with_llptr is True ARGS = map(kind2TYPE.get, 
calldescr.arg_types) RESULT = kind2TYPE[calldescr.typeinfo] FUNC = lltype.FuncType(ARGS, RESULT) func_to_call = rffi.cast(lltype.Ptr(FUNC), ptr) else: + assert call_with_llptr is False FUNC = PTR.TO ARGS = FUNC.ARGS func_to_call = ptr._obj._callable diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -482,6 +482,7 @@ 'CALL_ASSEMBLER/*d', # call already compiled assembler 'CALL_MAY_FORCE/*d', 'CALL_LOOPINVARIANT/*d', + 'CALL_RELEASE_GIL/*d', # release the GIL and "close the stack" for asmgcc #'OOSEND', # ootype operation #'OOSEND_PURE', # ootype operation 'CALL_PURE/*d', # removed before it's passed to the backend diff --git a/pypy/jit/metainterp/test/test_optimizefficall.py b/pypy/jit/metainterp/test/test_optimizefficall.py --- a/pypy/jit/metainterp/test/test_optimizefficall.py +++ b/pypy/jit/metainterp/test/test_optimizefficall.py @@ -78,7 +78,7 @@ """ expected = """ [i0, f1] - i3 = call_may_force(12345, i0, f1, descr=int_float__int) + i3 = call_release_gil(12345, i0, f1, descr=int_float__int) guard_not_forced() [] guard_no_exception() [] jump(i3, f1) @@ -101,7 +101,7 @@ def test_handle_virtualizables(self): # this test needs an explanation to understand what goes on: see the - # coment in optimize_FORCE_TOKEN + # comment in optimize_FORCE_TOKEN ops = """ [i0, f1, p2] call(0, ConstPtr(func), descr=libffi_prepare) @@ -118,7 +118,7 @@ [i0, f1, p2] i4 = force_token() setfield_gc(p2, i4, descr=vable_token_descr) - i3 = call_may_force(12345, i0, f1, descr=int_float__int) + i3 = call_release_gil(12345, i0, f1, descr=int_float__int) guard_not_forced() [p2] guard_no_exception() [p2] jump(i3, f1, p2) @@ -215,7 +215,7 @@ call(0, ConstPtr(func), descr=libffi_prepare) # # this "nested" call is nicely optimized - i4 = call_may_force(67890, i0, f1, descr=int_float__int) + i4 = call_release_gil(67890, i0, f1, descr=int_float__int) guard_not_forced() [] 
guard_no_exception() [] # @@ -260,7 +260,7 @@ expected = """ [i0, f1, p2] setfield_gc(p2, i0, descr=valuedescr) - i3 = call_may_force(12345, i0, f1, descr=int_float__int) + i3 = call_release_gil(12345, i0, f1, descr=int_float__int) guard_not_forced() [] guard_no_exception() [] jump(i3, f1, p2) From commits-noreply at bitbucket.org Sun Mar 20 18:56:53 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 20 Mar 2011 18:56:53 +0100 (CET) Subject: [pypy-svn] pypy default: (alex, with review from arigo and antocuni) Improve function defaults by showing the JIT that the memory they reside in is constant, evne if function.func_defaults isn't Message-ID: <20110320175653.3E7B136C20C@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42806:b494ff677353 Date: 2011-03-20 13:56 -0400 http://bitbucket.org/pypy/pypy/changeset/b494ff677353/ Log: (alex, with review from arigo and antocuni) Improve function defaults by showing the JIT that the memory they reside in is constant, evne if function.func_defaults isn't diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py --- a/pypy/interpreter/function.py +++ b/pypy/interpreter/function.py @@ -21,6 +21,15 @@ assert not func.can_change_code return func.code +class Defaults(object): + _immutable_fields_ = ["items[*]"] + + def __init__(self, items): + self.items = items + + def getitems(self): + return jit.hint(self, promote=True).items + class Function(Wrappable): """A function is a code object captured with some environment: an object space, a dictionary of globals, default arguments, @@ -36,8 +45,7 @@ self.code = code # Code instance self.w_func_globals = w_globals # the globals dictionary self.closure = closure # normally, list of Cell instances or None - self.defs_w = defs_w # list of w_default's - make_sure_not_resized(self.defs_w) + self.defs = Defaults(defs_w) # wrapper around list of w_default's self.w_func_dict = None # filled out below if needed self.w_module = None @@ -87,7 +95,7 @@ assert 
isinstance(code, gateway.BuiltinCode4) return code.fastcall_4(self.space, self, args_w[0], args_w[1], args_w[2], args_w[3]) - elif (nargs|PyCode.FLATPYCALL) == fast_natural_arity: + elif (nargs | PyCode.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) if nargs < 5: new_frame = self.space.createframe(code, self.w_func_globals, @@ -129,15 +137,15 @@ return code.fastcall_4(self.space, self, frame.peekvalue(3), frame.peekvalue(2), frame.peekvalue(1), frame.peekvalue(0)) - elif (nargs|Code.FLATPYCALL) == fast_natural_arity: + elif (nargs | Code.FLATPYCALL) == fast_natural_arity: assert isinstance(code, PyCode) return self._flat_pycall(code, nargs, frame) - elif fast_natural_arity&Code.FLATPYCALL: - natural_arity = fast_natural_arity&0xff - if natural_arity > nargs >= natural_arity-len(self.defs_w): + elif fast_natural_arity & Code.FLATPYCALL: + natural_arity = fast_natural_arity & 0xff + if natural_arity > nargs >= natural_arity - len(self.defs.getitems()): assert isinstance(code, PyCode) return self._flat_pycall_defaults(code, nargs, frame, - natural_arity-nargs) + natural_arity - nargs) elif fast_natural_arity == Code.PASSTHROUGHARGS1 and nargs >= 1: assert isinstance(code, gateway.BuiltinCodePassThroughArguments1) w_obj = frame.peekvalue(nargs-1) @@ -167,9 +175,9 @@ w_arg = frame.peekvalue(nargs-1-i) new_frame.fastlocals_w[i] = w_arg - defs_w = self.defs_w + defs_w = self.defs.getitems() ndefs = len(defs_w) - start = ndefs-defs_to_load + start = ndefs - defs_to_load i = nargs for j in xrange(start, ndefs): new_frame.fastlocals_w[i] = defs_w[j] @@ -182,8 +190,10 @@ return self.w_func_dict def setdict(self, space, w_dict): - if not space.is_true(space.isinstance( w_dict, space.w_dict )): - raise OperationError( space.w_TypeError, space.wrap("setting function's dictionary to a non-dict") ) + if not space.isinstance_w(w_dict, space.w_dict): + raise OperationError(space.w_TypeError, + space.wrap("setting function's dictionary to a non-dict") + ) 
self.w_func_dict = w_dict def descr_function__new__(space, w_subtype, w_code, w_globals, @@ -286,7 +296,7 @@ w(self.code), w_func_globals, w_closure, - nt(self.defs_w), + nt(self.defs.getitems()), w_func_dict, self.w_module, ] @@ -296,7 +306,7 @@ from pypy.interpreter.pycode import PyCode args_w = space.unpackiterable(w_args) try: - (w_name, w_doc, w_code, w_func_globals, w_closure, w_defs_w, + (w_name, w_doc, w_code, w_func_globals, w_closure, w_defs, w_func_dict, w_module) = args_w except ValueError: # wrong args @@ -321,25 +331,28 @@ if space.is_w(w_func_dict, space.w_None): w_func_dict = None self.w_func_dict = w_func_dict - self.defs_w = space.fixedview(w_defs_w) + self.defs = Defaults(space.fixedview(w_defs)) self.w_module = w_module def fget_func_defaults(self, space): - values_w = self.defs_w + values_w = self.defs.getitems() + # the `None in values_w` check here is to ensure that interp-level + # functions with a default of NoneNotWrapped do not get their defaults + # exposed at applevel if not values_w or None in values_w: return space.w_None return space.newtuple(values_w) def fset_func_defaults(self, space, w_defaults): if space.is_w(w_defaults, space.w_None): - self.defs_w = [] + self.defs = Defaults([]) return if not space.is_true(space.isinstance(w_defaults, space.w_tuple)): raise OperationError( space.w_TypeError, space.wrap("func_defaults must be set to a tuple object or None") ) - self.defs_w = space.fixedview(w_defaults) + self.defs = Defaults(space.fixedview(w_defaults)) def fdel_func_defaults(self, space): - self.defs_w = [] + self.defs = Defaults([]) def fget_func_doc(self, space): if self.w_doc is None: @@ -369,7 +382,7 @@ def fget___module__(self, space): if self.w_module is None: if self.w_func_globals is not None and not space.is_w(self.w_func_globals, space.w_None): - self.w_module = space.call_method( self.w_func_globals, "get", space.wrap("__name__") ) + self.w_module = space.call_method(self.w_func_globals, "get", 
space.wrap("__name__")) else: self.w_module = space.w_None return self.w_module @@ -601,7 +614,7 @@ def __init__(self, func): assert isinstance(func, Function) Function.__init__(self, func.space, func.code, func.w_func_globals, - func.defs_w, func.closure, func.name) + func.defs.getitems(), func.closure, func.name) self.w_doc = func.w_doc self.w_func_dict = func.w_func_dict self.w_module = func.w_module diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- a/pypy/interpreter/pycode.py +++ b/pypy/interpreter/pycode.py @@ -86,7 +86,7 @@ self._init_flags() # Precompute what arguments need to be copied into cellvars self._args_as_cellvars = [] - + if self.co_cellvars: argcount = self.co_argcount assert argcount >= 0 # annotator hint @@ -146,7 +146,7 @@ def signature(self): return self._signature - + @classmethod def _from_code(cls, space, code, hidden_applevel=False, code_hook=None): """ Initialize the code object from a real (CPython) one. @@ -182,7 +182,7 @@ list(code.co_cellvars), hidden_applevel, cpython_magic) - + def _compute_flatcall(self): # Speed hack! 
self.fast_natural_arity = eval.Code.HOPELESS @@ -192,7 +192,7 @@ return if self.co_argcount > 0xff: return - + self.fast_natural_arity = eval.Code.FLATPYCALL | self.co_argcount def funcrun(self, func, args): @@ -204,7 +204,7 @@ fresh_virtualizable=True) args_matched = args.parse_into_scope(None, fresh_frame.fastlocals_w, func.name, - sig, func.defs_w) + sig, func.defs.getitems()) fresh_frame.init_cells() return frame.run() @@ -214,10 +214,10 @@ sig = self._signature # speed hack fresh_frame = jit.hint(frame, access_directly=True, - fresh_virtualizable=True) + fresh_virtualizable=True) args_matched = args.parse_into_scope(w_obj, fresh_frame.fastlocals_w, func.name, - sig, func.defs_w) + sig, func.defs.getitems()) fresh_frame.init_cells() return frame.run() @@ -269,7 +269,7 @@ def fget_co_consts(self, space): return space.newtuple(self.co_consts_w) - + def fget_co_names(self, space): return space.newtuple(self.co_names_w) @@ -280,7 +280,7 @@ return space.newtuple([space.wrap(name) for name in self.co_cellvars]) def fget_co_freevars(self, space): - return space.newtuple([space.wrap(name) for name in self.co_freevars]) + return space.newtuple([space.wrap(name) for name in self.co_freevars]) def descr_code__eq__(self, w_other): space = self.space @@ -372,18 +372,18 @@ new_inst = mod.get('code_new') w = space.wrap tup = [ - w(self.co_argcount), - w(self.co_nlocals), - w(self.co_stacksize), + w(self.co_argcount), + w(self.co_nlocals), + w(self.co_stacksize), w(self.co_flags), - w(self.co_code), - space.newtuple(self.co_consts_w), - space.newtuple(self.co_names_w), - space.newtuple([w(v) for v in self.co_varnames]), + w(self.co_code), + space.newtuple(self.co_consts_w), + space.newtuple(self.co_names_w), + space.newtuple([w(v) for v in self.co_varnames]), w(self.co_filename), - w(self.co_name), + w(self.co_name), w(self.co_firstlineno), - w(self.co_lnotab), + w(self.co_lnotab), space.newtuple([w(v) for v in self.co_freevars]), space.newtuple([w(v) for v in 
self.co_cellvars]), w(self.magic), diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py --- a/pypy/interpreter/gateway.py +++ b/pypy/interpreter/gateway.py @@ -13,7 +13,7 @@ NoneNotWrapped = object() from pypy.tool.sourcetools import func_with_new_name -from pypy.interpreter.error import OperationError +from pypy.interpreter.error import OperationError from pypy.interpreter import eval from pypy.interpreter.function import Function, Method, ClassMethod from pypy.interpreter.function import FunctionWithFixedCode @@ -25,7 +25,7 @@ from pypy.rlib import rstackovf from pypy.rlib.objectmodel import we_are_translated -# internal non-translatable parts: +# internal non-translatable parts: import py class SignatureBuilder(object): @@ -78,13 +78,13 @@ dispatch = self.dispatch for el in unwrap_spec: dispatch(el, *extra) - + class UnwrapSpecEmit(UnwrapSpecRecipe): def __init__(self): self.n = 0 self.miniglobals = {} - + def succ(self): n = self.n self.n += 1 @@ -94,7 +94,7 @@ name = obj.__name__ self.miniglobals[name] = obj return name - + #________________________________________________________________ class UnwrapSpec_Check(UnwrapSpecRecipe): @@ -147,7 +147,7 @@ "unwrapped %s argument %s of built-in function %r should " "not start with 'w_'" % (name, argname, self.func)) app_sig.append(argname) - + def visit__ObjSpace(self, el, app_sig): self.orig_arg() @@ -173,7 +173,7 @@ (argname, self.func)) assert app_sig.varargname is None,( "built-in function %r has conflicting rest args specs" % self.func) - app_sig.varargname = argname[:-2] + app_sig.varargname = argname[:-2] def visit_w_args(self, el, app_sig): argname = self.orig_arg() @@ -199,7 +199,7 @@ def scopenext(self): return "scope_w[%d]" % self.succ() - + def visit_function(self, (func, cls)): self.run_args.append("%s(%s)" % (self.use(func), self.scopenext())) @@ -207,7 +207,7 @@ def visit_self(self, typ): self.run_args.append("space.descr_self_interp_w(%s, %s)" % (self.use(typ), self.scopenext())) - + 
def visit__Wrappable(self, typ): self.run_args.append("space.interp_w(%s, %s)" % (self.use(typ), self.scopenext())) @@ -265,7 +265,7 @@ "unexpected: same spec, different run_args") return activation_factory_cls except KeyError: - parts = [] + parts = [] for el in unwrap_spec: if isinstance(el, tuple): parts.append(''.join([getattr(subel, '__name__', subel) @@ -276,7 +276,7 @@ #print label d = {} - source = """if 1: + source = """if 1: def _run(self, space, scope_w): return self.behavior(%s) \n""" % (', '.join(self.run_args),) @@ -326,7 +326,7 @@ self.finger += 1 if self.n > 4: raise FastFuncNotSupported - + def nextarg(self): arg = "w%d" % self.succ() self.args.append(arg) @@ -405,7 +405,7 @@ raise FastFuncNotSupported d = {} unwrap_info.miniglobals['func'] = func - source = """if 1: + source = """if 1: def fastfunc_%s_%d(%s): return func(%s) \n""" % (func.__name__, narg, @@ -511,7 +511,7 @@ # 'w_args' for rest arguments passed as wrapped tuple # str,int,float: unwrap argument as such type # (function, cls) use function to check/unwrap argument of type cls - + # First extract the signature from the (CPython-level) code object from pypy.interpreter import pycode argnames, varargname, kwargname = pycode.cpython_code_signature(func.func_code) @@ -532,7 +532,7 @@ else: assert descrmismatch is None, ( "descrmismatch without a self-type specified") - + orig_sig = SignatureBuilder(func, argnames, varargname, kwargname) app_sig = SignatureBuilder(func) @@ -594,7 +594,7 @@ space = func.space activation = self.activation scope_w = args.parse_obj(w_obj, func.name, self.sig, - func.defs_w, self.minargs) + func.defs.getitems(), self.minargs) try: w_result = activation._run(space, scope_w) except DescrMismatch: @@ -615,10 +615,10 @@ if not we_are_translated(): raise raise e - except KeyboardInterrupt: + except KeyboardInterrupt: raise OperationError(space.w_KeyboardInterrupt, - space.w_None) - except MemoryError: + space.w_None) + except MemoryError: raise 
OperationError(space.w_MemoryError, space.w_None) except rstackovf.StackOverflow, e: rstackovf.check_stack_overflow() @@ -668,7 +668,7 @@ class BuiltinCode0(BuiltinCode): _immutable_ = True fast_natural_arity = 0 - + def fastcall_0(self, space, w_func): try: w_result = self.fastfunc_0(space) @@ -684,7 +684,7 @@ class BuiltinCode1(BuiltinCode): _immutable_ = True fast_natural_arity = 1 - + def fastcall_1(self, space, w_func, w1): try: w_result = self.fastfunc_1(space, w1) @@ -702,7 +702,7 @@ class BuiltinCode2(BuiltinCode): _immutable_ = True fast_natural_arity = 2 - + def fastcall_2(self, space, w_func, w1, w2): try: w_result = self.fastfunc_2(space, w1, w2) @@ -720,7 +720,7 @@ class BuiltinCode3(BuiltinCode): _immutable_ = True fast_natural_arity = 3 - + def fastcall_3(self, space, func, w1, w2, w3): try: w_result = self.fastfunc_3(space, w1, w2, w3) @@ -738,7 +738,7 @@ class BuiltinCode4(BuiltinCode): _immutable_ = True fast_natural_arity = 4 - + def fastcall_4(self, space, func, w1, w2, w3, w4): try: w_result = self.fastfunc_4(space, w1, w2, w3, w4) @@ -770,7 +770,7 @@ NOT_RPYTHON_ATTRIBUTES = ['_staticdefs'] instancecache = {} - + def __new__(cls, f, app_name=None, unwrap_spec = None, descrmismatch=None, as_classmethod=False): @@ -846,17 +846,17 @@ return fn -# -# the next gateways are to be used only for -# temporary/initialization purposes - -class interp2app_temp(interp2app): +# +# the next gateways are to be used only for +# temporary/initialization purposes + +class interp2app_temp(interp2app): "NOT_RPYTHON" - def getcache(self, space): + def getcache(self, space): return self.__dict__.setdefault(space, GatewayCache(space)) -# and now for something completely different ... +# and now for something completely different ... 
# class ApplevelClass: @@ -896,14 +896,14 @@ from pypy.interpreter.module import Module return Module(space, space.wrap(name), self.getwdict(space)) - def wget(self, space, name): - w_globals = self.getwdict(space) + def wget(self, space, name): + w_globals = self.getwdict(space) return space.getitem(w_globals, space.wrap(name)) def interphook(self, name): "NOT_RPYTHON" def appcaller(space, *args_w): - if not isinstance(space, ObjSpace): + if not isinstance(space, ObjSpace): raise TypeError("first argument must be a space instance.") # redirect if the space handles this specially # XXX can this be factored a bit less flow space dependently? @@ -932,7 +932,7 @@ args.arguments_w) return space.call_args(w_func, args) def get_function(space): - w_func = self.wget(space, name) + w_func = self.wget(space, name) return space.unwrap(w_func) appcaller = func_with_new_name(appcaller, name) appcaller.get_function = get_function @@ -1123,15 +1123,15 @@ myfunc = appdef('''myfunc(x, y): return x+y ''') - """ - if not isinstance(source, str): + """ + if not isinstance(source, str): source = py.std.inspect.getsource(source).lstrip() while source.startswith('@py.test.mark.'): # these decorators are known to return the same function # object, we may ignore them assert '\n' in source source = source[source.find('\n') + 1:].lstrip() - assert source.startswith("def "), "can only transform functions" + assert source.startswith("def "), "can only transform functions" source = source[4:] p = source.find('(') assert p >= 0 diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -730,7 +730,7 @@ where x and y can be either constants or variables. There are cases in which the second guard is proven to be always true. 
""" - + for a, b, res, opt_expected in (('2000', '2000', 20001000, True), ( '500', '500', 15001500, True), ( '300', '600', 16001700, False), @@ -830,7 +830,7 @@ test only checks that we get the expected result, not that any optimization has been applied. """ - ops = ('<', '>', '<=', '>=', '==', '!=') + ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): @@ -880,7 +880,7 @@ test only checks that we get the expected result, not that any optimization has been applied. """ - ops = ('<', '>', '<=', '>=', '==', '!=') + ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): @@ -1008,3 +1008,33 @@ --TICK-- jump(p0, p1, p2, p3, p4, p5, p6, p7, i28, i15, i10, i11, descr=) """) + + def test_func_defaults(self): + def main(n): + i = 1 + while i < n: + i += len(xrange(i)) / i + return i + + log = self.run(main, [10000]) + assert log.result == 10000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i10 = int_lt(i5, i6) + guard_true(i10, descr=) + # This can be improved if the JIT realized the lookup of i5 produces + # a constant and thus can be removed entirely + i12 = int_sub(i5, 1) + i13 = uint_floordiv(i12, i7) + i15 = int_add(i13, 1) + i17 = int_lt(i15, 0) + guard_false(i17, descr=) + i18 = int_floordiv(i15, i5) + i19 = int_xor(i15, i5) + i20 = int_mod(i15, i5) + i21 = int_is_true(i20) + i22 = int_add_ovf(i5, i18) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, i22, i6, i7, p8, p9, descr=) + """) \ No newline at end of file diff --git a/pypy/interpreter/test/test_gateway.py b/pypy/interpreter/test/test_gateway.py --- a/pypy/interpreter/test/test_gateway.py +++ b/pypy/interpreter/test/test_gateway.py @@ -2,20 +2,19 @@ # -*- coding: utf-8 -*- from pypy.conftest import gettestobjspace -from pypy.interpreter import gateway +from pypy.interpreter import gateway, argument from pypy.interpreter.gateway import ObjSpace, W_Root 
-from pypy.interpreter import argument +from pypy.interpreter.function import Defaults import py import sys class FakeFunc(object): - def __init__(self, space, name): self.space = space self.name = name - self.defs_w = [] + self.defs = Defaults([]) -class TestBuiltinCode: +class TestBuiltinCode: def test_signature(self): def c(space, w_x, w_y, hello_w): pass @@ -87,7 +86,7 @@ w_result = code.funcrun(FakeFunc(self.space, "c"), args) assert self.space.eq_w(w_result, w(1020)) -class TestGateway: +class TestGateway: def test_app2interp(self): w = self.space.wrap @@ -102,7 +101,7 @@ return a+b g3 = gateway.app2interp_temp(noapp_g3, gateway.applevel_temp) assert self.space.eq_w(g3(self.space, w('foo'), w('bar')), w('foobar')) - + def test_app2interp2(self): """same but using transformed code""" w = self.space.wrap @@ -129,28 +128,28 @@ def g3(space, w_a, w_b): return space.add(w_a, w_b) app_g3 = gateway.interp2app_temp(g3) - w_app_g3 = space.wrap(app_g3) + w_app_g3 = space.wrap(app_g3) assert self.space.eq_w( - space.call(w_app_g3, + space.call(w_app_g3, space.newtuple([w('foo'), w('bar')]), space.newdict()), w('foobar')) assert self.space.eq_w( space.call_function(w_app_g3, w('foo'), w('bar')), w('foobar')) - + def test_interp2app_unwrap_spec(self): space = self.space w = space.wrap def g3(space, w_a, w_b): - return space.add(w_a, w_b) + return space.add(w_a, w_b) app_g3 = gateway.interp2app_temp(g3, unwrap_spec=[gateway.ObjSpace, gateway.W_Root, gateway.W_Root]) - w_app_g3 = space.wrap(app_g3) + w_app_g3 = space.wrap(app_g3) assert self.space.eq_w( - space.call(w_app_g3, + space.call(w_app_g3, space.newtuple([w('foo'), w('bar')]), space.newdict()), w('foobar')) @@ -188,7 +187,7 @@ app_A = gateway.interp2app(A.f) app_B = gateway.interp2app(B.f) assert app_A is not app_B - + def test_interp2app_unwrap_spec_nonnegint(self): space = self.space w = space.wrap @@ -256,13 +255,13 @@ space = self.space w = space.wrap def g3_args_w(space, args_w): - return space.add(args_w[0], 
args_w[1]) + return space.add(args_w[0], args_w[1]) app_g3_args_w = gateway.interp2app_temp(g3_args_w, unwrap_spec=[gateway.ObjSpace, 'args_w']) - w_app_g3_args_w = space.wrap(app_g3_args_w) + w_app_g3_args_w = space.wrap(app_g3_args_w) assert self.space.eq_w( - space.call(w_app_g3_args_w, + space.call(w_app_g3_args_w, space.newtuple([w('foo'), w('bar')]), space.newdict()), w('foobar')) @@ -276,13 +275,13 @@ def g3_ss(space, s0, s1): if s1 is None: return space.wrap(42) - return space.wrap(s0+s1) + return space.wrap(s0+s1) app_g3_ss = gateway.interp2app_temp(g3_ss, unwrap_spec=[gateway.ObjSpace, str, 'str_or_None']) - w_app_g3_ss = space.wrap(app_g3_ss) + w_app_g3_ss = space.wrap(app_g3_ss) assert self.space.eq_w( - space.call(w_app_g3_ss, + space.call(w_app_g3_ss, space.newtuple([w('foo'), w('bar')]), space.newdict()), w('foobar')) @@ -299,13 +298,13 @@ space = self.space w = space.wrap def g3_if(space, i0, f1): - return space.wrap(i0+f1) + return space.wrap(i0+f1) app_g3_if = gateway.interp2app_temp(g3_if, unwrap_spec=[gateway.ObjSpace, int,float]) - w_app_g3_if = space.wrap(app_g3_if) + w_app_g3_if = space.wrap(app_g3_if) assert self.space.eq_w( - space.call(w_app_g3_if, + space.call(w_app_g3_if, space.newtuple([w(1), w(1.0)]), space.newdict()), w(2.0)) @@ -324,7 +323,7 @@ w_app_g3_ll = space.wrap(app_g3_ll) w_big = w(gateway.r_longlong(10**10)) assert space.eq_w( - space.call(w_app_g3_ll, + space.call(w_app_g3_ll, space.newtuple([w_big]), space.newdict()), w(gateway.r_longlong(3 * 10**10))) @@ -381,7 +380,7 @@ app_g3_idx = gateway.interp2app_temp(g3_idx, unwrap_spec=[gateway.ObjSpace, 'index']) - w_app_g3_idx = space.wrap(app_g3_idx) + w_app_g3_idx = space.wrap(app_g3_idx) assert space.eq_w( space.call_function(w_app_g3_idx, w(123)), w(124)) @@ -404,7 +403,7 @@ int]) w_app_g3_i = space.wrap(app_g3_i) assert space.eq_w(space.call_function(w_app_g3_i,w(1)),w(1)) - assert space.eq_w(space.call_function(w_app_g3_i,w(1L)),w(1)) + assert 
space.eq_w(space.call_function(w_app_g3_i,w(1L)),w(1)) raises(gateway.OperationError,space.call_function,w_app_g3_i,w(sys.maxint*2)) raises(gateway.OperationError,space.call_function,w_app_g3_i,w(None)) raises(gateway.OperationError,space.call_function,w_app_g3_i,w("foo")) @@ -418,14 +417,14 @@ raises(gateway.OperationError,space.call_function,w_app_g3_s,w(None)) raises(gateway.OperationError,space.call_function,w_app_g3_s,w(1)) raises(gateway.OperationError,space.call_function,w_app_g3_s,w(1.0)) - + app_g3_f = gateway.interp2app_temp(g3_id, unwrap_spec=[gateway.ObjSpace, float]) w_app_g3_f = space.wrap(app_g3_f) assert space.eq_w(space.call_function(w_app_g3_f,w(1.0)),w(1.0)) assert space.eq_w(space.call_function(w_app_g3_f,w(1)),w(1.0)) - assert space.eq_w(space.call_function(w_app_g3_f,w(1L)),w(1.0)) + assert space.eq_w(space.call_function(w_app_g3_f,w(1L)),w(1.0)) raises(gateway.OperationError,space.call_function,w_app_g3_f,w(None)) raises(gateway.OperationError,space.call_function,w_app_g3_f,w("foo")) @@ -540,8 +539,8 @@ called.append(w_func) return fastcall_2(space, w_func, w_a, w_b) - w_app_f.code.fastcall_2 = witness_fastcall_2 - + w_app_f.code.fastcall_2 = witness_fastcall_2 + w_res = space.appexec([w_app_f, w_3], """(f, x): class A(object): m = f # not a builtin function, so works as method @@ -552,8 +551,8 @@ """) assert space.is_true(w_res) - assert called == [w_app_f, w_app_f] - + assert called == [w_app_f, w_app_f] + def test_plain(self): space = self.space @@ -569,7 +568,7 @@ w_res = space.call_args(w_g, args) assert space.is_true(space.eq(w_res, space.wrap(('g', -1, 0)))) - + w_self = space.wrap('self') args0 = argument.Arguments(space, [space.wrap(0)]) @@ -607,12 +606,12 @@ class TestPassThroughArguments: - + def test_pass_trough_arguments0(self): space = self.space called = [] - + def f(space, __args__): called.append(__args__) a_w, _ = __args__.unpack() @@ -626,7 +625,7 @@ w_res = space.call_args(w_f, args) assert space.is_true(space.eq(w_res, 
space.wrap(('f', 7)))) - + # white-box check for opt assert called[0] is args @@ -634,7 +633,7 @@ space = self.space called = [] - + def g(space, w_self, __args__): called.append(__args__) a_w, _ = __args__.unpack() @@ -668,7 +667,7 @@ w_res = space.call_args(w_g, args) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 0)))) # no opt in this case - assert len(called) == 2 + assert len(called) == 2 assert called[0] == 'funcrun' called = [] @@ -677,15 +676,15 @@ w_res = space.call_function(w_g, w_self) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self')))) assert len(called) == 1 - assert isinstance(called[0], argument.Arguments) + assert isinstance(called[0], argument.Arguments) called = [] - + w_res = space.appexec([w_g], """(g): return g('self', 11) """) assert space.is_true(space.eq(w_res, space.wrap(('g', 'self', 11)))) assert len(called) == 1 - assert isinstance(called[0], argument.Arguments) + assert isinstance(called[0], argument.Arguments) called = [] w_res = space.appexec([w_g], """(g): @@ -736,7 +735,7 @@ clash = dict.__new__.func_code.co_varnames[0] dict(**{clash: 33}) - dict.__new__(dict, **{clash: 33}) + dict.__new__(dict, **{clash: 33}) def test_dict_init(self): d = {} @@ -750,5 +749,5 @@ clash = dict.update.func_code.co_varnames[0] d.update(**{clash: 33}) - dict.update(d, **{clash: 33}) - + dict.update(d, **{clash: 33}) + diff --git a/pypy/objspace/std/fake.py b/pypy/objspace/std/fake.py --- a/pypy/objspace/std/fake.py +++ b/pypy/objspace/std/fake.py @@ -144,10 +144,10 @@ frame = func.space.createframe(self, func.w_func_globals, func.closure) sig = self.signature() - scope_w = args.parse_obj(None, func.name, sig, func.defs_w) + scope_w = args.parse_obj(None, func.name, sig, func.defs.getitems()) frame.setfastscope(scope_w) return frame.run() - + class CPythonFakeFrame(eval.Frame): From commits-noreply at bitbucket.org Sun Mar 20 18:56:53 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 20 Mar 2011 18:56:53 
+0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110320175653.93021282B90@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42807:955e244122b9 Date: 2011-03-20 13:56 -0400 http://bitbucket.org/pypy/pypy/changeset/955e244122b9/ Log: merged upstream From commits-noreply at bitbucket.org Sun Mar 20 19:12:25 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Sun, 20 Mar 2011 19:12:25 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: random notes for the subrepo removal Message-ID: <20110320181225.1C78636C202@codespeak.net> Author: Ronny Pfannschmidt Branch: extradoc Changeset: r3379:fd00d445187a Date: 2011-03-20 19:12 +0100 http://bitbucket.org/pypy/extradoc/changeset/fd00d445187a/ Log: random notes for the subrepo removal diff --git a/planning/hg-migration/subrepo_removal.txt b/planning/hg-migration/subrepo_removal.txt new file mode 100644 --- /dev/null +++ b/planning/hg-migration/subrepo_removal.txt @@ -0,0 +1,7 @@ + +Todo +===== + +* pyrepl release? +* greenlet sync? +* experimentation branch! 
From commits-noreply at bitbucket.org Sun Mar 20 19:21:31 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Sun, 20 Mar 2011 19:21:31 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: branch that kills the subrepos for using dependencies instead Message-ID: <20110320182131.9EDBE36C202@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42808:7b56ae2ba1ae Date: 2011-03-20 19:15 +0100 http://bitbucket.org/pypy/pypy/changeset/7b56ae2ba1ae/ Log: branch that kills the subrepos for using dependencies instead diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl From commits-noreply at bitbucket.org Mon Mar 21 08:42:08 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 08:42:08 +0100 (CET) Subject: [pypy-svn] pypy default: (moguri) Implement PyRun_SimpleString in cpyext Message-ID: <20110321074208.B1367282BAD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42809:b418623246c9 Date: 2011-03-21 08:40 +0100 http://bitbucket.org/pypy/pypy/changeset/b418623246c9/ Log: (moguri) Implement PyRun_SimpleString in cpyext diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -3072,12 +3072,6 @@ "???" 
as the filename.""" raise NotImplementedError - at cpython_api([rffi.CCHARP], rffi.INT_real, error=-1) -def PyRun_SimpleString(space, command): - """This is a simplified interface to PyRun_SimpleStringFlags() below, - leaving the PyCompilerFlags* argument set to NULL.""" - raise NotImplementedError - @cpython_api([rffi.CCHARP, PyCompilerFlags], rffi.INT_real, error=-1) def PyRun_SimpleStringFlags(space, command, flags): """Executes the Python source code from command in the __main__ module diff --git a/pypy/module/cpyext/eval.py b/pypy/module/cpyext/eval.py --- a/pypy/module/cpyext/eval.py +++ b/pypy/module/cpyext/eval.py @@ -84,6 +84,15 @@ w_code = compiling.compile(space, w_source, filename, mode) return compiling.eval(space, w_code, w_globals, w_locals) + at cpython_api([CONST_STRING], rffi.INT_real, error=-1) +def PyRun_SimpleString(space, command): + """This is a simplified interface to PyRun_SimpleStringFlags() below, + leaving the PyCompilerFlags* argument set to NULL.""" + command = rffi.charp2str(command) + run_string(space, command, "", Py_file_input, + space.w_None, space.w_None) + return 0 + @cpython_api([CONST_STRING, rffi.INT_real,PyObject, PyObject], PyObject) def PyRun_String(space, source, start, w_globals, w_locals): """This is a simplified interface to PyRun_StringFlags() below, leaving diff --git a/pypy/module/cpyext/test/test_eval.py b/pypy/module/cpyext/test/test_eval.py --- a/pypy/module/cpyext/test/test_eval.py +++ b/pypy/module/cpyext/test/test_eval.py @@ -63,6 +63,21 @@ assert space.int_w(w_res) == 10 + def test_run_simple_string(self, space, api): + def run(code): + buf = rffi.str2charp(code) + try: + return api.PyRun_SimpleString(buf) + finally: + rffi.free_charp(buf) + + assert 0 == run("42 * 43") + + assert -1 == run("4..3 * 43") + + assert api.PyErr_Occurred() + api.PyErr_Clear() + def test_run_string(self, space, api): def run(code, start, w_globals, w_locals): buf = rffi.str2charp(code) From commits-noreply at bitbucket.org Mon Mar 
21 14:06:22 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 14:06:22 +0100 (CET) Subject: [pypy-svn] pypy default: Try to propogate the immutability of defaults through the code more, fixes the 3 failing pypy_c tests Message-ID: <20110321130622.B1A06282BD4@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42810:ddca225a0078 Date: 2011-03-21 09:05 -0400 http://bitbucket.org/pypy/pypy/changeset/ddca225a0078/ Log: Try to propogate the immutability of defaults through the code more, fixes the 3 failing pypy_c tests diff --git a/pypy/objspace/std/listobject.py b/pypy/objspace/std/listobject.py --- a/pypy/objspace/std/listobject.py +++ b/pypy/objspace/std/listobject.py @@ -8,6 +8,7 @@ from pypy.objspace.std import slicetype from pypy.interpreter import gateway, baseobjspace +from pypy.interpreter.function import Defaults from pypy.rlib.listsort import TimSort from pypy.interpreter.argument import Signature @@ -32,7 +33,7 @@ init_signature = Signature(['sequence'], None, None) -init_defaults = [None] +init_defaults = Defaults([None]) def init__List(space, w_list, __args__): # this is on the silly side diff --git a/pypy/interpreter/function.py b/pypy/interpreter/function.py --- a/pypy/interpreter/function.py +++ b/pypy/interpreter/function.py @@ -30,6 +30,12 @@ def getitems(self): return jit.hint(self, promote=True).items + def getitem(self, idx): + return self.getitems()[idx] + + def getlen(self): + return len(self.getitems()) + class Function(Wrappable): """A function is a code object captured with some environment: an object space, a dictionary of globals, default arguments, @@ -142,7 +148,7 @@ return self._flat_pycall(code, nargs, frame) elif fast_natural_arity & Code.FLATPYCALL: natural_arity = fast_natural_arity & 0xff - if natural_arity > nargs >= natural_arity - len(self.defs.getitems()): + if natural_arity > nargs >= natural_arity - self.defs.getlen(): assert isinstance(code, PyCode) return self._flat_pycall_defaults(code, 
nargs, frame, natural_arity - nargs) @@ -175,12 +181,12 @@ w_arg = frame.peekvalue(nargs-1-i) new_frame.fastlocals_w[i] = w_arg - defs_w = self.defs.getitems() - ndefs = len(defs_w) + defs = self.defs + ndefs = defs.getlen() start = ndefs - defs_to_load i = nargs for j in xrange(start, ndefs): - new_frame.fastlocals_w[i] = defs_w[j] + new_frame.fastlocals_w[i] = defs.getitem(j) i += 1 return new_frame.run() diff --git a/pypy/interpreter/argument.py b/pypy/interpreter/argument.py --- a/pypy/interpreter/argument.py +++ b/pypy/interpreter/argument.py @@ -77,13 +77,13 @@ if i == 2: return self.kwargname raise IndexError - + class Arguments(object): """ Collects the arguments of a function call. - + Instances should be considered immutable. """ @@ -146,7 +146,7 @@ self._combine_starstarargs_wrapped(w_starstararg) def _combine_starargs_wrapped(self, w_stararg): - # unpack the * arguments + # unpack the * arguments space = self.space try: args_w = space.fixedview(w_stararg) @@ -236,10 +236,10 @@ if self.arguments_w: return self.arguments_w[0] return None - + ### Parsing for function calls ### - def _match_signature(self, w_firstarg, scope_w, signature, defaults_w=[], + def _match_signature(self, w_firstarg, scope_w, signature, defaults=None, blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. 
@@ -247,19 +247,19 @@ """ if jit.we_are_jitted() and self._dont_jit: return self._match_signature_jit_opaque(w_firstarg, scope_w, - signature, defaults_w, + signature, defaults, blindargs) return self._really_match_signature(w_firstarg, scope_w, signature, - defaults_w, blindargs) + defaults, blindargs) @jit.dont_look_inside def _match_signature_jit_opaque(self, w_firstarg, scope_w, signature, - defaults_w, blindargs): + defaults, blindargs): return self._really_match_signature(w_firstarg, scope_w, signature, - defaults_w, blindargs) + defaults, blindargs) @jit.unroll_safe - def _really_match_signature(self, w_firstarg, scope_w, signature, defaults_w=[], + def _really_match_signature(self, w_firstarg, scope_w, signature, defaults=None, blindargs=0): # # args_w = list of the normal actual parameters, wrapped @@ -283,10 +283,10 @@ scope_w[0] = w_firstarg input_argcount = 1 else: - extravarargs = [ w_firstarg ] + extravarargs = [w_firstarg] else: upfront = 0 - + args_w = self.arguments_w num_args = len(args_w) @@ -327,7 +327,7 @@ elif avail > co_argcount: raise ArgErrCount(avail, num_kwds, co_argcount, has_vararg, has_kwarg, - defaults_w, 0) + defaults, 0) # the code assumes that keywords can potentially be large, but that # argnames is typically not too large @@ -357,12 +357,12 @@ num_remainingkwds -= 1 missing = 0 if input_argcount < co_argcount: - def_first = co_argcount - len(defaults_w) + def_first = co_argcount - (0 if defaults is None else defaults.getlen()) for i in range(input_argcount, co_argcount): if scope_w[i] is not None: pass elif i >= def_first: - scope_w[i] = defaults_w[i-def_first] + scope_w[i] = defaults.getitem(i - def_first) else: # error: not enough arguments. 
Don't signal it immediately # because it might be related to a problem with */** or @@ -382,20 +382,20 @@ if co_argcount == 0: raise ArgErrCount(avail, num_kwds, co_argcount, has_vararg, has_kwarg, - defaults_w, missing) + defaults, missing) raise ArgErrUnknownKwds(num_remainingkwds, keywords, used_keywords) if missing: raise ArgErrCount(avail, num_kwds, co_argcount, has_vararg, has_kwarg, - defaults_w, missing) + defaults, missing) return co_argcount + has_vararg + has_kwarg - + def parse_into_scope(self, w_firstarg, - scope_w, fnname, signature, defaults_w=[]): + scope_w, fnname, signature, defaults=None): """Parse args and kwargs to initialize a frame according to the signature of code object. Store the argumentvalues into scope_w. @@ -403,32 +403,32 @@ """ try: return self._match_signature(w_firstarg, - scope_w, signature, defaults_w, 0) + scope_w, signature, defaults, 0) except ArgErr, e: raise OperationError(self.space.w_TypeError, self.space.wrap(e.getmsg(fnname))) - def _parse(self, w_firstarg, signature, defaults_w, blindargs=0): + def _parse(self, w_firstarg, signature, defaults, blindargs=0): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. """ scopelen = signature.scope_length() scope_w = [None] * scopelen - self._match_signature(w_firstarg, scope_w, signature, defaults_w, + self._match_signature(w_firstarg, scope_w, signature, defaults, blindargs) - return scope_w + return scope_w def parse_obj(self, w_firstarg, - fnname, signature, defaults_w=[], blindargs=0): + fnname, signature, defaults=None, blindargs=0): """Parse args and kwargs to initialize a frame according to the signature of code object. 
""" try: - return self._parse(w_firstarg, signature, defaults_w, blindargs) + return self._parse(w_firstarg, signature, defaults, blindargs) except ArgErr, e: raise OperationError(self.space.w_TypeError, - self.space.wrap(e.getmsg(fnname))) + self.space.wrap(e.getmsg(fnname))) @staticmethod def frompacked(space, w_args=None, w_kwds=None): @@ -473,24 +473,24 @@ self.w_starstararg) - - def _match_signature(self, w_firstarg, scope_w, signature, defaults_w=[], + + def _match_signature(self, w_firstarg, scope_w, signature, defaults=None, blindargs=0): self.combine_if_necessary() # _match_signature is destructive return Arguments._match_signature( self, w_firstarg, scope_w, signature, - defaults_w, blindargs) + defaults, blindargs) def unpack(self): self.combine_if_necessary() return Arguments.unpack(self) - def match_signature(self, signature, defaults_w): + def match_signature(self, signature, defaults): """Parse args and kwargs according to the signature of a code object, or raise an ArgErr in case of failure. 
""" - return self._parse(None, signature, defaults_w) + return self._parse(None, signature, defaults) def unmatch_signature(self, signature, data_w): """kind of inverse of match_signature""" @@ -513,10 +513,10 @@ for w_key in space.unpackiterable(data_w_starargarg): key = space.str_w(w_key) w_value = space.getitem(data_w_starargarg, w_key) - unfiltered_kwds_w[key] = w_value + unfiltered_kwds_w[key] = w_value cnt += 1 assert len(data_w) == cnt - + ndata_args_w = len(data_args_w) if ndata_args_w >= need_cnt: args_w = data_args_w[:need_cnt] @@ -532,19 +532,19 @@ for i in range(0, len(stararg_w)): args_w[i + datalen] = stararg_w[i] assert len(args_w) == need_cnt - + keywords = [] keywords_w = [] for key in need_kwds: keywords.append(key) keywords_w.append(unfiltered_kwds_w[key]) - + return ArgumentsForTranslation(self.space, args_w, keywords, keywords_w) @staticmethod def frompacked(space, w_args=None, w_kwds=None): raise NotImplementedError("go away") - + @staticmethod def fromshape(space, (shape_cnt,shape_keys,shape_star,shape_stst), data_w): args_w = data_w[:shape_cnt] @@ -596,23 +596,23 @@ # class ArgErr(Exception): - + def getmsg(self, fnname): raise NotImplementedError class ArgErrCount(ArgErr): def __init__(self, got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg, - defaults_w, missing_args): + defaults, missing_args): self.expected_nargs = expected_nargs self.has_vararg = has_vararg self.has_kwarg = has_kwarg - - self.num_defaults = len(defaults_w) + + self.num_defaults = 0 if defaults is None else defaults.getlen() self.missing_args = missing_args self.num_args = got_nargs self.num_kwds = nkwds - + def getmsg(self, fnname): args = None #args_w, kwds_w = args.unpack() @@ -620,7 +620,7 @@ n = self.expected_nargs if n == 0: msg = "%s() takes no argument (%d given)" % ( - fnname, + fnname, nargs) else: defcount = self.num_defaults diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- a/pypy/interpreter/pycode.py +++ 
b/pypy/interpreter/pycode.py @@ -204,7 +204,7 @@ fresh_virtualizable=True) args_matched = args.parse_into_scope(None, fresh_frame.fastlocals_w, func.name, - sig, func.defs.getitems()) + sig, func.defs) fresh_frame.init_cells() return frame.run() @@ -217,7 +217,7 @@ fresh_virtualizable=True) args_matched = args.parse_into_scope(w_obj, fresh_frame.fastlocals_w, func.name, - sig, func.defs.getitems()) + sig, func.defs) fresh_frame.init_cells() return frame.run() diff --git a/pypy/rpython/callparse.py b/pypy/rpython/callparse.py --- a/pypy/rpython/callparse.py +++ b/pypy/rpython/callparse.py @@ -1,4 +1,5 @@ from pypy.interpreter.argument import ArgumentsForTranslation, ArgErr +from pypy.interpreter.function import Defaults from pypy.annotation import model as annmodel from pypy.rpython import rtuple from pypy.rpython.error import TyperError @@ -52,7 +53,7 @@ for x in graph.defaults: defs_h.append(ConstHolder(x)) try: - holders = arguments.match_signature(signature, defs_h) + holders = arguments.match_signature(signature, Defaults(defs_h)) except ArgErr, e: raise TyperError, "signature mismatch: %s" % e.getmsg(graph.name) @@ -80,7 +81,7 @@ v = self._emit(repr, hop) cache[repr] = v return v - + class VarHolder(Holder): @@ -95,7 +96,7 @@ assert self.is_tuple() n = len(self.s_obj.items) return tuple([ItemHolder(self, i) for i in range(n)]) - + def _emit(self, repr, hop): return hop.inputarg(repr, arg=self.num) @@ -186,4 +187,4 @@ def type(self, item): return type(item) - + diff --git a/pypy/annotation/description.py b/pypy/annotation/description.py --- a/pypy/annotation/description.py +++ b/pypy/annotation/description.py @@ -3,6 +3,7 @@ from pypy.interpreter.pycode import cpython_code_signature from pypy.interpreter.argument import rawshape from pypy.interpreter.argument import ArgErr +from pypy.interpreter.function import Defaults from pypy.tool.sourcetools import valid_identifier from pypy.tool.pairtype import extendabletype @@ -15,7 +16,7 @@ overridden = False 
normalized = False modified = True - + def __init__(self, desc): self.descs = { desc: True } self.calltables = {} # see calltable_lookup_row() @@ -172,7 +173,7 @@ class FunctionDesc(Desc): knowntype = types.FunctionType overridden = False - + def __init__(self, bookkeeper, pyobj=None, name=None, signature=None, defaults=None, specializer=None): @@ -230,7 +231,7 @@ return '_'.join(map(nameof, thing)) else: return str(thing)[:30] - + if key is not None and alt_name is None: postfix = valid_identifier(nameof(key)) alt_name = "%s__%s"%(self.name, postfix) @@ -250,7 +251,7 @@ for x in defaults: defs_s.append(self.bookkeeper.immutablevalue(x)) try: - inputcells = args.match_signature(signature, defs_s) + inputcells = args.match_signature(signature, Defaults(defs_s)) except ArgErr, e: raise TypeError, "signature mismatch: %s" % e.getmsg(self.name) return inputcells @@ -291,7 +292,7 @@ def bind_under(self, classdef, name): # XXX static methods - return self.bookkeeper.getmethoddesc(self, + return self.bookkeeper.getmethoddesc(self, classdef, # originclassdef, None, # selfclassdef name) @@ -574,7 +575,7 @@ while name not in cdesc.classdict: cdesc = cdesc.basedesc if cdesc is None: - return None + return None else: return cdesc @@ -750,7 +751,7 @@ class MethodDesc(Desc): knowntype = types.MethodType - def __init__(self, bookkeeper, funcdesc, originclassdef, + def __init__(self, bookkeeper, funcdesc, originclassdef, selfclassdef, name, flags={}): super(MethodDesc, self).__init__(bookkeeper) self.funcdesc = funcdesc @@ -803,7 +804,7 @@ # FunctionDescs, not MethodDescs. The present method returns the # FunctionDesc to use as a key in that family. return self.funcdesc - + def simplify_desc_set(descs): # Some hacking needed to make contains() happy on SomePBC: if the # set of MethodDescs contains some "redundant" ones, i.e. 
ones that @@ -894,7 +895,7 @@ return s_ImpossibleValue else: return self.bookkeeper.immutablevalue(value) - + def create_new_attribute(self, name, value): try: self.read_attribute(name) @@ -946,7 +947,7 @@ s_self = SomePBC([self.frozendesc]) args = args.prepend(s_self) return self.funcdesc.pycall(schedule, args, s_previous_result) - + def consider_call_site(bookkeeper, family, descs, args, s_result): shape = rawshape(args, nextra=1) # account for the extra 'self' funcdescs = [mofdesc.funcdesc for mofdesc in descs] diff --git a/pypy/objspace/std/bytearrayobject.py b/pypy/objspace/std/bytearrayobject.py --- a/pypy/objspace/std/bytearrayobject.py +++ b/pypy/objspace/std/bytearrayobject.py @@ -22,6 +22,7 @@ from pypy.interpreter import gateway from pypy.interpreter.argument import Signature from pypy.interpreter.buffer import RWBuffer +from pypy.interpreter.function import Defaults from pypy.objspace.std.bytearraytype import ( makebytearraydata_w, getbytevalue, new_bytearray @@ -42,7 +43,7 @@ registerimplementation(W_BytearrayObject) init_signature = Signature(['source', 'encoding', 'errors'], None, None) -init_defaults = [None, None, None] +init_defaults = Defaults([None, None, None]) def init__Bytearray(space, w_bytearray, __args__): # this is on the silly side diff --git a/pypy/interpreter/test/test_argument.py b/pypy/interpreter/test/test_argument.py --- a/pypy/interpreter/test/test_argument.py +++ b/pypy/interpreter/test/test_argument.py @@ -1,8 +1,9 @@ import py -from pypy.interpreter.argument import Arguments, ArgumentsForTranslation, ArgErr -from pypy.interpreter.argument import ArgErrUnknownKwds, ArgErrMultipleValues -from pypy.interpreter.argument import ArgErrCount, rawshape, Signature +from pypy.interpreter.argument import (Arguments, ArgumentsForTranslation, + ArgErr, ArgErrUnknownKwds, ArgErrMultipleValues, ArgErrCount, rawshape, + Signature) from pypy.interpreter.error import OperationError +from pypy.interpreter.function import Defaults class 
TestSignature(object): @@ -69,7 +70,7 @@ return list(it) def listview(self, it): - return list(it) + return list(it) def unpackiterable(self, it): return list(it) @@ -158,18 +159,18 @@ def test_fixedunpacked(self): space = DummySpace() - + args = Arguments(space, [], ["k"], [1]) py.test.raises(ValueError, args.fixedunpack, 1) args = Arguments(space, ["a", "b"]) py.test.raises(ValueError, args.fixedunpack, 0) - py.test.raises(ValueError, args.fixedunpack, 1) + py.test.raises(ValueError, args.fixedunpack, 1) py.test.raises(ValueError, args.fixedunpack, 3) py.test.raises(ValueError, args.fixedunpack, 4) assert args.fixedunpack(2) == ['a', 'b'] - + def test_match0(self): space = DummySpace() args = Arguments(space, []) @@ -183,7 +184,7 @@ py.test.raises(ArgErr, args._match_signature, None, l, Signature(["a"], "*")) args = Arguments(space, []) l = [None] - args._match_signature(None, l, Signature(["a"]), defaults_w=[1]) + args._match_signature(None, l, Signature(["a"]), defaults=Defaults([1])) assert l == [1] args = Arguments(space, []) l = [None] @@ -231,7 +232,7 @@ py.test.raises(ArgErr, args._match_signature, firstarg, l, Signature(["a", "b", "c", "d", "e"], "*")) l = [None, None, None, None, None] args = Arguments(space, arglist, w_stararg=starargs) - args._match_signature(firstarg, l, Signature(["a", "b", "c", "d", "e"]), defaults_w=[1]) + args._match_signature(firstarg, l, Signature(["a", "b", "c", "d", "e"]), defaults=Defaults([1])) assert l == [4, 5, 6, 7, 1] for j in range(len(values)): l = [None] * (j + 1) @@ -256,24 +257,24 @@ assert len(keywords) == len(keywords_w) args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None] - args._match_signature(None, l, Signature(["a", "b", "c"]), defaults_w=[4]) + args._match_signature(None, l, Signature(["a", "b", "c"]), defaults=Defaults([4])) assert l == [1, 2, 3] args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None, None] 
- args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults_w=[4, 5]) + args._match_signature(None, l, Signature(["a", "b", "b1", "c"]), defaults=Defaults([4, 5])) assert l == [1, 2, 4, 3] args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None, None] - args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults_w=[4, 5]) + args._match_signature(None, l, Signature(["a", "b", "c", "d"]), defaults=Defaults([4, 5])) assert l == [1, 2, 3, 5] args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None, None] py.test.raises(ArgErr, args._match_signature, None, l, - Signature(["c", "b", "a", "d"]), defaults_w=[4, 5]) + Signature(["c", "b", "a", "d"]), defaults=Defaults([4, 5])) args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None, None] py.test.raises(ArgErr, args._match_signature, None, l, - Signature(["a", "b", "c1", "d"]), defaults_w=[4, 5]) + Signature(["a", "b", "c1", "d"]), defaults=Defaults([4, 5])) args = Arguments(space, [1, 2], keywords[:], keywords_w[:], w_starstararg=w_kwds) l = [None, None, None] args._match_signature(None, l, Signature(["a", "b"], None, "**")) @@ -354,9 +355,10 @@ calls = [] def _match_signature(w_firstarg, scope_w, signature, - defaults_w=[], blindargs=0): + defaults=None, blindargs=0): + defaults = [] if defaults is None else defaults.getitems() calls.append((w_firstarg, scope_w, signature.argnames, signature.has_vararg(), - signature.has_kwarg(), defaults_w, blindargs)) + signature.has_kwarg(), defaults, blindargs)) args._match_signature = _match_signature scope_w = args.parse_obj(None, "foo", Signature(["a", "b"], None, None)) @@ -365,7 +367,7 @@ [], 0) assert calls[0][1] is scope_w calls = [] - + scope_w = args.parse_obj(None, "foo", Signature(["a", "b"], "args", None), blindargs=1) assert len(calls) == 1 @@ -374,15 +376,15 @@ calls = [] scope_w = args.parse_obj(None, 
"foo", Signature(["a", "b"], "args", "kw"), - defaults_w=['x', 'y']) + defaults=Defaults(['x', 'y'])) assert len(calls) == 1 assert calls[0] == (None, [None, None, None, None], ["a", "b"], True, True, ["x", "y"], 0) calls = [] - + scope_w = args.parse_obj("obj", "foo", Signature(["a", "b"], "args", "kw"), - defaults_w=['x', 'y'], blindargs=1) + defaults=Defaults(['x', 'y']), blindargs=1) assert len(calls) == 1 assert calls[0] == ("obj", [None, None, None, None], ["a", "b"], True, True, @@ -411,9 +413,10 @@ calls = [] def _match_signature(w_firstarg, scope_w, signature, - defaults_w=[], blindargs=0): + defaults=None, blindargs=0): + defaults = [] if defaults is None else defaults.getitems() calls.append((w_firstarg, scope_w, signature.argnames, signature.has_vararg(), - signature.has_kwarg(), defaults_w, blindargs)) + signature.has_kwarg(), defaults, blindargs)) args._match_signature = _match_signature scope_w = [None, None] @@ -426,17 +429,17 @@ scope_w = [None, None, None, None] args.parse_into_scope(None, scope_w, "foo", Signature(["a", "b"], "args", "kw"), - defaults_w=['x', 'y']) + defaults=Defaults(['x', 'y'])) assert len(calls) == 1 assert calls[0] == (None, scope_w, ["a", "b"], True, True, ["x", "y"], 0) calls = [] - scope_w = [None, None, None, None] + scope_w = [None, None, None, None] args.parse_into_scope("obj", scope_w, "foo", Signature(["a", "b"], "args", "kw"), - defaults_w=['x', 'y']) + defaults=Defaults(['x', 'y'])) assert len(calls) == 1 assert calls[0] == ("obj", scope_w, ["a", "b"], True, True, @@ -468,7 +471,7 @@ assert args.arguments_w == [1] assert set(args.keywords) == set(['a', 'b']) assert args.keywords_w[args.keywords.index('a')] == 2 - assert args.keywords_w[args.keywords.index('b')] == 3 + assert args.keywords_w[args.keywords.index('b')] == 3 args = Arguments(space, [1]) w_args, w_kwds = args.topacked() @@ -508,25 +511,25 @@ def test_missing_args(self): # got_nargs, nkwds, expected_nargs, has_vararg, has_kwarg, # defaults_w, missing_args 
- err = ArgErrCount(1, 0, 0, False, False, [], 0) + err = ArgErrCount(1, 0, 0, False, False, Defaults([]), 0) s = err.getmsg('foo') assert s == "foo() takes no argument (1 given)" - err = ArgErrCount(0, 0, 1, False, False, [], 1) + err = ArgErrCount(0, 0, 1, False, False, Defaults([]), 1) s = err.getmsg('foo') assert s == "foo() takes exactly 1 argument (0 given)" - err = ArgErrCount(3, 0, 2, False, False, [], 0) + err = ArgErrCount(3, 0, 2, False, False, Defaults([]), 0) s = err.getmsg('foo') assert s == "foo() takes exactly 2 arguments (3 given)" - err = ArgErrCount(1, 0, 2, True, False, [], 1) + err = ArgErrCount(1, 0, 2, True, False, Defaults([]), 1) s = err.getmsg('foo') assert s == "foo() takes at least 2 arguments (1 given)" - err = ArgErrCount(3, 0, 2, True, False, ['a'], 0) + err = ArgErrCount(3, 0, 2, True, False, Defaults(['a']), 0) s = err.getmsg('foo') assert s == "foo() takes at most 2 arguments (3 given)" - err = ArgErrCount(0, 1, 2, True, False, ['a'], 1) + err = ArgErrCount(0, 1, 2, True, False, Defaults(['a']), 1) s = err.getmsg('foo') assert s == "foo() takes at least 1 argument (1 given)" - err = ArgErrCount(2, 1, 1, False, True, [], 0) + err = ArgErrCount(2, 1, 1, False, True, Defaults([]), 0) s = err.getmsg('foo') assert s == "foo() takes exactly 1 argument (3 given)" @@ -600,7 +603,7 @@ args = make_arguments_for_translation(space, [1]) sig = Signature(['a', 'b', 'c'], None, None) - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() @@ -612,25 +615,25 @@ args = make_arguments_for_translation(space, [1], {'c': 3, 'b': 2}) sig = Signature(['a', 'b', 'c'], None, None) - data = args.match_signature(sig, []) + data = args.match_signature(sig, Defaults([])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() args = make_arguments_for_translation(space, [1], {'c': 5}) sig = 
Signature(['a', 'b', 'c'], None, None) - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() args = make_arguments_for_translation(space, [1], {'c': 5, 'd': 7}) sig = Signature(['a', 'b', 'c'], None, 'kw') - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() args = make_arguments_for_translation(space, [1,2,3,4,5], {'e': 5, 'd': 7}) sig = Signature(['a', 'b', 'c'], 'r', 'kw') - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() @@ -638,7 +641,7 @@ w_stararg=[1], w_starstararg={'c': 5, 'd': 7}) sig = Signature(['a', 'b', 'c'], None, 'kw') - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() @@ -646,7 +649,7 @@ w_stararg=[3,4,5], w_starstararg={'e': 5, 'd': 7}) sig = Signature(['a', 'b', 'c'], 'r', 'kw') - data = args.match_signature(sig, [2, 3]) + data = args.match_signature(sig, Defaults([2, 3])) new_args = args.unmatch_signature(sig, data) assert args.unpack() == new_args.unpack() @@ -684,7 +687,7 @@ assert rawshape(args) == (2, ('g', ), True, True) def test_copy_and_shape(self): - space = DummySpace() + space = DummySpace() args = ArgumentsForTranslation(space, ['a'], ['x'], [1], ['w1'], {'y': 'w2'}) args1 = args.copy() diff --git a/pypy/objspace/std/dictmultiobject.py b/pypy/objspace/std/dictmultiobject.py --- a/pypy/objspace/std/dictmultiobject.py +++ b/pypy/objspace/std/dictmultiobject.py @@ -2,8 +2,9 @@ from pypy.objspace.std.model import registerimplementation, W_Object from pypy.objspace.std.register_all import register_all from pypy.interpreter 
import gateway +from pypy.interpreter.argument import Signature from pypy.interpreter.error import OperationError, operationerrfmt -from pypy.interpreter.argument import Signature +from pypy.interpreter.function import Defaults from pypy.module.__builtin__.__init__ import BUILTIN_TO_INDEX, OPTIMIZED_BUILTINS from pypy.rlib.objectmodel import r_dict, we_are_translated @@ -67,7 +68,7 @@ assert self.r_dict_content is None self.r_dict_content = r_dict(self.space.eq_w, self.space.hash_w) return self.r_dict_content - + def initialize_content(w_self, list_pairs_w): for w_k, w_v in list_pairs_w: @@ -188,7 +189,7 @@ def impl_fallback_delitem(self, w_key): del self.r_dict_content[w_key] - + def impl_fallback_length(self): return len(self.r_dict_content) @@ -305,7 +306,7 @@ def __init__(self, space): self.space = space self.content = {} - + def impl_setitem(self, w_key, w_value): space = self.space if space.is_w(space.type(w_key), space.w_str): @@ -326,7 +327,7 @@ raise KeyError else: self._as_rdict().impl_fallback_delitem(w_key) - + def impl_length(self): return len(self.content) @@ -605,7 +606,7 @@ init_signature = Signature(['seq_or_map'], None, 'kwargs') -init_defaults = [None] +init_defaults = Defaults([None]) def update1(space, w_dict, w_data): if space.findattr(w_data, space.wrap("keys")) is None: @@ -670,7 +671,7 @@ w_dict.delitem(w_key) except KeyError: space.raise_key_error(w_key) - + def len__DictMulti(space, w_dict): return space.wrap(w_dict.length()) @@ -701,7 +702,7 @@ return space.w_True def characterize(space, w_a, w_b): - """ (similar to CPython) + """ (similar to CPython) returns the smallest key in acontent for which b's value is different or absent and this value """ w_smallest_diff_a_key = None w_its_value = None @@ -735,10 +736,10 @@ w_rightdiff, w_rightval = characterize(space, w_right, w_left) if w_rightdiff is None: # w_leftdiff is not None, w_rightdiff is None - return space.w_True + return space.w_True w_res = space.lt(w_leftdiff, w_rightdiff) if 
(not space.is_true(w_res) and - space.eq_w(w_leftdiff, w_rightdiff) and + space.eq_w(w_leftdiff, w_rightdiff) and w_rightval is not None): w_res = space.lt(w_leftval, w_rightval) return w_res diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py --- a/pypy/interpreter/gateway.py +++ b/pypy/interpreter/gateway.py @@ -594,7 +594,7 @@ space = func.space activation = self.activation scope_w = args.parse_obj(w_obj, func.name, self.sig, - func.defs.getitems(), self.minargs) + func.defs, self.minargs) try: w_result = activation._run(space, scope_w) except DescrMismatch: diff --git a/pypy/interpreter/test/test_main.py b/pypy/interpreter/test/test_main.py --- a/pypy/interpreter/test/test_main.py +++ b/pypy/interpreter/test/test_main.py @@ -1,6 +1,6 @@ from cStringIO import StringIO -import py +import py from pypy.tool.udir import udir from pypy.interpreter.baseobjspace import OperationError from pypy.interpreter import main @@ -25,23 +25,23 @@ testresultoutput = '11\n' def checkoutput(space, expected_output,f,*args): - w_oldout = space.sys.get('stdout') + w_oldout = space.sys.get('stdout') capturefn = udir.join('capturefile') - capturefile = capturefn.open('w') + capturefile = capturefn.open('w') w_sys = space.sys.getmodule('sys') space.setattr(w_sys, space.wrap("stdout"), space.wrap(capturefile)) try: f(*(args + (space,))) finally: space.setattr(w_sys, space.wrap("stdout"), w_oldout) - capturefile.close() + capturefile.close() assert capturefn.read(mode='rU') == expected_output testfn = udir.join('tmp_hello_world.py') testmodule = 'tmp_hello_module' testpackage = 'tmp_package' -class TestMain: +class TestMain: def setup_class(cls): testfn.write(testcode, 'w') udir.join(testmodule + '.py').write(testmodulecode, 'w') diff --git a/pypy/objspace/std/setobject.py b/pypy/objspace/std/setobject.py --- a/pypy/objspace/std/setobject.py +++ b/pypy/objspace/std/setobject.py @@ -5,6 +5,7 @@ from pypy.interpreter.error import OperationError from pypy.interpreter import 
gateway from pypy.interpreter.argument import Signature +from pypy.interpreter.function import Defaults from pypy.objspace.std.settype import set_typedef as settypedef from pypy.objspace.std.frozensettype import frozenset_typedef as frozensettypedef @@ -14,7 +15,7 @@ # make sure that Base is used for Set and Frozenset in multimethod # declarations @classmethod - def is_implementation_for(cls, typedef): + def is_implementation_for(cls, typedef): if typedef is frozensettypedef or typedef is settypedef: assert cls is W_BaseSetObject return True @@ -619,7 +620,7 @@ cmp__Frozenset_frozensettypedef = cmp__Set_settypedef init_signature = Signature(['some_iterable'], None, None) -init_defaults = [None] +init_defaults = Defaults([None]) def init__Set(space, w_set, __args__): w_iterable, = __args__.parse_obj( None, 'set', @@ -641,7 +642,7 @@ del currently_in_repr[set_id] except: pass -""", filename=__file__) +""", filename=__file__) setrepr = app.interphook("setrepr") From commits-noreply at bitbucket.org Mon Mar 21 14:06:23 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 14:06:23 +0100 (CET) Subject: [pypy-svn] pypy default: merged upstream Message-ID: <20110321130623.03AAC282BD9@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42811:849129c29fd4 Date: 2011-03-21 09:05 -0400 http://bitbucket.org/pypy/pypy/changeset/849129c29fd4/ Log: merged upstream From commits-noreply at bitbucket.org Mon Mar 21 14:31:12 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 21 Mar 2011 14:31:12 +0100 (CET) Subject: [pypy-svn] pypy default: kill this assert, it is useless Message-ID: <20110321133112.C42FF282BD9@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42812:3b871a5ccb25 Date: 2011-03-18 15:50 +0100 http://bitbucket.org/pypy/pypy/changeset/3b871a5ccb25/ Log: kill this assert, it is useless diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- 
a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -980,7 +980,6 @@ while i < 640 * 480: assert len(img) == 3*350*480 assert len(intimg) == 640*480 - assert i >= 0 l = l + img[i] intimg[i] = (intimg[i-640] + l) i += 1 @@ -992,12 +991,12 @@ assert loop.match(""" i13 = int_lt(i8, 307200) guard_true(i13, descr=) - # the bound check guard on img has been killed (thanks to the 1st and 2nd asserts) + # the bound check guard on img has been killed (thanks to the asserts) i14 = getarrayitem_raw(i10, i8, descr=<.*ArrayNoLengthDescr>) i15 = int_add_ovf(i9, i14) guard_no_overflow(descr=) i17 = int_sub(i8, 640) - # the bound check guard on intimg has been killed (thanks to the 3rd assert) + # the bound check guard on intimg has been killed (thanks to the asserts) i18 = getarrayitem_raw(i11, i17, descr=<.*ArrayNoLengthDescr>) i19 = int_add_ovf(i18, i15) guard_no_overflow(descr=) From commits-noreply at bitbucket.org Mon Mar 21 14:31:13 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 21 Mar 2011 14:31:13 +0100 (CET) Subject: [pypy-svn] pypy default: herge heads Message-ID: <20110321133113.6B213282BD9@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42813:50f82a9190ee Date: 2011-03-21 09:56 +0100 http://bitbucket.org/pypy/pypy/changeset/50f82a9190ee/ Log: herge heads diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -730,7 +730,7 @@ where x and y can be either constants or variables. There are cases in which the second guard is proven to be always true. """ - + for a, b, res, opt_expected in (('2000', '2000', 20001000, True), ( '500', '500', 15001500, True), ( '300', '600', 16001700, False), @@ -830,7 +830,7 @@ test only checks that we get the expected result, not that any optimization has been applied. 
""" - ops = ('<', '>', '<=', '>=', '==', '!=') + ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): @@ -880,7 +880,7 @@ test only checks that we get the expected result, not that any optimization has been applied. """ - ops = ('<', '>', '<=', '>=', '==', '!=') + ops = ('<', '>', '<=', '>=', '==', '!=') for op1 in ops: for op2 in ops: for a,b in ((500, 500), (300, 600)): @@ -1007,3 +1007,33 @@ --TICK-- jump(p0, p1, p2, p3, p4, p5, p6, p7, i28, i15, i10, i11, descr=) """) + + def test_func_defaults(self): + def main(n): + i = 1 + while i < n: + i += len(xrange(i)) / i + return i + + log = self.run(main, [10000]) + assert log.result == 10000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i10 = int_lt(i5, i6) + guard_true(i10, descr=) + # This can be improved if the JIT realized the lookup of i5 produces + # a constant and thus can be removed entirely + i12 = int_sub(i5, 1) + i13 = uint_floordiv(i12, i7) + i15 = int_add(i13, 1) + i17 = int_lt(i15, 0) + guard_false(i17, descr=) + i18 = int_floordiv(i15, i5) + i19 = int_xor(i15, i5) + i20 = int_mod(i15, i5) + i21 = int_is_true(i20) + i22 = int_add_ovf(i5, i18) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, i22, i6, i7, p8, p9, descr=) + """) \ No newline at end of file From commits-noreply at bitbucket.org Mon Mar 21 14:31:14 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 21 Mar 2011 14:31:14 +0100 (CET) Subject: [pypy-svn] pypy default: (antocuni, alex_gaynor): fix the test for the newer version of pypy; also, check the full trace instead of just parts of it Message-ID: <20110321133114.0D49D282BD9@codespeak.net> Author: Antonio Cuni Branch: Changeset: r42814:2784a1d3e285 Date: 2011-03-21 14:30 +0100 http://bitbucket.org/pypy/pypy/changeset/2784a1d3e285/ Log: (antocuni, alex_gaynor): fix the test for the newer version of pypy; also, check the full trace instead of just parts of it diff --git 
a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -500,24 +500,24 @@ log = self.run(main, [1000], threshold=400) assert log.result == 1000 * 999 / 2 loop, = log.loops_by_filename(self.filepath) - assert loop.match_by_id('getitem', opcode='BINARY_SUBSCR', expected_src=""" - i43 = int_lt(i25, 0) - guard_false(i43, descr=) - i44 = int_ge(i25, i39) - guard_false(i44, descr=) - i45 = int_mul(i25, i33) + assert loop.match(""" + i16 = int_ge(i11, i12) + guard_false(i16, descr=) + i17 = int_mul(i11, i14) + i18 = int_add(i15, i17) + i20 = int_add(i11, 1) + i21 = force_token() + setfield_gc(p4, i20, descr=<.* .*W_AbstractSeqIterObject.inst_index .*>) + i23 = int_lt(i18, 0) + guard_false(i23, descr=) + i25 = int_ge(i18, i9) + guard_false(i25, descr=) + i26 = int_mul(i18, i10) + i27 = int_add_ovf(i7, i26) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, p6, i27, i18, i9, i10, i20, i12, p13, i14, i15, descr=) """) - assert loop.match_by_id('for', opcode='FOR_ITER', expected_src=""" - i23 = int_ge(i11, i12) - guard_false(i23, descr=) - i24 = int_mul(i11, i14) - i25 = int_add(i15, i24) - i27 = int_add(i11, 1) - # even if it's a the end of the loop, the jump still belongs to - # the FOR_ITER opcode - jump(p0, p1, p2, p3, p4, p5, p6, i46, i25, i39, i33, i27, i12, p13, i14, i15, p16, i17, i18, p19, p20, i21, i22, descr=) - """) - def test_exception_inside_loop_1(self): def main(n): @@ -1036,4 +1036,4 @@ guard_no_overflow(descr=) --TICK-- jump(p0, p1, p2, p3, p4, i22, i6, i7, p8, p9, descr=) - """) \ No newline at end of file + """) From commits-noreply at bitbucket.org Mon Mar 21 14:31:14 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 21 Mar 2011 14:31:14 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110321133114.9FCB2282BDD@codespeak.net> Author: 
Antonio Cuni Branch: Changeset: r42815:8e98c5652219 Date: 2011-03-21 14:30 +0100 http://bitbucket.org/pypy/pypy/changeset/8e98c5652219/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 21 16:22:23 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 16:22:23 +0100 (CET) Subject: [pypy-svn] pypy default: fix geninterp and rpystone tests Message-ID: <20110321152223.877ED2A2002@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42816:02cd6973ee70 Date: 2011-03-21 11:22 -0400 http://bitbucket.org/pypy/pypy/changeset/02cd6973ee70/ Log: fix geninterp and rpystone tests diff --git a/pypy/translator/geninterplevel.py b/pypy/translator/geninterplevel.py --- a/pypy/translator/geninterplevel.py +++ b/pypy/translator/geninterplevel.py @@ -124,7 +124,7 @@ self.initcode.append1('%s = %s(%r)' % ( name, unique, repr(value) ) ) -def bltinmod_helper(self, mod): +def bltinmod_helper(self, mod): name = self.uniquename("mod_%s" % mod.__name__) unique = self.uniquenameofprebuilt("bltinmod_helper", bltinmod_helper) self.initcode.append1( @@ -144,7 +144,7 @@ self.modname = self.trans_funcname(modname or uniquemodulename(entrypoint)) self.moddict = moddict # the dict if we translate a module - + def late_OperationError(): self.initcode.append1( 'from pypy.interpreter.error import OperationError as gOperationError') @@ -187,19 +187,19 @@ def _issubtype(cls1, cls2): raise TypeError, "this dummy should *not* be reached" __builtin__._issubtype = _issubtype - + class bltinstub: def __init__(self, name): self.__name__ = name def __repr__(self): return '<%s>' % self.__name__ - + self.ibuiltin_ids = identity_dict() self.ibuiltin_ids.update([ (value, bltinstub(key)) for key, value in __builtin__.__dict__.items() if callable(value) and type(value) not in [types.ClassType, type] ] ) - + self.space = FlowObjSpace() # for introspection self.use_fast_call = True @@ -210,7 +210,7 @@ self._signature_cache = {} self._defaults_cache = {} - + def expr(self, v, 
localscope, wrapped = True): if isinstance(v, Variable): return localscope.localname(v.name, wrapped) @@ -250,7 +250,7 @@ isinstance(frm_lst, Constant) and frm_lst.value is None): return "%s = space.getbuiltinmodule(%r)" % (self.expr(op.result, localscope), name.value) - exv = self.expr(v, localscope) + exv = self.expr(v, localscope) # default for a spacecall: fmt = "%(res)s = space.call_function(%(func)s, %(args)s)" # see if we can optimize for a fast call. @@ -380,7 +380,7 @@ txt = func() self.rpynames[key] = txt return txt - + except KeyError: if debug: stackentry = debug, obj @@ -455,7 +455,7 @@ except KeyError: self.rpynames[key] = txt = self.uniquename(basename) return txt - + def nameof_object(self, value): if type(value) is not object: # try to just wrap it? @@ -517,7 +517,7 @@ self.initcode.append1('import %s as _tmp' % value.__name__) self.initcode.append1('%s = space.wrap(_tmp)' % (name)) return name - + def nameof_int(self, value): if value >= 0: @@ -564,7 +564,7 @@ else: self.initcode.append('%s = space.wrap(%r)' % (name, value)) return name - + def nameof_str(self, value): if [c for c in value if c<' ' or c>'~' or c=='"' or c=='\\']: # non-printable string @@ -623,11 +623,11 @@ positional, varargs, varkwds, defs = inspect.getargspec(func) if varargs is varkwds is defs is None: unwrap = ', '.join(['gateway.W_Root']*len(positional)) - interp_name = 'fastf_' + name[6:] + interp_name = 'fastf_' + name[6:] else: unwrap = 'gateway.Arguments' interp_name = 'f_' + name[6:] - + self.initcode.append1('from pypy.interpreter import gateway') self.initcode.append1('%s = space.wrap(gateway.interp2app(%s, unwrap_spec=[gateway.ObjSpace, %s]))' % (name, interp_name, unwrap)) @@ -642,7 +642,7 @@ self.initcode.append1('%s = space.wrap(%s)' % (name, functionname)) return name - def nameof_instancemethod(self, meth): + def nameof_instancemethod(self, meth): if (not hasattr(meth.im_func, 'func_globals') or meth.im_func.func_globals is None): # built-in methods (bound or 
not) on top of PyPy or possibly 2.4 @@ -706,7 +706,7 @@ arities[name] = arity del arities["isinstance"] return self._space_arities - + def try_space_shortcut_for_builtin(self, v, nargs, args): if isinstance(v, Constant) and v.value in self.ibuiltin_ids: name = self.ibuiltin_ids[v.value].__name__ @@ -714,7 +714,7 @@ self.space_arities().get(name, -1) == nargs): return "space.%s" % name return None - + def nameof_builtin_function_or_method(self, func): if func.__self__ is None: return self.nameof_builtin_function(func) @@ -739,7 +739,7 @@ # return "(space.builtin.get(space.str_w(%s)))" % self.nameof(func.__name__) if modname == 'sys': # be lazy - return "(space.sys.get(space.str_w(%s)))" % self.nameof(func.__name__) + return "(space.sys.get(space.str_w(%s)))" % self.nameof(func.__name__) else: name = self.uniquename('gbltin_' + func.__name__) self.initcode.append1('%s = space.getattr(%s, %s)' % ( @@ -807,7 +807,7 @@ if isinstance(value, MethodType) and value.im_self is cls: log.WARNING("skipped classmethod: %s" % value) continue - + yield 'space.setattr(%s, %s, %s)' % ( name, self.nameof(key), self.nameof(value)) @@ -841,7 +841,7 @@ "klass": name, "name" : cls_name, "meta" : metaclass} ) - + self.later(initclassobj()) return name @@ -1058,7 +1058,7 @@ order_sections(fname) if self.ftmpname: copyfile(self.ftmpname, self.fname) - + def gen_source_temp(self): f = self.f @@ -1087,7 +1087,7 @@ # info.entrypoint must be done *after* __doc__ is handled, # because nameof(entrypoint) might touch __doc__ early. 
info["entrypoint"] = self.nameof(self.entrypoint) - + # function implementations while self.pendingfunctions or self.latercode: if self.pendingfunctions: @@ -1130,7 +1130,7 @@ # we should have an entrypoint function info['entrypointname'] = self.trans_funcname(self.entrypoint.__name__) print >> self.f, self.RPY_INIT_FOOTER % info - + def gen_global_declarations(self): g = self.globaldecl if g: @@ -1175,7 +1175,8 @@ pass defaultsname = self.uniquename('default') self._defaults_cache[key] = defaultsname - self.initcode.append("%s = [%s]" % (defaultsname, ', '.join(names))) + self.initcode.append("from pypy.interpreter.function import Defaults") + self.initcode.append("%s = Defaults([%s])" % (defaultsname, ', '.join(names))) return defaultsname def gen_rpyfunction(self, func): @@ -1284,7 +1285,7 @@ print >> f, line print >> f - + print >> f, fast_function_header if docstr is not None: print >> f, docstr @@ -1311,7 +1312,7 @@ yield " goto = %s # startblock" % self.mklabel(blocknum[start]) yield " while True:" - + def render_block(block): catch_exception = block.exitswitch == c_last_exception regular_op = len(block.operations) - catch_exception From commits-noreply at bitbucket.org Mon Mar 21 17:27:30 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 17:27:30 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: inline pyrepl Message-ID: <20110321162730.BF3972A2002@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42817:6a364c6904cb Date: 2011-03-21 17:15 +0100 http://bitbucket.org/pypy/pypy/changeset/6a364c6904cb/ Log: inline pyrepl diff --git a/lib_pypy/pyrepl/module_lister.py b/lib_pypy/pyrepl/module_lister.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/module_lister.py @@ -0,0 +1,70 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without 
fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from pyrepl.completing_reader import uniqify +import os, sys + +# for the completion support. +# this is all quite nastily written. +_packages = {} + +def _make_module_list_dir(dir, suffs, prefix=''): + l = [] + for fname in os.listdir(dir): + file = os.path.join(dir, fname) + if os.path.isfile(file): + for suff in suffs: + if fname.endswith(suff): + l.append( prefix + fname[:-len(suff)] ) + break + elif os.path.isdir(file) \ + and os.path.exists(os.path.join(file, "__init__.py")): + l.append( prefix + fname ) + _packages[prefix + fname] = _make_module_list_dir( + file, suffs, prefix + fname + '.' ) + l = uniqify(l) + l.sort() + return l + +def _make_module_list(): + import imp + suffs = [x[0] for x in imp.get_suffixes() if x[0] != '.pyc'] + def compare(x, y): + c = -cmp(len(x), len(y)) + if c: + return c + else: + return -cmp(x, y) + suffs.sort(compare) + _packages[''] = list(sys.builtin_module_names) + for dir in sys.path: + if dir == '': + dir = '.' 
+ if os.path.isdir(dir): + _packages[''] += _make_module_list_dir(dir, suffs) + _packages[''].sort() + +def find_modules(stem): + l = stem.split('.') + pack = '.'.join(l[:-1]) + try: + mods = _packages[pack] + except KeyError: + raise ImportError, "can't find \"%s\" package"%pack + return [mod for mod in mods if mod.startswith(stem)] diff --git a/lib_pypy/pyrepl/commands.py b/lib_pypy/pyrepl/commands.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/commands.py @@ -0,0 +1,385 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Antonio Cuni +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import sys, os + +# Catgories of actions: +# killing +# yanking +# motion +# editing +# history +# finishing +# [completion] + +class Command(object): + finish = 0 + kills_digit_arg = 1 + def __init__(self, reader, (event_name, event)): + self.reader = reader + self.event = event + self.event_name = event_name + def do(self): + pass + +class KillCommand(Command): + def kill_range(self, start, end): + if start == end: + return + r = self.reader + b = r.buffer + text = b[start:end] + del b[start:end] + if is_kill(r.last_command): + if start < r.pos: + r.kill_ring[-1] = text + r.kill_ring[-1] + else: + r.kill_ring[-1] = r.kill_ring[-1] + text + else: + r.kill_ring.append(text) + r.pos = start + r.dirty = 1 + +class YankCommand(Command): + pass + +class MotionCommand(Command): + pass + +class EditCommand(Command): + pass + +class FinishCommand(Command): + finish = 1 + pass + +def is_kill(command): + return command and issubclass(command, KillCommand) + +def is_yank(command): + return command and issubclass(command, YankCommand) + +# etc + +class digit_arg(Command): + kills_digit_arg = 0 + def do(self): + r = self.reader + c = self.event[-1] + if c == "-": + if r.arg is not None: + r.arg = -r.arg + else: + r.arg = -1 + else: + d = int(c) + if r.arg is None: + r.arg = d + else: + if r.arg < 0: + r.arg = 10*r.arg - d + else: + r.arg = 10*r.arg + d + r.dirty = 1 + +class clear_screen(Command): + def do(self): + r = self.reader + r.console.clear() + r.dirty = 1 + +class refresh(Command): + def do(self): + self.reader.dirty = 1 + +class repaint(Command): + def do(self): + self.reader.dirty = 1 + self.reader.console.repaint_prep() + +class kill_line(KillCommand): + def do(self): + r = self.reader + b = r.buffer + eol = r.eol() + for c in b[r.pos:eol]: + if not c.isspace(): + self.kill_range(r.pos, eol) + return + else: + self.kill_range(r.pos, eol+1) + +class unix_line_discard(KillCommand): + def do(self): + r = self.reader + self.kill_range(r.bol(), r.pos) + +# XXX 
unix_word_rubout and backward_kill_word should actually +# do different things... + +class unix_word_rubout(KillCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + self.kill_range(r.bow(), r.pos) + +class kill_word(KillCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + self.kill_range(r.pos, r.eow()) + +class backward_kill_word(KillCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + self.kill_range(r.bow(), r.pos) + +class yank(YankCommand): + def do(self): + r = self.reader + if not r.kill_ring: + r.error("nothing to yank") + return + r.insert(r.kill_ring[-1]) + +class yank_pop(YankCommand): + def do(self): + r = self.reader + b = r.buffer + if not r.kill_ring: + r.error("nothing to yank") + return + if not is_yank(r.last_command): + r.error("previous command was not a yank") + return + repl = len(r.kill_ring[-1]) + r.kill_ring.insert(0, r.kill_ring.pop()) + t = r.kill_ring[-1] + b[r.pos - repl:r.pos] = t + r.pos = r.pos - repl + len(t) + r.dirty = 1 + +class interrupt(FinishCommand): + def do(self): + import signal + self.reader.console.finish() + os.kill(os.getpid(), signal.SIGINT) + +class suspend(Command): + def do(self): + import signal + r = self.reader + p = r.pos + r.console.finish() + os.kill(os.getpid(), signal.SIGSTOP) + ## this should probably be done + ## in a handler for SIGCONT? 
+ r.console.prepare() + r.pos = p + r.posxy = 0, 0 + r.dirty = 1 + r.console.screen = [] + +class up(MotionCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + bol1 = r.bol() + if bol1 == 0: + if r.historyi > 0: + r.select_item(r.historyi - 1) + return + r.pos = 0 + r.error("start of buffer") + return + bol2 = r.bol(bol1-1) + line_pos = r.pos - bol1 + if line_pos > bol1 - bol2 - 1: + r.sticky_y = line_pos + r.pos = bol1 - 1 + else: + r.pos = bol2 + line_pos + +class down(MotionCommand): + def do(self): + r = self.reader + b = r.buffer + for i in range(r.get_arg()): + bol1 = r.bol() + eol1 = r.eol() + if eol1 == len(b): + if r.historyi < len(r.history): + r.select_item(r.historyi + 1) + r.pos = r.eol(0) + return + r.pos = len(b) + r.error("end of buffer") + return + eol2 = r.eol(eol1+1) + if r.pos - bol1 > eol2 - eol1 - 1: + r.pos = eol2 + else: + r.pos = eol1 + (r.pos - bol1) + 1 + +class left(MotionCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + p = r.pos - 1 + if p >= 0: + r.pos = p + else: + self.reader.error("start of buffer") + +class right(MotionCommand): + def do(self): + r = self.reader + b = r.buffer + for i in range(r.get_arg()): + p = r.pos + 1 + if p <= len(b): + r.pos = p + else: + self.reader.error("end of buffer") + +class beginning_of_line(MotionCommand): + def do(self): + self.reader.pos = self.reader.bol() + +class end_of_line(MotionCommand): + def do(self): + r = self.reader + self.reader.pos = self.reader.eol() + +class home(MotionCommand): + def do(self): + self.reader.pos = 0 + +class end(MotionCommand): + def do(self): + self.reader.pos = len(self.reader.buffer) + +class forward_word(MotionCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + r.pos = r.eow() + +class backward_word(MotionCommand): + def do(self): + r = self.reader + for i in range(r.get_arg()): + r.pos = r.bow() + +class self_insert(EditCommand): + def do(self): + r = self.reader + r.insert(self.event 
* r.get_arg()) + +class insert_nl(EditCommand): + def do(self): + r = self.reader + r.insert("\n" * r.get_arg()) + +class transpose_characters(EditCommand): + def do(self): + r = self.reader + b = r.buffer + s = r.pos - 1 + if s < 0: + r.error("cannot transpose at start of buffer") + else: + if s == len(b): + s -= 1 + t = min(s + r.get_arg(), len(b) - 1) + c = b[s] + del b[s] + b.insert(t, c) + r.pos = t + r.dirty = 1 + +class backspace(EditCommand): + def do(self): + r = self.reader + b = r.buffer + for i in range(r.get_arg()): + if r.pos > 0: + r.pos -= 1 + del b[r.pos] + r.dirty = 1 + else: + self.reader.error("can't backspace at start") + +class delete(EditCommand): + def do(self): + r = self.reader + b = r.buffer + if ( r.pos == 0 and len(b) == 0 # this is something of a hack + and self.event[-1] == "\004"): + r.update_screen() + r.console.finish() + raise EOFError + for i in range(r.get_arg()): + if r.pos != len(b): + del b[r.pos] + r.dirty = 1 + else: + self.reader.error("end of buffer") + +class accept(FinishCommand): + def do(self): + pass + +class help(Command): + def do(self): + self.reader.msg = self.reader.help_text + self.reader.dirty = 1 + +class invalid_key(Command): + def do(self): + pending = self.reader.console.getpending() + s = ''.join(self.event) + pending.data + self.reader.error("`%r' not bound"%s) + +class invalid_command(Command): + def do(self): + s = self.event_name + self.reader.error("command `%s' not known"%s) + +class qIHelp(Command): + def do(self): + r = self.reader + r.insert((self.event + r.console.getpending().data) * r.get_arg()) + r.pop_input_trans() + +from pyrepl import input + +class QITrans(object): + def push(self, evt): + self.evt = evt + def get(self): + return ('qIHelp', self.evt.raw) + +class quoted_insert(Command): + kills_digit_arg = 0 + def do(self): + self.reader.push_input_trans(QITrans()) diff --git a/lib_pypy/pyrepl/input.py b/lib_pypy/pyrepl/input.py new file mode 100644 --- /dev/null +++ 
b/lib_pypy/pyrepl/input.py @@ -0,0 +1,97 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# (naming modules after builtin functions is not such a hot idea...) + +# an KeyTrans instance translates Event objects into Command objects + +# hmm, at what level do we want [C-i] and [tab] to be equivalent? +# [meta-a] and [esc a]? obviously, these are going to be equivalent +# for the UnixConsole, but should they be for PygameConsole? + +# it would in any situation seem to be a bad idea to bind, say, [tab] +# and [C-i] to *different* things... but should binding one bind the +# other? + +# executive, temporary decision: [tab] and [C-i] are distinct, but +# [meta-key] is identified with [esc key]. We demand that any console +# class does quite a lot towards emulating a unix terminal. 
+ +from pyrepl import unicodedata_ + +class InputTranslator(object): + def push(self, evt): + pass + def get(self): + pass + def empty(self): + pass + +class KeymapTranslator(InputTranslator): + def __init__(self, keymap, verbose=0, + invalid_cls=None, character_cls=None): + self.verbose = verbose + from pyrepl.keymap import compile_keymap, parse_keys + self.keymap = keymap + self.invalid_cls = invalid_cls + self.character_cls = character_cls + d = {} + for keyspec, command in keymap: + keyseq = tuple(parse_keys(keyspec)) + d[keyseq] = command + if self.verbose: + print d + self.k = self.ck = compile_keymap(d, ()) + self.results = [] + self.stack = [] + def push(self, evt): + if self.verbose: + print "pushed", evt.data, + key = evt.data + d = self.k.get(key) + if isinstance(d, dict): + if self.verbose: + print "transition" + self.stack.append(key) + self.k = d + else: + if d is None: + if self.verbose: + print "invalid" + if self.stack or len(key) > 1 or unicodedata_.category(key) == 'C': + self.results.append( + (self.invalid_cls, self.stack + [key])) + else: + # small optimization: + self.k[key] = self.character_cls + self.results.append( + (self.character_cls, [key])) + else: + if self.verbose: + print "matched", d + self.results.append((d, self.stack + [key])) + self.stack = [] + self.k = self.ck + def get(self): + if self.results: + return self.results.pop(0) + else: + return None + def empty(self): + return not self.results diff --git a/lib_pypy/pyrepl/completer.py b/lib_pypy/pyrepl/completer.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/completer.py @@ -0,0 +1,87 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. 
+# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import __builtin__ + +class Completer: + def __init__(self, ns): + self.ns = ns + + def complete(self, text): + if "." in text: + return self.attr_matches(text) + else: + return self.global_matches(text) + + def global_matches(self, text): + """Compute matches when text is a simple name. + + Return a list of all keywords, built-in functions and names + currently defines in __main__ that match. + + """ + import keyword + matches = [] + n = len(text) + for list in [keyword.kwlist, + __builtin__.__dict__.keys(), + self.ns.keys()]: + for word in list: + if word[:n] == text and word != "__builtins__": + matches.append(word) + return matches + + def attr_matches(self, text): + """Compute matches when text contains a dot. + + Assuming the text is of the form NAME.NAME....[NAME], and is + evaluatable in the globals of __main__, it will be evaluated + and its attributes (as revealed by dir()) are used as possible + completions. (For class instances, class members are are also + considered.) + + WARNING: this can still invoke arbitrary C code, if an object + with a __getattr__ hook is evaluated. 
+ + """ + import re + m = re.match(r"(\w+(\.\w+)*)\.(\w*)", text) + if not m: + return [] + expr, attr = m.group(1, 3) + object = eval(expr, self.ns) + words = dir(object) + if hasattr(object, '__class__'): + words.append('__class__') + words = words + get_class_members(object.__class__) + matches = [] + n = len(attr) + for word in words: + if word[:n] == attr and word != "__builtins__": + matches.append("%s.%s" % (expr, word)) + return matches + +def get_class_members(klass): + ret = dir(klass) + if hasattr(klass, '__bases__'): + for base in klass.__bases__: + ret = ret + get_class_members(base) + return ret + + diff --git a/lib_pypy/pyrepl/unicodedata_.py b/lib_pypy/pyrepl/unicodedata_.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/unicodedata_.py @@ -0,0 +1,59 @@ +try: + from unicodedata import * +except ImportError: + + def category(ch): + """ + ASCII only implementation + """ + if type(ch) is not unicode: + raise TypeError + if len(ch) != 1: + raise TypeError + return _categories.get(ord(ch), 'Co') # "Other, private use" + + _categories = { + 0: 'Cc', 1: 'Cc', 2: 'Cc', 3: 'Cc', 4: 'Cc', 5: 'Cc', + 6: 'Cc', 7: 'Cc', 8: 'Cc', 9: 'Cc', 10: 'Cc', 11: 'Cc', + 12: 'Cc', 13: 'Cc', 14: 'Cc', 15: 'Cc', 16: 'Cc', 17: 'Cc', + 18: 'Cc', 19: 'Cc', 20: 'Cc', 21: 'Cc', 22: 'Cc', 23: 'Cc', + 24: 'Cc', 25: 'Cc', 26: 'Cc', 27: 'Cc', 28: 'Cc', 29: 'Cc', + 30: 'Cc', 31: 'Cc', 32: 'Zs', 33: 'Po', 34: 'Po', 35: 'Po', + 36: 'Sc', 37: 'Po', 38: 'Po', 39: 'Po', 40: 'Ps', 41: 'Pe', + 42: 'Po', 43: 'Sm', 44: 'Po', 45: 'Pd', 46: 'Po', 47: 'Po', + 48: 'Nd', 49: 'Nd', 50: 'Nd', 51: 'Nd', 52: 'Nd', 53: 'Nd', + 54: 'Nd', 55: 'Nd', 56: 'Nd', 57: 'Nd', 58: 'Po', 59: 'Po', + 60: 'Sm', 61: 'Sm', 62: 'Sm', 63: 'Po', 64: 'Po', 65: 'Lu', + 66: 'Lu', 67: 'Lu', 68: 'Lu', 69: 'Lu', 70: 'Lu', 71: 'Lu', + 72: 'Lu', 73: 'Lu', 74: 'Lu', 75: 'Lu', 76: 'Lu', 77: 'Lu', + 78: 'Lu', 79: 'Lu', 80: 'Lu', 81: 'Lu', 82: 'Lu', 83: 'Lu', + 84: 'Lu', 85: 'Lu', 86: 'Lu', 87: 'Lu', 88: 'Lu', 89: 'Lu', + 
90: 'Lu', 91: 'Ps', 92: 'Po', 93: 'Pe', 94: 'Sk', 95: 'Pc', + 96: 'Sk', 97: 'Ll', 98: 'Ll', 99: 'Ll', 100: 'Ll', 101: 'Ll', + 102: 'Ll', 103: 'Ll', 104: 'Ll', 105: 'Ll', 106: 'Ll', 107: 'Ll', + 108: 'Ll', 109: 'Ll', 110: 'Ll', 111: 'Ll', 112: 'Ll', 113: 'Ll', + 114: 'Ll', 115: 'Ll', 116: 'Ll', 117: 'Ll', 118: 'Ll', 119: 'Ll', + 120: 'Ll', 121: 'Ll', 122: 'Ll', 123: 'Ps', 124: 'Sm', 125: 'Pe', + 126: 'Sm', 127: 'Cc', 128: 'Cc', 129: 'Cc', 130: 'Cc', 131: 'Cc', + 132: 'Cc', 133: 'Cc', 134: 'Cc', 135: 'Cc', 136: 'Cc', 137: 'Cc', + 138: 'Cc', 139: 'Cc', 140: 'Cc', 141: 'Cc', 142: 'Cc', 143: 'Cc', + 144: 'Cc', 145: 'Cc', 146: 'Cc', 147: 'Cc', 148: 'Cc', 149: 'Cc', + 150: 'Cc', 151: 'Cc', 152: 'Cc', 153: 'Cc', 154: 'Cc', 155: 'Cc', + 156: 'Cc', 157: 'Cc', 158: 'Cc', 159: 'Cc', 160: 'Zs', 161: 'Po', + 162: 'Sc', 163: 'Sc', 164: 'Sc', 165: 'Sc', 166: 'So', 167: 'So', + 168: 'Sk', 169: 'So', 170: 'Ll', 171: 'Pi', 172: 'Sm', 173: 'Cf', + 174: 'So', 175: 'Sk', 176: 'So', 177: 'Sm', 178: 'No', 179: 'No', + 180: 'Sk', 181: 'Ll', 182: 'So', 183: 'Po', 184: 'Sk', 185: 'No', + 186: 'Ll', 187: 'Pf', 188: 'No', 189: 'No', 190: 'No', 191: 'Po', + 192: 'Lu', 193: 'Lu', 194: 'Lu', 195: 'Lu', 196: 'Lu', 197: 'Lu', + 198: 'Lu', 199: 'Lu', 200: 'Lu', 201: 'Lu', 202: 'Lu', 203: 'Lu', + 204: 'Lu', 205: 'Lu', 206: 'Lu', 207: 'Lu', 208: 'Lu', 209: 'Lu', + 210: 'Lu', 211: 'Lu', 212: 'Lu', 213: 'Lu', 214: 'Lu', 215: 'Sm', + 216: 'Lu', 217: 'Lu', 218: 'Lu', 219: 'Lu', 220: 'Lu', 221: 'Lu', + 222: 'Lu', 223: 'Ll', 224: 'Ll', 225: 'Ll', 226: 'Ll', 227: 'Ll', + 228: 'Ll', 229: 'Ll', 230: 'Ll', 231: 'Ll', 232: 'Ll', 233: 'Ll', + 234: 'Ll', 235: 'Ll', 236: 'Ll', 237: 'Ll', 238: 'Ll', 239: 'Ll', + 240: 'Ll', 241: 'Ll', 242: 'Ll', 243: 'Ll', 244: 'Ll', 245: 'Ll', + 246: 'Ll', 247: 'Sm', 248: 'Ll', 249: 'Ll', 250: 'Ll', 251: 'Ll', + 252: 'Ll', 253: 'Ll', 254: 'Ll' + } diff --git a/lib_pypy/pyrepl/keymaps.py b/lib_pypy/pyrepl/keymaps.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/keymaps.py 
@@ -0,0 +1,140 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +reader_emacs_keymap = tuple( + [(r'\C-a', 'beginning-of-line'), + (r'\C-b', 'left'), + (r'\C-c', 'interrupt'), + (r'\C-d', 'delete'), + (r'\C-e', 'end-of-line'), + (r'\C-f', 'right'), + (r'\C-g', 'cancel'), + (r'\C-h', 'backspace'), + (r'\C-j', 'self-insert'), + (r'\', 'accept'), + (r'\C-k', 'kill-line'), + (r'\C-l', 'clear-screen'), +# (r'\C-m', 'accept'), + (r'\C-q', 'quoted-insert'), + (r'\C-t', 'transpose-characters'), + (r'\C-u', 'unix-line-discard'), + (r'\C-v', 'quoted-insert'), + (r'\C-w', 'unix-word-rubout'), + (r'\C-x\C-u', 'upcase-region'), + (r'\C-y', 'yank'), + (r'\C-z', 'suspend'), + + (r'\M-b', 'backward-word'), + (r'\M-c', 'capitalize-word'), + (r'\M-d', 'kill-word'), + (r'\M-f', 'forward-word'), + (r'\M-l', 'downcase-word'), + (r'\M-t', 'transpose-words'), + (r'\M-u', 'upcase-word'), + (r'\M-y', 'yank-pop'), + (r'\M--', 'digit-arg'), + (r'\M-0', 'digit-arg'), + (r'\M-1', 'digit-arg'), + (r'\M-2', 'digit-arg'), + (r'\M-3', 'digit-arg'), + (r'\M-4', 'digit-arg'), + (r'\M-5', 'digit-arg'), + (r'\M-6', 'digit-arg'), + (r'\M-7', 'digit-arg'), + (r'\M-8', 'digit-arg'), + (r'\M-9', 
'digit-arg'), + (r'\M-\n', 'self-insert'), + (r'\', 'self-insert')] + \ + [(c, 'self-insert') + for c in map(chr, range(32, 127)) if c <> '\\'] + \ + [(c, 'self-insert') + for c in map(chr, range(128, 256)) if c.isalpha()] + \ + [(r'\', 'up'), + (r'\', 'down'), + (r'\', 'left'), + (r'\', 'right'), + (r'\', 'quoted-insert'), + (r'\', 'delete'), + (r'\', 'backspace'), + (r'\M-\', 'backward-kill-word'), + (r'\', 'end'), + (r'\', 'home'), + (r'\', 'help'), + (r'\EOF', 'end'), # the entries in the terminfo database for xterms + (r'\EOH', 'home'), # seem to be wrong. this is a less than ideal + # workaround + ]) + +hist_emacs_keymap = reader_emacs_keymap + ( + (r'\C-n', 'next-history'), + (r'\C-p', 'previous-history'), + (r'\C-o', 'operate-and-get-next'), + (r'\C-r', 'reverse-history-isearch'), + (r'\C-s', 'forward-history-isearch'), + (r'\M-r', 'restore-history'), + (r'\M-.', 'yank-arg'), + (r'\', 'last-history'), + (r'\', 'first-history')) + +comp_emacs_keymap = hist_emacs_keymap + ( + (r'\t', 'complete'),) + +python_emacs_keymap = comp_emacs_keymap + ( + (r'\n', 'maybe-accept'), + (r'\M-\n', 'self-insert')) + +reader_vi_insert_keymap = tuple( + [(c, 'self-insert') + for c in map(chr, range(32, 127)) if c <> '\\'] + \ + [(c, 'self-insert') + for c in map(chr, range(128, 256)) if c.isalpha()] + \ + [(r'\C-d', 'delete'), + (r'\', 'backspace'), + ('')]) + +reader_vi_command_keymap = tuple( + [ + ('E', 'enter-emacs-mode'), + ('R', 'enter-replace-mode'), + ('dw', 'delete-word'), + ('dd', 'delete-line'), + + ('h', 'left'), + ('i', 'enter-insert-mode'), + ('j', 'down'), + ('k', 'up'), + ('l', 'right'), + ('r', 'replace-char'), + ('w', 'forward-word'), + ('x', 'delete'), + ('.', 'repeat-edit'), # argh! 
+ (r'\', 'enter-insert-mode'), + ] + + [(c, 'digit-arg') for c in '01234567689'] + + []) + + +reader_keymaps = { + 'emacs' : reader_emacs_keymap, + 'vi-insert' : reader_vi_insert_keymap, + 'vi-command' : reader_vi_command_keymap + } + +del c # from the listcomps + diff --git a/lib_pypy/pyrepl/readline.py b/lib_pypy/pyrepl/readline.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/readline.py @@ -0,0 +1,404 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Alex Gaynor +# Antonio Cuni +# Armin Rigo +# Holger Krekel +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""A compatibility wrapper reimplementing the 'readline' standard module +on top of pyrepl. Not all functionalities are supported. Contains +extensions for multiline input. 
+""" + +import sys, os +from pyrepl import commands +from pyrepl.historical_reader import HistoricalReader +from pyrepl.completing_reader import CompletingReader +from pyrepl.unix_console import UnixConsole, _error + + +ENCODING = 'latin1' # XXX hard-coded + +__all__ = ['add_history', + 'clear_history', + 'get_begidx', + 'get_completer', + 'get_completer_delims', + 'get_current_history_length', + 'get_endidx', + 'get_history_item', + 'get_history_length', + 'get_line_buffer', + 'insert_text', + 'parse_and_bind', + 'read_history_file', + 'read_init_file', + 'redisplay', + 'remove_history_item', + 'replace_history_item', + 'set_completer', + 'set_completer_delims', + 'set_history_length', + 'set_pre_input_hook', + 'set_startup_hook', + 'write_history_file', + # ---- multiline extensions ---- + 'multiline_input', + ] + +# ____________________________________________________________ + +class ReadlineConfig(object): + readline_completer = None + completer_delims = dict.fromkeys(' \t\n`~!@#$%^&*()-=+[{]}\\|;:\'",<>/?') + +class ReadlineAlikeReader(HistoricalReader, CompletingReader): + + assume_immutable_completions = False + use_brackets = False + sort_in_column = True + + def error(self, msg="none"): + pass # don't show error messages by default + + def get_stem(self): + b = self.buffer + p = self.pos - 1 + completer_delims = self.config.completer_delims + while p >= 0 and b[p] not in completer_delims: + p -= 1 + return ''.join(b[p+1:self.pos]) + + def get_completions(self, stem): + result = [] + function = self.config.readline_completer + if function is not None: + try: + stem = str(stem) # rlcompleter.py seems to not like unicode + except UnicodeEncodeError: + pass # but feed unicode anyway if we have no choice + state = 0 + while True: + try: + next = function(stem, state) + except: + break + if not isinstance(next, str): + break + result.append(next) + state += 1 + # emulate the behavior of the standard readline that sorts + # the completions before displaying 
them. + result.sort() + return result + + def get_trimmed_history(self, maxlength): + if maxlength >= 0: + cut = len(self.history) - maxlength + if cut < 0: + cut = 0 + else: + cut = 0 + return self.history[cut:] + + # --- simplified support for reading multiline Python statements --- + + # This duplicates small parts of pyrepl.python_reader. I'm not + # reusing the PythonicReader class directly for two reasons. One is + # to try to keep as close as possible to CPython's prompt. The + # other is that it is the readline module that we are ultimately + # implementing here, and I don't want the built-in raw_input() to + # start trying to read multiline inputs just because what the user + # typed look like valid but incomplete Python code. So we get the + # multiline feature only when using the multiline_input() function + # directly (see _pypy_interact.py). + + more_lines = None + + def collect_keymap(self): + return super(ReadlineAlikeReader, self).collect_keymap() + ( + (r'\n', 'maybe-accept'),) + + def __init__(self, console): + super(ReadlineAlikeReader, self).__init__(console) + self.commands['maybe_accept'] = maybe_accept + self.commands['maybe-accept'] = maybe_accept + + def after_command(self, cmd): + super(ReadlineAlikeReader, self).after_command(cmd) + if self.more_lines is None: + # Force single-line input if we are in raw_input() mode. + # Although there is no direct way to add a \n in this mode, + # multiline buffers can still show up using various + # commands, e.g. navigating the history. + try: + index = self.buffer.index("\n") + except ValueError: + pass + else: + self.buffer = self.buffer[:index] + if self.pos > len(self.buffer): + self.pos = len(self.buffer) + +class maybe_accept(commands.Command): + def do(self): + r = self.reader + r.dirty = 1 # this is needed to hide the completion menu, if visible + # + # if there are already several lines and the cursor + # is not on the last one, always insert a new \n. 
+ text = r.get_unicode() + if "\n" in r.buffer[r.pos:]: + r.insert("\n") + elif r.more_lines is not None and r.more_lines(text): + r.insert("\n") + else: + self.finish = 1 + +# ____________________________________________________________ + +class _ReadlineWrapper(object): + f_in = 0 + f_out = 1 + reader = None + saved_history_length = -1 + startup_hook = None + config = ReadlineConfig() + + def get_reader(self): + if self.reader is None: + console = UnixConsole(self.f_in, self.f_out, encoding=ENCODING) + self.reader = ReadlineAlikeReader(console) + self.reader.config = self.config + return self.reader + + def raw_input(self, prompt=''): + try: + reader = self.get_reader() + except _error: + return _old_raw_input(prompt) + if self.startup_hook is not None: + self.startup_hook() + reader.ps1 = prompt + return reader.readline() + + def multiline_input(self, more_lines, ps1, ps2): + """Read an input on possibly multiple lines, asking for more + lines as long as 'more_lines(unicodetext)' returns an object whose + boolean value is true. 
+ """ + reader = self.get_reader() + saved = reader.more_lines + try: + reader.more_lines = more_lines + reader.ps1 = reader.ps2 = ps1 + reader.ps3 = reader.ps4 = ps2 + return reader.readline() + finally: + reader.more_lines = saved + + def parse_and_bind(self, string): + pass # XXX we don't support parsing GNU-readline-style init files + + def set_completer(self, function=None): + self.config.readline_completer = function + + def get_completer(self): + return self.config.readline_completer + + def set_completer_delims(self, string): + self.config.completer_delims = dict.fromkeys(string) + + def get_completer_delims(self): + chars = self.config.completer_delims.keys() + chars.sort() + return ''.join(chars) + + def _histline(self, line): + return unicode(line.rstrip('\n'), ENCODING) + + def get_history_length(self): + return self.saved_history_length + + def set_history_length(self, length): + self.saved_history_length = length + + def get_current_history_length(self): + return len(self.get_reader().history) + + def read_history_file(self, filename='~/.history'): + # multiline extension (really a hack) for the end of lines that + # are actually continuations inside a single multiline_input() + # history item: we use \r\n instead of just \n. If the history + # file is passed to GNU readline, the extra \r are just ignored. 
+ history = self.get_reader().history + f = open(os.path.expanduser(filename), 'r') + buffer = [] + for line in f: + if line.endswith('\r\n'): + buffer.append(line) + else: + line = self._histline(line) + if buffer: + line = ''.join(buffer).replace('\r', '') + line + del buffer[:] + if line: + history.append(line) + f.close() + + def write_history_file(self, filename='~/.history'): + maxlength = self.saved_history_length + history = self.get_reader().get_trimmed_history(maxlength) + f = open(os.path.expanduser(filename), 'w') + for entry in history: + if isinstance(entry, unicode): + entry = entry.encode(ENCODING) + entry = entry.replace('\n', '\r\n') # multiline history support + f.write(entry + '\n') + f.close() + + def clear_history(self): + del self.get_reader().history[:] + + def get_history_item(self, index): + history = self.get_reader().history + if 1 <= index <= len(history): + return history[index-1] + else: + return None # blame readline.c for not raising + + def remove_history_item(self, index): + history = self.get_reader().history + if 0 <= index < len(history): + del history[index] + else: + raise ValueError("No history item at position %d" % index) + # blame readline.c for raising ValueError + + def replace_history_item(self, index, line): + history = self.get_reader().history + if 0 <= index < len(history): + history[index] = self._histline(line) + else: + raise ValueError("No history item at position %d" % index) + # blame readline.c for raising ValueError + + def add_history(self, line): + self.get_reader().history.append(self._histline(line)) + + def set_startup_hook(self, function=None): + self.startup_hook = function + + def get_line_buffer(self): + return self.get_reader().get_buffer() + + def _get_idxs(self): + start = cursor = self.get_reader().pos + buf = self.get_line_buffer() + for i in xrange(cursor - 1, -1, -1): + if buf[i] in self.get_completer_delims(): + break + start = i + return start, cursor + + def get_begidx(self): + return 
self._get_idxs()[0] + + def get_endidx(self): + return self._get_idxs()[1] + + def insert_text(self, text): + return self.get_reader().insert(text) + + +_wrapper = _ReadlineWrapper() + +# ____________________________________________________________ +# Public API + +parse_and_bind = _wrapper.parse_and_bind +set_completer = _wrapper.set_completer +get_completer = _wrapper.get_completer +set_completer_delims = _wrapper.set_completer_delims +get_completer_delims = _wrapper.get_completer_delims +get_history_length = _wrapper.get_history_length +set_history_length = _wrapper.set_history_length +get_current_history_length = _wrapper.get_current_history_length +read_history_file = _wrapper.read_history_file +write_history_file = _wrapper.write_history_file +clear_history = _wrapper.clear_history +get_history_item = _wrapper.get_history_item +remove_history_item = _wrapper.remove_history_item +replace_history_item = _wrapper.replace_history_item +add_history = _wrapper.add_history +set_startup_hook = _wrapper.set_startup_hook +get_line_buffer = _wrapper.get_line_buffer +get_begidx = _wrapper.get_begidx +get_endidx = _wrapper.get_endidx +insert_text = _wrapper.insert_text + +# Extension +multiline_input = _wrapper.multiline_input + +# Internal hook +_get_reader = _wrapper.get_reader + +# ____________________________________________________________ +# Stubs + +def _make_stub(_name, _ret): + def stub(*args, **kwds): + import warnings + warnings.warn("readline.%s() not implemented" % _name, stacklevel=2) + stub.func_name = _name + globals()[_name] = stub + +for _name, _ret in [ + ('read_init_file', None), + ('redisplay', None), + ('set_pre_input_hook', None), + ]: + assert _name not in globals(), _name + _make_stub(_name, _ret) + +# ____________________________________________________________ + +def _setup(): + global _old_raw_input + try: + f_in = sys.stdin.fileno() + f_out = sys.stdout.fileno() + except (AttributeError, ValueError): + return + if not os.isatty(f_in) or not 
os.isatty(f_out): + return + + _wrapper.f_in = f_in + _wrapper.f_out = f_out + + if hasattr(sys, '__raw_input__'): # PyPy + _old_raw_input = sys.__raw_input__ + sys.__raw_input__ = _wrapper.raw_input + else: + # this is not really what readline.c does. Better than nothing I guess + import __builtin__ + _old_raw_input = __builtin__.raw_input + __builtin__.raw_input = _wrapper.raw_input + +_setup() diff --git a/lib_pypy/pyrepl/test/test_functional.py b/lib_pypy/pyrepl/test/test_functional.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/test/test_functional.py @@ -0,0 +1,50 @@ +# Copyright 2000-2007 Michael Hudson-Doyle +# Maciek Fijalkowski +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +# some functional tests, to see if this is really working + +import py +import sys + +class TestTerminal(object): + def _spawn(self, *args, **kwds): + try: + import pexpect + except ImportError, e: + py.test.skip(str(e)) + kwds.setdefault('timeout', 10) + child = pexpect.spawn(*args, **kwds) + child.logfile = sys.stdout + return child + + def spawn(self, argv=[]): + # avoid running start.py, cause it might contain + # things like readline or rlcompleter(2) included + child = self._spawn(sys.executable, ['-S'] + argv) + child.sendline('from pyrepl.python_reader import main') + child.sendline('main()') + return child + + def test_basic(self): + child = self.spawn() + child.sendline('a = 3') + child.sendline('a') + child.expect('3') + diff --git a/lib_pypy/pyrepl/keymap.py b/lib_pypy/pyrepl/keymap.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/keymap.py @@ -0,0 +1,186 @@ +# Copyright 2000-2008 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +""" +functions for parsing keyspecs + +Support for turning keyspecs into appropriate sequences. 
+ +pyrepl uses it's own bastardized keyspec format, which is meant to be +a strict superset of readline's \"KEYSEQ\" format (which is to say +that if you can come up with a spec readline accepts that this +doesn't, you've found a bug and should tell me about it). + +Note that this is the `\\C-o' style of readline keyspec, not the +`Control-o' sort. + +A keyspec is a string representing a sequence of keypresses that can +be bound to a command. + +All characters other than the backslash represent themselves. In the +traditional manner, a backslash introduces a escape sequence. + +The extension to readline is that the sequence \\ denotes the +sequence of charaters produced by hitting KEY. + +Examples: + +`a' - what you get when you hit the `a' key +`\\EOA' - Escape - O - A (up, on my terminal) +`\\' - the up arrow key +`\\' - ditto (keynames are case insensitive) +`\\C-o', `\\c-o' - control-o +`\\M-.' - meta-period +`\\E.' - ditto (that's how meta works for pyrepl) +`\\', `\\', `\\t', `\\011', '\\x09', '\\X09', '\\C-i', '\\C-I' + - all of these are the tab character. Can you think of any more? 
+""" + +_escapes = { + '\\':'\\', + "'":"'", + '"':'"', + 'a':'\a', + 'b':'\h', + 'e':'\033', + 'f':'\f', + 'n':'\n', + 'r':'\r', + 't':'\t', + 'v':'\v' + } + +_keynames = { + 'backspace': 'backspace', + 'delete': 'delete', + 'down': 'down', + 'end': 'end', + 'enter': '\r', + 'escape': '\033', + 'f1' : 'f1', 'f2' : 'f2', 'f3' : 'f3', 'f4' : 'f4', + 'f5' : 'f5', 'f6' : 'f6', 'f7' : 'f7', 'f8' : 'f8', + 'f9' : 'f9', 'f10': 'f10', 'f11': 'f11', 'f12': 'f12', + 'f13': 'f13', 'f14': 'f14', 'f15': 'f15', 'f16': 'f16', + 'f17': 'f17', 'f18': 'f18', 'f19': 'f19', 'f20': 'f20', + 'home': 'home', + 'insert': 'insert', + 'left': 'left', + 'page down': 'page down', + 'page up': 'page up', + 'return': '\r', + 'right': 'right', + 'space': ' ', + 'tab': '\t', + 'up': 'up', + } + +class KeySpecError(Exception): + pass + +def _parse_key1(key, s): + ctrl = 0 + meta = 0 + ret = '' + while not ret and s < len(key): + if key[s] == '\\': + c = key[s+1].lower() + if _escapes.has_key(c): + ret = _escapes[c] + s += 2 + elif c == "c": + if key[s + 2] != '-': + raise KeySpecError, \ + "\\C must be followed by `-' (char %d of %s)"%( + s + 2, repr(key)) + if ctrl: + raise KeySpecError, "doubled \\C- (char %d of %s)"%( + s + 1, repr(key)) + ctrl = 1 + s += 3 + elif c == "m": + if key[s + 2] != '-': + raise KeySpecError, \ + "\\M must be followed by `-' (char %d of %s)"%( + s + 2, repr(key)) + if meta: + raise KeySpecError, "doubled \\M- (char %d of %s)"%( + s + 1, repr(key)) + meta = 1 + s += 3 + elif c.isdigit(): + n = key[s+1:s+4] + ret = chr(int(n, 8)) + s += 4 + elif c == 'x': + n = key[s+2:s+4] + ret = chr(int(n, 16)) + s += 4 + elif c == '<': + t = key.find('>', s) + if t == -1: + raise KeySpecError, \ + "unterminated \\< starting at char %d of %s"%( + s + 1, repr(key)) + ret = key[s+2:t].lower() + if ret not in _keynames: + raise KeySpecError, \ + "unrecognised keyname `%s' at char %d of %s"%( + ret, s + 2, repr(key)) + ret = _keynames[ret] + s = t + 1 + else: + raise KeySpecError, \ + 
"unknown backslash escape %s at char %d of %s"%( + `c`, s + 2, repr(key)) + else: + ret = key[s] + s += 1 + if ctrl: + if len(ret) > 1: + raise KeySpecError, "\\C- must be followed by a character" + ret = chr(ord(ret) & 0x1f) # curses.ascii.ctrl() + if meta: + ret = ['\033', ret] + else: + ret = [ret] + return ret, s + +def parse_keys(key): + s = 0 + r = [] + while s < len(key): + k, s = _parse_key1(key, s) + r.extend(k) + return r + +def compile_keymap(keymap, empty=''): + r = {} + for key, value in keymap.items(): + r.setdefault(key[0], {})[key[1:]] = value + for key, value in r.items(): + if empty in value: + if len(value) <> 1: + raise KeySpecError, \ + "key definitions for %s clash"%(value.values(),) + else: + r[key] = value[empty] + else: + r[key] = compile_keymap(value, empty) + return r diff --git a/lib_pypy/pyrepl/curses.py b/lib_pypy/pyrepl/curses.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/curses.py @@ -0,0 +1,39 @@ + +# Copyright 2000-2010 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +# Some try-import logic for two purposes: avoiding to bring in the whole +# pure Python curses package if possible; and, in _curses is not actually +# present, falling back to _minimal_curses (which is either a ctypes-based +# pure Python module or a PyPy built-in module). +try: + import _curses +except ImportError: + try: + import _minimal_curses as _curses + except ImportError: + # Who knows, maybe some environment has "curses" but not "_curses". + # If not, at least the following import gives a clean ImportError. + import _curses + +setupterm = _curses.setupterm +tigetstr = _curses.tigetstr +tparm = _curses.tparm +error = _curses.error diff --git a/lib_pypy/pyrepl/unix_eventqueue.py b/lib_pypy/pyrepl/unix_eventqueue.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/unix_eventqueue.py @@ -0,0 +1,86 @@ +# Copyright 2000-2008 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# Bah, this would be easier to test if curses/terminfo didn't have so +# much non-introspectable global state. 
+ +from pyrepl import keymap +from pyrepl.console import Event +from pyrepl import curses +from termios import tcgetattr, VERASE +import os + +_keynames = { + "delete" : "kdch1", + "down" : "kcud1", + "end" : "kend", + "enter" : "kent", + "f1" : "kf1", "f2" : "kf2", "f3" : "kf3", "f4" : "kf4", + "f5" : "kf5", "f6" : "kf6", "f7" : "kf7", "f8" : "kf8", + "f9" : "kf9", "f10" : "kf10", "f11" : "kf11", "f12" : "kf12", + "f13" : "kf13", "f14" : "kf14", "f15" : "kf15", "f16" : "kf16", + "f17" : "kf17", "f18" : "kf18", "f19" : "kf19", "f20" : "kf20", + "home" : "khome", + "insert" : "kich1", + "left" : "kcub1", + "page down" : "knp", + "page up" : "kpp", + "right" : "kcuf1", + "up" : "kcuu1", + } + +class EventQueue(object): + def __init__(self, fd): + our_keycodes = {} + for key, tiname in _keynames.items(): + keycode = curses.tigetstr(tiname) + if keycode: + our_keycodes[keycode] = unicode(key) + if os.isatty(fd): + our_keycodes[tcgetattr(fd)[6][VERASE]] = u'backspace' + self.k = self.ck = keymap.compile_keymap(our_keycodes) + self.events = [] + self.buf = [] + def get(self): + if self.events: + return self.events.pop(0) + else: + return None + def empty(self): + return not self.events + def insert(self, event): + self.events.append(event) + def push(self, char): + if char in self.k: + k = self.k[char] + if isinstance(k, dict): + self.buf.append(char) + self.k = k + else: + self.events.append(Event('key', k, ''.join(self.buf) + char)) + self.buf = [] + self.k = self.ck + elif self.buf: + self.events.extend([Event('key', c, c) for c in self.buf]) + self.buf = [] + self.k = self.ck + self.push(char) + else: + self.events.append(Event('key', char, char)) diff --git a/lib_pypy/pyrepl/__init__.py b/lib_pypy/pyrepl/__init__.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/__init__.py @@ -0,0 +1,19 @@ +# Copyright 2000-2008 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its 
documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/lib_pypy/pyrepl/completing_reader.py b/lib_pypy/pyrepl/completing_reader.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/completing_reader.py @@ -0,0 +1,280 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Antonio Cuni +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +from pyrepl import commands, reader +from pyrepl.reader import Reader + +def uniqify(l): + d = {} + for i in l: + d[i] = 1 + r = d.keys() + r.sort() + return r + +def prefix(wordlist, j = 0): + d = {} + i = j + try: + while 1: + for word in wordlist: + d[word[i]] = 1 + if len(d) > 1: + return wordlist[0][j:i] + i += 1 + d = {} + except IndexError: + return wordlist[0][j:i] + +import re +def stripcolor(s): + return stripcolor.regexp.sub('', s) +stripcolor.regexp = re.compile(r"\x1B\[([0-9]{1,3}(;[0-9]{1,2})?)?[m|K]") + +def real_len(s): + return len(stripcolor(s)) + +def left_align(s, maxlen): + stripped = stripcolor(s) + if len(stripped) > maxlen: + # too bad, we remove the color + return stripped[:maxlen] + padding = maxlen - len(stripped) + return s + ' '*padding + +def build_menu(cons, wordlist, start, use_brackets, sort_in_column): + if use_brackets: + item = "[ %s ]" + padding = 4 + else: + item = "%s " + padding = 2 + maxlen = min(max(map(real_len, wordlist)), cons.width - padding) + cols = cons.width / (maxlen + padding) + rows = (len(wordlist) - 1)/cols + 1 + + if sort_in_column: + # sort_in_column=False (default) sort_in_column=True + # A B C A D G + # D E F B E + # G C F + # + # "fill" the table with empty words, so we always have the same amout + # of rows for each column + missing = cols*rows - len(wordlist) + wordlist = wordlist + ['']*missing + indexes = [(i%cols)*rows + i//cols for i in range(len(wordlist))] + wordlist = [wordlist[i] for i in indexes] + menu = [] + i = start + for r in range(rows): + row = [] + for col in range(cols): + row.append(item % left_align(wordlist[i], maxlen)) + i += 1 + if i >= len(wordlist): + break + menu.append( ''.join(row) ) + if i >= len(wordlist): + i = 0 + break + if r + 5 > cons.height: + menu.append(" %d more... "%(len(wordlist) - i)) + break + return menu, i + +# this gets somewhat user interface-y, and as a result the logic gets +# very convoluted. 
+# +# To summarise the summary of the summary:- people are a problem. +# -- The Hitch-Hikers Guide to the Galaxy, Episode 12 + +#### Desired behaviour of the completions commands. +# the considerations are: +# (1) how many completions are possible +# (2) whether the last command was a completion +# (3) if we can assume that the completer is going to return the same set of +# completions: this is controlled by the ``assume_immutable_completions`` +# variable on the reader, which is True by default to match the historical +# behaviour of pyrepl, but e.g. False in the ReadlineAlikeReader to match +# more closely readline's semantics (this is needed e.g. by +# fancycompleter) +# +# if there's no possible completion, beep at the user and point this out. +# this is easy. +# +# if there's only one possible completion, stick it in. if the last thing +# user did was a completion, point out that he isn't getting anywhere, but +# only if the ``assume_immutable_completions`` is True. +# +# now it gets complicated. +# +# for the first press of a completion key: +# if there's a common prefix, stick it in. + +# irrespective of whether anything got stuck in, if the word is now +# complete, show the "complete but not unique" message + +# if there's no common prefix and if the word is not now complete, +# beep. + +# common prefix -> yes no +# word complete \/ +# yes "cbnu" "cbnu" +# no - beep + +# for the second bang on the completion key +# there will necessarily be no common prefix +# show a menu of the choices. + +# for subsequent bangs, rotate the menu around (if there are sufficient +# choices). 
+ +class complete(commands.Command): + def do(self): + r = self.reader + stem = r.get_stem() + if r.assume_immutable_completions and \ + r.last_command_is(self.__class__): + completions = r.cmpltn_menu_choices + else: + r.cmpltn_menu_choices = completions = \ + r.get_completions(stem) + if len(completions) == 0: + r.error("no matches") + elif len(completions) == 1: + if r.assume_immutable_completions and \ + len(completions[0]) == len(stem) and \ + r.last_command_is(self.__class__): + r.msg = "[ sole completion ]" + r.dirty = 1 + r.insert(completions[0][len(stem):]) + else: + p = prefix(completions, len(stem)) + if p <> '': + r.insert(p) + if r.last_command_is(self.__class__): + if not r.cmpltn_menu_vis: + r.cmpltn_menu_vis = 1 + r.cmpltn_menu, r.cmpltn_menu_end = build_menu( + r.console, completions, r.cmpltn_menu_end, + r.use_brackets, r.sort_in_column) + r.dirty = 1 + elif stem + p in completions: + r.msg = "[ complete but not unique ]" + r.dirty = 1 + else: + r.msg = "[ not unique ]" + r.dirty = 1 + +class self_insert(commands.self_insert): + def do(self): + commands.self_insert.do(self) + r = self.reader + if r.cmpltn_menu_vis: + stem = r.get_stem() + if len(stem) < 1: + r.cmpltn_reset() + else: + completions = [w for w in r.cmpltn_menu_choices + if w.startswith(stem)] + if completions: + r.cmpltn_menu, r.cmpltn_menu_end = build_menu( + r.console, completions, 0, + r.use_brackets, r.sort_in_column) + else: + r.cmpltn_reset() + +class CompletingReader(Reader): + """Adds completion support + + Adds instance variables: + * cmpltn_menu, cmpltn_menu_vis, cmpltn_menu_end, cmpltn_choices: + * + """ + # see the comment for the complete command + assume_immutable_completions = True + use_brackets = True # display completions inside [] + sort_in_column = False + + def collect_keymap(self): + return super(CompletingReader, self).collect_keymap() + ( + (r'\t', 'complete'),) + + def __init__(self, console): + super(CompletingReader, self).__init__(console) + 
self.cmpltn_menu = ["[ menu 1 ]", "[ menu 2 ]"] + self.cmpltn_menu_vis = 0 + self.cmpltn_menu_end = 0 + for c in [complete, self_insert]: + self.commands[c.__name__] = c + self.commands[c.__name__.replace('_', '-')] = c + + def after_command(self, cmd): + super(CompletingReader, self).after_command(cmd) + if not isinstance(cmd, complete) and not isinstance(cmd, self_insert): + self.cmpltn_reset() + + def calc_screen(self): + screen = super(CompletingReader, self).calc_screen() + if self.cmpltn_menu_vis: + ly = self.lxy[1] + screen[ly:ly] = self.cmpltn_menu + self.screeninfo[ly:ly] = [(0, [])]*len(self.cmpltn_menu) + self.cxy = self.cxy[0], self.cxy[1] + len(self.cmpltn_menu) + return screen + + def finish(self): + super(CompletingReader, self).finish() + self.cmpltn_reset() + + def cmpltn_reset(self): + self.cmpltn_menu = [] + self.cmpltn_menu_vis = 0 + self.cmpltn_menu_end = 0 + self.cmpltn_menu_choices = [] + + def get_stem(self): + st = self.syntax_table + SW = reader.SYNTAX_WORD + b = self.buffer + p = self.pos - 1 + while p >= 0 and st.get(b[p], SW) == SW: + p -= 1 + return u''.join(b[p+1:self.pos]) + + def get_completions(self, stem): + return [] + +def test(): + class TestReader(CompletingReader): + def get_completions(self, stem): + return [s for l in map(lambda x:x.split(),self.history) + for s in l if s and s.startswith(stem)] + reader = TestReader() + reader.ps1 = "c**> " + reader.ps2 = "c/*> " + reader.ps3 = "c|*> " + reader.ps4 = "c\*> " + while reader.readline(): + pass + +if __name__=='__main__': + test() diff --git a/lib_pypy/pyrepl/tests/__init__.py b/lib_pypy/pyrepl/tests/__init__.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/tests/__init__.py @@ -0,0 +1,20 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies 
and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# moo diff --git a/lib_pypy/pyrepl/cmdrepl.py b/lib_pypy/pyrepl/cmdrepl.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/cmdrepl.py @@ -0,0 +1,118 @@ +# Copyright 2000-2007 Michael Hudson-Doyle +# Maciek Fijalkowski +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""Wedge pyrepl behaviour into cmd.Cmd-derived classes. + +replize, when given a subclass of cmd.Cmd, returns a class that +behaves almost identically to the supplied class, except that it uses +pyrepl instead if raw_input. 
+ +It was designed to let you do this: + +>>> import pdb +>>> from pyrepl import replize +>>> pdb.Pdb = replize(pdb.Pdb) + +which is in fact done by the `pythoni' script that comes with +pyrepl.""" + +from __future__ import nested_scopes + +from pyrepl import completing_reader as cr, reader, completer +from pyrepl.completing_reader import CompletingReader as CR +import cmd + +class CmdReader(CR): + def collect_keymap(self): + return super(CmdReader, self).collect_keymap() + ( + ("\\M-\\n", "invalid-key"), + ("\\n", "accept")) + + CR_init = CR.__init__ + def __init__(self, completions): + self.CR_init(self) + self.completions = completions + + def get_completions(self, stem): + if len(stem) != self.pos: + return [] + return cr.uniqify([s for s in self.completions + if s.startswith(stem)]) + +def replize(klass, history_across_invocations=1): + + """Return a subclass of the cmd.Cmd-derived klass that uses + pyrepl instead of readline. + + Raises a ValueError if klass does not derive from cmd.Cmd. 
+ + The optional history_across_invocations parameter (default 1) + controls whether instances of the returned class share + histories.""" + + completions = [s[3:] + for s in completer.get_class_members(klass) + if s.startswith("do_")] + + if not issubclass(klass, cmd.Cmd): + raise Exception +# if klass.cmdloop.im_class is not cmd.Cmd: +# print "this may not work" + + class CmdRepl(klass): + k_init = klass.__init__ + + if history_across_invocations: + _CmdRepl__history = [] + def __init__(self, *args, **kw): + self.k_init(*args, **kw) + self.__reader = CmdReader(completions) + self.__reader.history = CmdRepl._CmdRepl__history + self.__reader.historyi = len(CmdRepl._CmdRepl__history) + else: + def __init__(self, *args, **kw): + self.k_init(*args, **kw) + self.__reader = CmdReader(completions) + + def cmdloop(self, intro=None): + self.preloop() + if intro is not None: + self.intro = intro + if self.intro: + print self.intro + stop = None + while not stop: + if self.cmdqueue: + line = self.cmdqueue[0] + del self.cmdqueue[0] + else: + try: + self.__reader.ps1 = self.prompt + line = self.__reader.readline() + except EOFError: + line = "EOF" + line = self.precmd(line) + stop = self.onecmd(line) + stop = self.postcmd(stop, line) + self.postloop() + + CmdRepl.__name__ = "replize(%s.%s)"%(klass.__module__, klass.__name__) + return CmdRepl + diff --git a/lib_pypy/pyrepl/historical_reader.py b/lib_pypy/pyrepl/historical_reader.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/historical_reader.py @@ -0,0 +1,311 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. 
+# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from pyrepl import reader, commands +from pyrepl.reader import Reader as R + +isearch_keymap = tuple( + [('\\%03o'%c, 'isearch-end') for c in range(256) if chr(c) != '\\'] + \ + [(c, 'isearch-add-character') + for c in map(chr, range(32, 127)) if c != '\\'] + \ + [('\\%03o'%c, 'isearch-add-character') + for c in range(256) if chr(c).isalpha() and chr(c) != '\\'] + \ + [('\\\\', 'self-insert'), + (r'\C-r', 'isearch-backwards'), + (r'\C-s', 'isearch-forwards'), + (r'\C-c', 'isearch-cancel'), + (r'\C-g', 'isearch-cancel'), + (r'\', 'isearch-backspace')]) + +del c + +ISEARCH_DIRECTION_NONE = '' +ISEARCH_DIRECTION_BACKWARDS = 'r' +ISEARCH_DIRECTION_FORWARDS = 'f' + +class next_history(commands.Command): + def do(self): + r = self.reader + if r.historyi == len(r.history): + r.error("end of history list") + return + r.select_item(r.historyi + 1) + +class previous_history(commands.Command): + def do(self): + r = self.reader + if r.historyi == 0: + r.error("start of history list") + return + r.select_item(r.historyi - 1) + +class restore_history(commands.Command): + def do(self): + r = self.reader + if r.historyi != len(r.history): + if r.get_unicode() != r.history[r.historyi]: + r.buffer = list(r.history[r.historyi]) + r.pos = len(r.buffer) + r.dirty = 1 + +class first_history(commands.Command): + def do(self): + self.reader.select_item(0) + +class last_history(commands.Command): + def do(self): + self.reader.select_item(len(self.reader.history)) + +class 
operate_and_get_next(commands.FinishCommand): + def do(self): + self.reader.next_history = self.reader.historyi + 1 + +class yank_arg(commands.Command): + def do(self): + r = self.reader + if r.last_command is self.__class__: + r.yank_arg_i += 1 + else: + r.yank_arg_i = 0 + if r.historyi < r.yank_arg_i: + r.error("beginning of history list") + return + a = r.get_arg(-1) + # XXX how to split? + words = r.get_item(r.historyi - r.yank_arg_i - 1).split() + if a < -len(words) or a >= len(words): + r.error("no such arg") + return + w = words[a] + b = r.buffer + if r.yank_arg_i > 0: + o = len(r.yank_arg_yanked) + else: + o = 0 + b[r.pos - o:r.pos] = list(w) + r.yank_arg_yanked = w + r.pos += len(w) - o + r.dirty = 1 + +class forward_history_isearch(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = ISEARCH_DIRECTION_FORWARDS + r.isearch_start = r.historyi, r.pos + r.isearch_term = '' + r.dirty = 1 + r.push_input_trans(r.isearch_trans) + + +class reverse_history_isearch(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = ISEARCH_DIRECTION_BACKWARDS + r.dirty = 1 + r.isearch_term = '' + r.push_input_trans(r.isearch_trans) + r.isearch_start = r.historyi, r.pos + +class isearch_cancel(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = ISEARCH_DIRECTION_NONE + r.pop_input_trans() + r.select_item(r.isearch_start[0]) + r.pos = r.isearch_start[1] + r.dirty = 1 + +class isearch_add_character(commands.Command): + def do(self): + r = self.reader + b = r.buffer + r.isearch_term += self.event[-1] + r.dirty = 1 + p = r.pos + len(r.isearch_term) - 1 + if b[p:p+1] != [r.isearch_term[-1]]: + r.isearch_next() + +class isearch_backspace(commands.Command): + def do(self): + r = self.reader + if len(r.isearch_term) > 0: + r.isearch_term = r.isearch_term[:-1] + r.dirty = 1 + else: + r.error("nothing to rubout") + +class isearch_forwards(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = 
ISEARCH_DIRECTION_FORWARDS + r.isearch_next() + +class isearch_backwards(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = ISEARCH_DIRECTION_BACKWARDS + r.isearch_next() + +class isearch_end(commands.Command): + def do(self): + r = self.reader + r.isearch_direction = ISEARCH_DIRECTION_NONE + r.console.forgetinput() + r.pop_input_trans() + r.dirty = 1 + +class HistoricalReader(R): + """Adds history support (with incremental history searching) to the + Reader class. + + Adds the following instance variables: + * history: + a list of strings + * historyi: + * transient_history: + * next_history: + * isearch_direction, isearch_term, isearch_start: + * yank_arg_i, yank_arg_yanked: + used by the yank-arg command; not actually manipulated by any + HistoricalReader instance methods. + """ + + def collect_keymap(self): + return super(HistoricalReader, self).collect_keymap() + ( + (r'\C-n', 'next-history'), + (r'\C-p', 'previous-history'), + (r'\C-o', 'operate-and-get-next'), + (r'\C-r', 'reverse-history-isearch'), + (r'\C-s', 'forward-history-isearch'), + (r'\M-r', 'restore-history'), + (r'\M-.', 'yank-arg'), + (r'\', 'last-history'), + (r'\', 'first-history')) + + + def __init__(self, console): + super(HistoricalReader, self).__init__(console) + self.history = [] + self.historyi = 0 + self.transient_history = {} + self.next_history = None + self.isearch_direction = ISEARCH_DIRECTION_NONE + for c in [next_history, previous_history, restore_history, + first_history, last_history, yank_arg, + forward_history_isearch, reverse_history_isearch, + isearch_end, isearch_add_character, isearch_cancel, + isearch_add_character, isearch_backspace, + isearch_forwards, isearch_backwards, operate_and_get_next]: + self.commands[c.__name__] = c + self.commands[c.__name__.replace('_', '-')] = c + from pyrepl import input + self.isearch_trans = input.KeymapTranslator( + isearch_keymap, invalid_cls=isearch_end, + character_cls=isearch_add_character) + + def 
select_item(self, i): + self.transient_history[self.historyi] = self.get_unicode() + buf = self.transient_history.get(i) + if buf is None: + buf = self.history[i] + self.buffer = list(buf) + self.historyi = i + self.pos = len(self.buffer) + self.dirty = 1 + + def get_item(self, i): + if i <> len(self.history): + return self.transient_history.get(i, self.history[i]) + else: + return self.transient_history.get(i, self.get_unicode()) + + def prepare(self): + super(HistoricalReader, self).prepare() + try: + self.transient_history = {} + if self.next_history is not None \ + and self.next_history < len(self.history): + self.historyi = self.next_history + self.buffer[:] = list(self.history[self.next_history]) + self.pos = len(self.buffer) + self.transient_history[len(self.history)] = '' + else: + self.historyi = len(self.history) + self.next_history = None + except: + self.restore() + raise + + def get_prompt(self, lineno, cursor_on_line): + if cursor_on_line and self.isearch_direction <> ISEARCH_DIRECTION_NONE: + d = 'rf'[self.isearch_direction == ISEARCH_DIRECTION_FORWARDS] + return "(%s-search `%s') "%(d, self.isearch_term) + else: + return super(HistoricalReader, self).get_prompt(lineno, cursor_on_line) + + def isearch_next(self): + st = self.isearch_term + p = self.pos + i = self.historyi + s = self.get_unicode() + forwards = self.isearch_direction == ISEARCH_DIRECTION_FORWARDS + while 1: + if forwards: + p = s.find(st, p + 1) + else: + p = s.rfind(st, 0, p + len(st) - 1) + if p != -1: + self.select_item(i) + self.pos = p + return + elif ((forwards and i == len(self.history) - 1) + or (not forwards and i == 0)): + self.error("not found") + return + else: + if forwards: + i += 1 + s = self.get_item(i) + p = -1 + else: + i -= 1 + s = self.get_item(i) + p = len(s) + + def finish(self): + super(HistoricalReader, self).finish() + ret = self.get_unicode() + for i, t in self.transient_history.items(): + if i < len(self.history) and i != self.historyi: + self.history[i] = t 
+ if ret: + self.history.append(ret) + +def test(): + from pyrepl.unix_console import UnixConsole + reader = HistoricalReader(UnixConsole()) + reader.ps1 = "h**> " + reader.ps2 = "h/*> " + reader.ps3 = "h|*> " + reader.ps4 = "h\*> " + while reader.readline(): + pass + +if __name__=='__main__': + test() diff --git a/lib_pypy/pyrepl/pygame_keymap.py b/lib_pypy/pyrepl/pygame_keymap.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/pygame_keymap.py @@ -0,0 +1,250 @@ +# Copyright 2000-2008 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# keyspec parsing for a pygame console. currently this is simply copy +# n' change from the unix (ie. trad terminal) variant; probably some +# refactoring will happen when I work out how it will work best. + +# A key is represented as *either* + +# a) a (keycode, meta, ctrl) sequence (used for special keys such as +# f1, the up arrow key, etc) +# b) a (unichar, meta, ctrl) sequence (used for printable chars) + +# Because we allow keystokes like '\\C-xu', I'll use the same trick as +# the unix keymap module uses. + +# '\\C-a' --> (K_a, 0, 1) + +# XXX it's actually possible to test this module, so it should have a +# XXX test suite. 
+ +from pygame.locals import * + +_escapes = { + '\\': K_BACKSLASH, + "'" : K_QUOTE, + '"' : K_QUOTEDBL, +# 'a' : '\a', + 'b' : K_BACKSLASH, + 'e' : K_ESCAPE, +# 'f' : '\f', + 'n' : K_RETURN, + 'r' : K_RETURN, + 't' : K_TAB, +# 'v' : '\v' + } + +_keynames = { + 'backspace' : K_BACKSPACE, + 'delete' : K_DELETE, + 'down' : K_DOWN, + 'end' : K_END, + 'enter' : K_KP_ENTER, + 'escape' : K_ESCAPE, + 'f1' : K_F1, 'f2' : K_F2, 'f3' : K_F3, 'f4' : K_F4, + 'f5' : K_F5, 'f6' : K_F6, 'f7' : K_F7, 'f8' : K_F8, + 'f9' : K_F9, 'f10': K_F10,'f11': K_F11,'f12': K_F12, + 'f13': K_F13,'f14': K_F14,'f15': K_F15, + 'home' : K_HOME, + 'insert' : K_INSERT, + 'left' : K_LEFT, + 'pgdown' : K_PAGEDOWN, 'page down' : K_PAGEDOWN, + 'pgup' : K_PAGEUP, 'page up' : K_PAGEUP, + 'return' : K_RETURN, + 'right' : K_RIGHT, + 'space' : K_SPACE, + 'tab' : K_TAB, + 'up' : K_UP, + } + +class KeySpecError(Exception): + pass + +def _parse_key1(key, s): + ctrl = 0 + meta = 0 + ret = '' + while not ret and s < len(key): + if key[s] == '\\': + c = key[s+1].lower() + if _escapes.has_key(c): + ret = _escapes[c] + s += 2 + elif c == "c": + if key[s + 2] != '-': + raise KeySpecError, \ + "\\C must be followed by `-' (char %d of %s)"%( + s + 2, repr(key)) + if ctrl: + raise KeySpecError, "doubled \\C- (char %d of %s)"%( + s + 1, repr(key)) + ctrl = 1 + s += 3 + elif c == "m": + if key[s + 2] != '-': + raise KeySpecError, \ + "\\M must be followed by `-' (char %d of %s)"%( + s + 2, repr(key)) + if meta: + raise KeySpecError, "doubled \\M- (char %d of %s)"%( + s + 1, repr(key)) + meta = 1 + s += 3 + elif c.isdigit(): + n = key[s+1:s+4] + ret = chr(int(n, 8)) + s += 4 + elif c == 'x': + n = key[s+2:s+4] + ret = chr(int(n, 16)) + s += 4 + elif c == '<': + t = key.find('>', s) + if t == -1: + raise KeySpecError, \ + "unterminated \\< starting at char %d of %s"%( + s + 1, repr(key)) + try: + ret = _keynames[key[s+2:t].lower()] + s = t + 1 + except KeyError: + raise KeySpecError, \ + "unrecognised keyname `%s' at char %d 
of %s"%( + key[s+2:t], s + 2, repr(key)) + if ret is None: + return None, s + else: + raise KeySpecError, \ + "unknown backslash escape %s at char %d of %s"%( + `c`, s + 2, repr(key)) + else: + if ctrl: + ret = chr(ord(key[s]) & 0x1f) # curses.ascii.ctrl() + ret = unicode(ret) + else: + ret = unicode(key[s]) + s += 1 + return (ret, meta, ctrl), s + +def parse_keys(key): + s = 0 + r = [] + while s < len(key): + k, s = _parse_key1(key, s) + if k is None: + return None + r.append(k) + return tuple(r) + +def _compile_keymap(keymap): + r = {} + for key, value in keymap.items(): + r.setdefault(key[0], {})[key[1:]] = value + for key, value in r.items(): + if value.has_key(()): + if len(value) <> 1: + raise KeySpecError, \ + "key definitions for %s clash"%(value.values(),) + else: + r[key] = value[()] + else: + r[key] = _compile_keymap(value) + return r + +def compile_keymap(keymap): + r = {} + for key, value in keymap: + k = parse_keys(key) + if value is None and r.has_key(k): + del r[k] + if k is not None: + r[k] = value + return _compile_keymap(r) + +def keyname(key): + longest_match = '' + longest_match_name = '' + for name, keyseq in keyset.items(): + if keyseq and key.startswith(keyseq) and \ + len(keyseq) > len(longest_match): + longest_match = keyseq + longest_match_name = name + if len(longest_match) > 0: + return longest_match_name, len(longest_match) + else: + return None, 0 + +_unescapes = {'\r':'\\r', '\n':'\\n', '\177':'^?'} + +#for k,v in _escapes.items(): +# _unescapes[v] = k + +def unparse_key(keyseq): + if not keyseq: + return '' + name, s = keyname(keyseq) + if name: + if name <> 'escape' or s == len(keyseq): + return '\\<' + name + '>' + unparse_key(keyseq[s:]) + else: + return '\\M-' + unparse_key(keyseq[1:]) + else: + c = keyseq[0] + r = keyseq[1:] + if c == '\\': + p = '\\\\' + elif _unescapes.has_key(c): + p = _unescapes[c] + elif ord(c) < ord(' '): + p = '\\C-%s'%(chr(ord(c)+96),) + elif ord(' ') <= ord(c) <= ord('~'): + p = c + else: + p = 
'\\%03o'%(ord(c),) + return p + unparse_key(r) + +def _unparse_keyf(keyseq): + if not keyseq: + return [] + name, s = keyname(keyseq) + if name: + if name <> 'escape' or s == len(keyseq): + return [name] + _unparse_keyf(keyseq[s:]) + else: + rest = _unparse_keyf(keyseq[1:]) + return ['M-'+rest[0]] + rest[1:] + else: + c = keyseq[0] + r = keyseq[1:] + if c == '\\': + p = '\\' + elif _unescapes.has_key(c): + p = _unescapes[c] + elif ord(c) < ord(' '): + p = 'C-%s'%(chr(ord(c)+96),) + elif ord(' ') <= ord(c) <= ord('~'): + p = c + else: + p = '\\%03o'%(ord(c),) + return [p] + _unparse_keyf(r) + +def unparse_keyf(keyseq): + return " ".join(_unparse_keyf(keyseq)) diff --git a/lib_pypy/pyrepl/tests/bugs.py b/lib_pypy/pyrepl/tests/bugs.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/tests/bugs.py @@ -0,0 +1,36 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +from pyrepl.console import Event +from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase + +# this test case should contain as-verbatim-as-possible versions of +# (applicable) bug reports + +class BugsTestCase(ReaderTestCase): + + def test_transpose_at_start(self): + self.run_test([( 'transpose', [EA, '']), + ( 'accept', [''])]) + +def test(): + run_testcase(BugsTestCase) + +if __name__ == '__main__': + test() diff --git a/lib_pypy/pyrepl/tests/infrastructure.py b/lib_pypy/pyrepl/tests/infrastructure.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/tests/infrastructure.py @@ -0,0 +1,82 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +from pyrepl.reader import Reader +from pyrepl.console import Console, Event +import unittest +import sys + +class EqualsAnything(object): + def __eq__(self, other): + return True +EA = EqualsAnything() + +class TestConsole(Console): + height = 24 + width = 80 + encoding = 'utf-8' + + def __init__(self, events, testcase, verbose=False): + self.events = events + self.next_screen = None + self.verbose = verbose + self.testcase = testcase + + def refresh(self, screen, xy): + if self.next_screen is not None: + self.testcase.assertEqual( + screen, self.next_screen, + "[ %s != %s after %r ]"%(screen, self.next_screen, + self.last_event_name)) + + def get_event(self, block=1): + ev, sc = self.events.pop(0) + self.next_screen = sc + if not isinstance(ev, tuple): + ev = (ev,) + self.last_event_name = ev[0] + if self.verbose: + print "event", ev + return Event(*ev) + +class TestReader(Reader): + def get_prompt(self, lineno, cursor_on_line): + return '' + def refresh(self): + Reader.refresh(self) + self.dirty = True + +class ReaderTestCase(unittest.TestCase): + def run_test(self, test_spec, reader_class=TestReader): + # remember to finish your test_spec with 'accept' or similar! 
+ con = TestConsole(test_spec, self) + reader = reader_class(con) + reader.readline() + +class BasicTestRunner: + def run(self, test): + result = unittest.TestResult() + test(result) + return result + +def run_testcase(testclass): + suite = unittest.makeSuite(testclass) + runner = unittest.TextTestRunner(sys.stdout, verbosity=1) + result = runner.run(suite) + diff --git a/lib_pypy/pyrepl/unix_console.py b/lib_pypy/pyrepl/unix_console.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/unix_console.py @@ -0,0 +1,558 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Antonio Cuni +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +import termios, select, os, struct, errno +import signal, re, time, sys +from fcntl import ioctl +from pyrepl import curses +from pyrepl.fancy_termios import tcgetattr, tcsetattr +from pyrepl.console import Console, Event +from pyrepl import unix_eventqueue + +_error = (termios.error, curses.error) + +# there are arguments for changing this to "refresh" +SIGWINCH_EVENT = 'repaint' + +FIONREAD = getattr(termios, "FIONREAD", None) +TIOCGWINSZ = getattr(termios, "TIOCGWINSZ", None) + +def _my_getstr(cap, optional=0): + r = curses.tigetstr(cap) + if not optional and r is None: + raise RuntimeError, \ + "terminal doesn't have the required '%s' capability"%cap + return r + +# at this point, can we say: AAAAAAAAAAAAAAAAAAAAAARGH! +def maybe_add_baudrate(dict, rate): + name = 'B%d'%rate + if hasattr(termios, name): + dict[getattr(termios, name)] = rate + +ratedict = {} +for r in [0, 110, 115200, 1200, 134, 150, 1800, 19200, 200, 230400, + 2400, 300, 38400, 460800, 4800, 50, 57600, 600, 75, 9600]: + maybe_add_baudrate(ratedict, r) + +del r, maybe_add_baudrate + +delayprog = re.compile("\\$<([0-9]+)((?:/|\\*){0,2})>") + +try: + poll = select.poll +except AttributeError: + # this is exactly the minumum necessary to support what we + # do with poll objects + class poll: + def __init__(self): + pass + def register(self, fd, flag): + self.fd = fd + def poll(self, timeout=None): + r,w,e = select.select([self.fd],[],[],timeout) + return r + +POLLIN = getattr(select, "POLLIN", None) + +class UnixConsole(Console): + def __init__(self, f_in=0, f_out=1, term=None, encoding=None): + if encoding is None: + encoding = sys.getdefaultencoding() + + self.encoding = encoding + + if isinstance(f_in, int): + self.input_fd = f_in + else: + self.input_fd = f_in.fileno() + + if isinstance(f_out, int): + self.output_fd = f_out + else: + self.output_fd = f_out.fileno() + + self.pollob = poll() + self.pollob.register(self.input_fd, POLLIN) + curses.setupterm(term, self.output_fd) + self.term = 
term + + self._bel = _my_getstr("bel") + self._civis = _my_getstr("civis", optional=1) + self._clear = _my_getstr("clear") + self._cnorm = _my_getstr("cnorm", optional=1) + self._cub = _my_getstr("cub", optional=1) + self._cub1 = _my_getstr("cub1", 1) + self._cud = _my_getstr("cud", 1) + self._cud1 = _my_getstr("cud1", 1) + self._cuf = _my_getstr("cuf", 1) + self._cuf1 = _my_getstr("cuf1", 1) + self._cup = _my_getstr("cup") + self._cuu = _my_getstr("cuu", 1) + self._cuu1 = _my_getstr("cuu1", 1) + self._dch1 = _my_getstr("dch1", 1) + self._dch = _my_getstr("dch", 1) + self._el = _my_getstr("el") + self._hpa = _my_getstr("hpa", 1) + self._ich = _my_getstr("ich", 1) + self._ich1 = _my_getstr("ich1", 1) + self._ind = _my_getstr("ind", 1) + self._pad = _my_getstr("pad", 1) + self._ri = _my_getstr("ri", 1) + self._rmkx = _my_getstr("rmkx", 1) + self._smkx = _my_getstr("smkx", 1) + + ## work out how we're going to sling the cursor around + if 0 and self._hpa: # hpa don't work in windows telnet :-( + self.__move_x = self.__move_x_hpa + elif self._cub and self._cuf: + self.__move_x = self.__move_x_cub_cuf + elif self._cub1 and self._cuf1: + self.__move_x = self.__move_x_cub1_cuf1 + else: + raise RuntimeError, "insufficient terminal (horizontal)" + + if self._cuu and self._cud: + self.__move_y = self.__move_y_cuu_cud + elif self._cuu1 and self._cud1: + self.__move_y = self.__move_y_cuu1_cud1 + else: + raise RuntimeError, "insufficient terminal (vertical)" + + if self._dch1: + self.dch1 = self._dch1 + elif self._dch: + self.dch1 = curses.tparm(self._dch, 1) + else: + self.dch1 = None + + if self._ich1: + self.ich1 = self._ich1 + elif self._ich: + self.ich1 = curses.tparm(self._ich, 1) + else: + self.ich1 = None + + self.__move = self.__move_short + + self.event_queue = unix_eventqueue.EventQueue(self.input_fd) + self.partial_char = '' + self.cursor_visible = 1 + + def change_encoding(self, encoding): + self.encoding = encoding + + def refresh(self, screen, (cx, cy)): + # this 
function is still too long (over 90 lines) + + if not self.__gone_tall: + while len(self.screen) < min(len(screen), self.height): + self.__hide_cursor() + self.__move(0, len(self.screen) - 1) + self.__write("\n") + self.__posxy = 0, len(self.screen) + self.screen.append("") + else: + while len(self.screen) < len(screen): + self.screen.append("") + + if len(screen) > self.height: + self.__gone_tall = 1 + self.__move = self.__move_tall + + px, py = self.__posxy + old_offset = offset = self.__offset + height = self.height + + if 0: + global counter + try: + counter + except NameError: + counter = 0 + self.__write_code(curses.tigetstr("setaf"), counter) + counter += 1 + if counter > 8: + counter = 0 + + # we make sure the cursor is on the screen, and that we're + # using all of the screen if we can + if cy < offset: + offset = cy + elif cy >= offset + height: + offset = cy - height + 1 + elif offset > 0 and len(screen) < offset + height: + offset = max(len(screen) - height, 0) + screen.append("") + + oldscr = self.screen[old_offset:old_offset + height] + newscr = screen[offset:offset + height] + + # use hardware scrolling if we have it. 
+ if old_offset > offset and self._ri: + self.__hide_cursor() + self.__write_code(self._cup, 0, 0) + self.__posxy = 0, old_offset + for i in range(old_offset - offset): + self.__write_code(self._ri) + oldscr.pop(-1) + oldscr.insert(0, "") + elif old_offset < offset and self._ind: + self.__hide_cursor() + self.__write_code(self._cup, self.height - 1, 0) + self.__posxy = 0, old_offset + self.height - 1 + for i in range(offset - old_offset): + self.__write_code(self._ind) + oldscr.pop(0) + oldscr.append("") + + self.__offset = offset + + for y, oldline, newline, in zip(range(offset, offset + height), + oldscr, + newscr): + if oldline != newline: + self.__write_changed_line(y, oldline, newline, px) + + y = len(newscr) + while y < len(oldscr): + self.__hide_cursor() + self.__move(0, y) + self.__posxy = 0, y + self.__write_code(self._el) + y += 1 + + self.__show_cursor() + + self.screen = screen + self.move_cursor(cx, cy) + self.flushoutput() + + def __write_changed_line(self, y, oldline, newline, px): + # this is frustrating; there's no reason to test (say) + # self.dch1 inside the loop -- but alternative ways of + # structuring this function are equally painful (I'm trying to + # avoid writing code generators these days...) 
+ x = 0 + minlen = min(len(oldline), len(newline)) + # + # reuse the oldline as much as possible, but stop as soon as we + # encounter an ESCAPE, because it might be the start of an escape + # sequene + while x < minlen and oldline[x] == newline[x] and newline[x] != '\x1b': + x += 1 + if oldline[x:] == newline[x+1:] and self.ich1: + if ( y == self.__posxy[1] and x > self.__posxy[0] + and oldline[px:x] == newline[px+1:x+1] ): + x = px + self.__move(x, y) + self.__write_code(self.ich1) + self.__write(newline[x]) + self.__posxy = x + 1, y + elif x < minlen and oldline[x + 1:] == newline[x + 1:]: + self.__move(x, y) + self.__write(newline[x]) + self.__posxy = x + 1, y + elif (self.dch1 and self.ich1 and len(newline) == self.width + and x < len(newline) - 2 + and newline[x+1:-1] == oldline[x:-2]): + self.__hide_cursor() + self.__move(self.width - 2, y) + self.__posxy = self.width - 2, y + self.__write_code(self.dch1) + self.__move(x, y) + self.__write_code(self.ich1) + self.__write(newline[x]) + self.__posxy = x + 1, y + else: + self.__hide_cursor() + self.__move(x, y) + if len(oldline) > len(newline): + self.__write_code(self._el) + self.__write(newline[x:]) + self.__posxy = len(newline), y + + def __write(self, text): + self.__buffer.append((text, 0)) + + def __write_code(self, fmt, *args): + self.__buffer.append((curses.tparm(fmt, *args), 1)) + + def __maybe_write_code(self, fmt, *args): + if fmt: + self.__write_code(fmt, *args) + + def __move_y_cuu1_cud1(self, y): + dy = y - self.__posxy[1] + if dy > 0: + self.__write_code(dy*self._cud1) + elif dy < 0: + self.__write_code((-dy)*self._cuu1) + + def __move_y_cuu_cud(self, y): + dy = y - self.__posxy[1] + if dy > 0: + self.__write_code(self._cud, dy) + elif dy < 0: + self.__write_code(self._cuu, -dy) + + def __move_x_hpa(self, x): + if x != self.__posxy[0]: + self.__write_code(self._hpa, x) + + def __move_x_cub1_cuf1(self, x): + dx = x - self.__posxy[0] + if dx > 0: + self.__write_code(self._cuf1*dx) + elif dx < 0: + 
self.__write_code(self._cub1*(-dx)) + + def __move_x_cub_cuf(self, x): + dx = x - self.__posxy[0] + if dx > 0: + self.__write_code(self._cuf, dx) + elif dx < 0: + self.__write_code(self._cub, -dx) + + def __move_short(self, x, y): + self.__move_x(x) + self.__move_y(y) + + def __move_tall(self, x, y): + assert 0 <= y - self.__offset < self.height, y - self.__offset + self.__write_code(self._cup, y - self.__offset, x) + + def move_cursor(self, x, y): + if y < self.__offset or y >= self.__offset + self.height: + self.event_queue.insert(Event('scroll', None)) + else: + self.__move(x, y) + self.__posxy = x, y + self.flushoutput() + + def prepare(self): + # per-readline preparations: + self.__svtermstate = tcgetattr(self.input_fd) + raw = self.__svtermstate.copy() + raw.iflag &=~ (termios.BRKINT | termios.INPCK | + termios.ISTRIP | termios.IXON) + raw.oflag &=~ (termios.OPOST) + raw.cflag &=~ (termios.CSIZE|termios.PARENB) + raw.cflag |= (termios.CS8) + raw.lflag &=~ (termios.ICANON|termios.ECHO| + termios.IEXTEN|(termios.ISIG*1)) + raw.cc[termios.VMIN] = 1 + raw.cc[termios.VTIME] = 0 + tcsetattr(self.input_fd, termios.TCSADRAIN, raw) + + self.screen = [] + self.height, self.width = self.getheightwidth() + + self.__buffer = [] + + self.__posxy = 0, 0 + self.__gone_tall = 0 + self.__move = self.__move_short + self.__offset = 0 + + self.__maybe_write_code(self._smkx) + + self.old_sigwinch = signal.signal( + signal.SIGWINCH, self.__sigwinch) + + def restore(self): + self.__maybe_write_code(self._rmkx) + self.flushoutput() + tcsetattr(self.input_fd, termios.TCSADRAIN, self.__svtermstate) + + signal.signal(signal.SIGWINCH, self.old_sigwinch) + + def __sigwinch(self, signum, frame): + self.height, self.width = self.getheightwidth() + self.event_queue.insert(Event('resize', None)) + + def push_char(self, char): + self.partial_char += char + try: + c = unicode(self.partial_char, self.encoding) + except UnicodeError, e: + if len(e.args) > 4 and \ + e.args[4] == 'unexpected end of 
data': + pass + else: + raise + else: + self.partial_char = '' + self.event_queue.push(c) + + def get_event(self, block=1): + while self.event_queue.empty(): + while 1: # All hail Unix! + try: + self.push_char(os.read(self.input_fd, 1)) + except (IOError, OSError), err: + if err.errno == errno.EINTR: + if not self.event_queue.empty(): + return self.event_queue.get() + else: + continue + else: + raise + else: + break + if not block: + break + return self.event_queue.get() + + def wait(self): + self.pollob.poll() + + def set_cursor_vis(self, vis): + if vis: + self.__show_cursor() + else: + self.__hide_cursor() + + def __hide_cursor(self): + if self.cursor_visible: + self.__maybe_write_code(self._civis) + self.cursor_visible = 0 + + def __show_cursor(self): + if not self.cursor_visible: + self.__maybe_write_code(self._cnorm) + self.cursor_visible = 1 + + def repaint_prep(self): + if not self.__gone_tall: + self.__posxy = 0, self.__posxy[1] + self.__write("\r") + ns = len(self.screen)*['\000'*self.width] + self.screen = ns + else: + self.__posxy = 0, self.__offset + self.__move(0, self.__offset) + ns = self.height*['\000'*self.width] + self.screen = ns + + if TIOCGWINSZ: + def getheightwidth(self): + try: + return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) + except KeyError: + height, width = struct.unpack( + "hhhh", ioctl(self.input_fd, TIOCGWINSZ, "\000"*8))[0:2] + if not height: return 25, 80 + return height, width + else: + def getheightwidth(self): + try: + return int(os.environ["LINES"]), int(os.environ["COLUMNS"]) + except KeyError: + return 25, 80 + + def forgetinput(self): + termios.tcflush(self.input_fd, termios.TCIFLUSH) + + def flushoutput(self): + for text, iscode in self.__buffer: + if iscode: + self.__tputs(text) + else: + os.write(self.output_fd, text.encode(self.encoding)) + del self.__buffer[:] + + def __tputs(self, fmt, prog=delayprog): + """A Python implementation of the curses tputs function; the + curses one can't really be wrapped in a 
sane manner. + + I have the strong suspicion that this is complexity that + will never do anyone any good.""" + # using .get() means that things will blow up + # only if the bps is actually needed (which I'm + # betting is pretty unlkely) + bps = ratedict.get(self.__svtermstate.ospeed) + while 1: + m = prog.search(fmt) + if not m: + os.write(self.output_fd, fmt) + break + x, y = m.span() + os.write(self.output_fd, fmt[:x]) + fmt = fmt[y:] + delay = int(m.group(1)) + if '*' in m.group(2): + delay *= self.height + if self._pad: + nchars = (bps*delay)/1000 + os.write(self.output_fd, self._pad*nchars) + else: + time.sleep(float(delay)/1000.0) + + def finish(self): + y = len(self.screen) - 1 + while y >= 0 and not self.screen[y]: + y -= 1 + self.__move(0, min(y, self.height + self.__offset - 1)) + self.__write("\n\r") + self.flushoutput() + + def beep(self): + self.__maybe_write_code(self._bel) + self.flushoutput() + + if FIONREAD: + def getpending(self): + e = Event('key', '', '') + + while not self.event_queue.empty(): + e2 = self.event_queue.get() + e.data += e2.data + e.raw += e.raw + + amount = struct.unpack( + "i", ioctl(self.input_fd, FIONREAD, "\0\0\0\0"))[0] + raw = unicode(os.read(self.input_fd, amount), self.encoding, 'replace') + e.data += raw + e.raw += raw + return e + else: + def getpending(self): + e = Event('key', '', '') + + while not self.event_queue.empty(): + e2 = self.event_queue.get() + e.data += e2.data + e.raw += e.raw + + amount = 10000 + raw = unicode(os.read(self.input_fd, amount), self.encoding, 'replace') + e.data += raw + e.raw += raw + return e + + def clear(self): + self.__write_code(self._clear) + self.__gone_tall = 1 + self.__move = self.__move_tall + self.__posxy = 0, 0 + self.screen = [] + diff --git a/lib_pypy/pyrepl/tests/wishes.py b/lib_pypy/pyrepl/tests/wishes.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/tests/wishes.py @@ -0,0 +1,38 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# 
Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from pyrepl.console import Event +from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase + +# this test case should contain as-verbatim-as-possible versions of +# (applicable) feature requests + +class WishesTestCase(ReaderTestCase): + + def test_quoted_insert_repeat(self): + self.run_test([(('digit-arg', '3'), ['']), + ( 'quoted-insert', ['']), + (('self-insert', '\033'), ['^[^[^[']), + ( 'accept', None)]) + +def test(): + run_testcase(WishesTestCase) + +if __name__ == '__main__': + test() diff --git a/lib_pypy/pyrepl/console.py b/lib_pypy/pyrepl/console.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/console.py @@ -0,0 +1,93 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. 
+# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +class Event: + """An Event. `evt' is 'key' or somesuch.""" + + def __init__(self, evt, data, raw=''): + self.evt = evt + self.data = data + self.raw = raw + + def __repr__(self): + return 'Event(%r, %r)'%(self.evt, self.data) + +class Console: + """Attributes: + + screen, + height, + width, + """ + + def refresh(self, screen, xy): + pass + + def prepare(self): + pass + + def restore(self): + pass + + def move_cursor(self, x, y): + pass + + def set_cursor_vis(self, vis): + pass + + def getheightwidth(self): + """Return (height, width) where height and width are the height + and width of the terminal window in characters.""" + pass + + def get_event(self, block=1): + """Return an Event instance. Returns None if |block| is false + and there is no event pending, otherwise waits for the + completion of an event.""" + pass + + def beep(self): + pass + + def clear(self): + """Wipe the screen""" + pass + + def finish(self): + """Move the cursor to the end of the display and otherwise get + ready for end. XXX could be merged with restore? 
Hmm.""" + pass + + def flushoutput(self): + """Flush all output to the screen (assuming there's some + buffering going on somewhere).""" + pass + + def forgetinput(self): + """Forget all pending, but not yet processed input.""" + pass + + def getpending(self): + """Return the characters that have been typed but not yet + processed.""" + pass + + def wait(self): + """Wait for an event.""" + pass diff --git a/lib_pypy/pyrepl/reader.py b/lib_pypy/pyrepl/reader.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/reader.py @@ -0,0 +1,585 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Antonio Cuni +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import types +from pyrepl import unicodedata_ +from pyrepl import commands +from pyrepl import input + +def _make_unctrl_map(): + uc_map = {} + for c in map(unichr, range(256)): + if unicodedata_.category(c)[0] <> 'C': + uc_map[c] = c + for i in range(32): + c = unichr(i) + uc_map[c] = u'^' + unichr(ord('A') + i - 1) + uc_map['\t'] = ' ' # display TABs as 4 characters + uc_map['\177'] = u'^?' 
+ for i in range(256): + c = unichr(i) + if not uc_map.has_key(c): + uc_map[c] = u'\\%03o'%i + return uc_map + +# disp_str proved to be a bottleneck for large inputs, so it's been +# rewritten in C; it's not required though. +try: + raise ImportError # currently it's borked by the unicode support + + from _pyrepl_utils import disp_str, init_unctrl_map + + init_unctrl_map(_make_unctrl_map()) + + del init_unctrl_map +except ImportError: + def _my_unctrl(c, u=_make_unctrl_map()): + if c in u: + return u[c] + else: + if unicodedata_.category(c).startswith('C'): + return '\u%04x'%(ord(c),) + else: + return c + + def disp_str(buffer, join=''.join, uc=_my_unctrl): + """ disp_str(buffer:string) -> (string, [int]) + + Return the string that should be the printed represenation of + |buffer| and a list detailing where the characters of |buffer| + get used up. E.g.: + + >>> disp_str(chr(3)) + ('^C', [1, 0]) + + the list always contains 0s or 1s at present; it could conceivably + go higher as and when unicode support happens.""" + s = map(uc, buffer) + return (join(s), + map(ord, join(map(lambda x:'\001'+(len(x)-1)*'\000', s)))) + + del _my_unctrl + +del _make_unctrl_map + +# syntax classes: + +[SYNTAX_WHITESPACE, + SYNTAX_WORD, + SYNTAX_SYMBOL] = range(3) + +def make_default_syntax_table(): + # XXX perhaps should use some unicodedata here? 
+ st = {} + for c in map(unichr, range(256)): + st[c] = SYNTAX_SYMBOL + for c in [a for a in map(unichr, range(256)) if a.isalpha()]: + st[c] = SYNTAX_WORD + st[u'\n'] = st[u' '] = SYNTAX_WHITESPACE + return st + +default_keymap = tuple( + [(r'\C-a', 'beginning-of-line'), + (r'\C-b', 'left'), + (r'\C-c', 'interrupt'), + (r'\C-d', 'delete'), + (r'\C-e', 'end-of-line'), + (r'\C-f', 'right'), + (r'\C-g', 'cancel'), + (r'\C-h', 'backspace'), + (r'\C-j', 'accept'), + (r'\', 'accept'), + (r'\C-k', 'kill-line'), + (r'\C-l', 'clear-screen'), + (r'\C-m', 'accept'), + (r'\C-q', 'quoted-insert'), + (r'\C-t', 'transpose-characters'), + (r'\C-u', 'unix-line-discard'), + (r'\C-v', 'quoted-insert'), + (r'\C-w', 'unix-word-rubout'), + (r'\C-x\C-u', 'upcase-region'), + (r'\C-y', 'yank'), + (r'\C-z', 'suspend'), + + (r'\M-b', 'backward-word'), + (r'\M-c', 'capitalize-word'), + (r'\M-d', 'kill-word'), + (r'\M-f', 'forward-word'), + (r'\M-l', 'downcase-word'), + (r'\M-t', 'transpose-words'), + (r'\M-u', 'upcase-word'), + (r'\M-y', 'yank-pop'), + (r'\M--', 'digit-arg'), + (r'\M-0', 'digit-arg'), + (r'\M-1', 'digit-arg'), + (r'\M-2', 'digit-arg'), + (r'\M-3', 'digit-arg'), + (r'\M-4', 'digit-arg'), + (r'\M-5', 'digit-arg'), + (r'\M-6', 'digit-arg'), + (r'\M-7', 'digit-arg'), + (r'\M-8', 'digit-arg'), + (r'\M-9', 'digit-arg'), + #(r'\M-\n', 'insert-nl'), + ('\\\\', 'self-insert')] + \ + [(c, 'self-insert') + for c in map(chr, range(32, 127)) if c <> '\\'] + \ + [(c, 'self-insert') + for c in map(chr, range(128, 256)) if c.isalpha()] + \ + [(r'\', 'up'), + (r'\', 'down'), + (r'\', 'left'), + (r'\', 'right'), + (r'\', 'quoted-insert'), + (r'\', 'delete'), + (r'\', 'backspace'), + (r'\M-\', 'backward-kill-word'), + (r'\', 'end'), + (r'\', 'home'), + (r'\', 'help'), + (r'\EOF', 'end'), # the entries in the terminfo database for xterms + (r'\EOH', 'home'), # seem to be wrong. 
this is a less than ideal + # workaround + ]) + +del c # from the listcomps + +class Reader(object): + """The Reader class implements the bare bones of a command reader, + handling such details as editing and cursor motion. What it does + not support are such things as completion or history support - + these are implemented elsewhere. + + Instance variables of note include: + + * buffer: + A *list* (*not* a string at the moment :-) containing all the + characters that have been entered. + * console: + Hopefully encapsulates the OS dependent stuff. + * pos: + A 0-based index into `buffer' for where the insertion point + is. + * screeninfo: + Ahem. This list contains some info needed to move the + insertion point around reasonably efficiently. I'd like to + get rid of it, because its contents are obtuse (to put it + mildly) but I haven't worked out if that is possible yet. + * cxy, lxy: + the position of the insertion point in screen ... XXX + * syntax_table: + Dictionary mapping characters to `syntax class'; read the + emacs docs to see what this means :-) + * commands: + Dictionary mapping command names to command classes. + * arg: + The emacs-style prefix argument. It will be None if no such + argument has been provided. + * dirty: + True if we need to refresh the display. + * kill_ring: + The emacs-style kill-ring; manipulated with yank & yank-pop + * ps1, ps2, ps3, ps4: + prompts. ps1 is the prompt for a one-line input; for a + multiline input it looks like: + ps2> first line of input goes here + ps3> second and further + ps3> lines get ps3 + ... + ps4> and the last one gets ps4 + As with the usual top-level, you can set these to instances if + you like; str() will be called on them (once) at the beginning + of each command. Don't put really long or newline containing + strings here, please! + This is just the default policy; you can change it freely by + overriding get_prompt() (and indeed some standard subclasses + do). 
+ * finished: + handle1 will set this to a true value if a command signals + that we're done. + """ + + help_text = """\ +This is pyrepl. Hear my roar. + +Helpful text may appear here at some point in the future when I'm +feeling more loquacious than I am now.""" + + msg_at_bottom = True + + def __init__(self, console): + self.buffer = [] + self.ps1 = "->> " + self.ps2 = "/>> " + self.ps3 = "|.. " + self.ps4 = "\__ " + self.kill_ring = [] + self.arg = None + self.finished = 0 + self.console = console + self.commands = {} + self.msg = '' + for v in vars(commands).values(): + if ( isinstance(v, type) + and issubclass(v, commands.Command) + and v.__name__[0].islower() ): + self.commands[v.__name__] = v + self.commands[v.__name__.replace('_', '-')] = v + self.syntax_table = make_default_syntax_table() + self.input_trans_stack = [] + self.keymap = self.collect_keymap() + self.input_trans = input.KeymapTranslator( + self.keymap, + invalid_cls='invalid-key', + character_cls='self-insert') + + def collect_keymap(self): + return default_keymap + + def calc_screen(self): + """The purpose of this method is to translate changes in + self.buffer into changes in self.screen. Currently it rips + everything down and starts from scratch, which whilst not + especially efficient is certainly simple(r). 
+ """ + lines = self.get_unicode().split("\n") + screen = [] + screeninfo = [] + w = self.console.width - 1 + p = self.pos + for ln, line in zip(range(len(lines)), lines): + ll = len(line) + if 0 <= p <= ll: + if self.msg and not self.msg_at_bottom: + for mline in self.msg.split("\n"): + screen.append(mline) + screeninfo.append((0, [])) + self.lxy = p, ln + prompt = self.get_prompt(ln, ll >= p >= 0) + p -= ll + 1 + lp = len(prompt) + l, l2 = disp_str(line) + wrapcount = (len(l) + lp) / w + if wrapcount == 0: + screen.append(prompt + l) + screeninfo.append((lp, l2+[1])) + else: + screen.append(prompt + l[:w-lp] + "\\") + screeninfo.append((lp, l2[:w-lp])) + for i in range(-lp + w, -lp + wrapcount*w, w): + screen.append(l[i:i+w] + "\\") + screeninfo.append((0, l2[i:i + w])) + screen.append(l[wrapcount*w - lp:]) + screeninfo.append((0, l2[wrapcount*w - lp:]+[1])) + self.screeninfo = screeninfo + self.cxy = self.pos2xy(self.pos) + if self.msg and self.msg_at_bottom: + for mline in self.msg.split("\n"): + screen.append(mline) + screeninfo.append((0, [])) + return screen + + def bow(self, p=None): + """Return the 0-based index of the word break preceding p most + immediately. + + p defaults to self.pos; word boundaries are determined using + self.syntax_table.""" + if p is None: + p = self.pos + st = self.syntax_table + b = self.buffer + p -= 1 + while p >= 0 and st.get(b[p], SYNTAX_WORD) <> SYNTAX_WORD: + p -= 1 + while p >= 0 and st.get(b[p], SYNTAX_WORD) == SYNTAX_WORD: + p -= 1 + return p + 1 + + def eow(self, p=None): + """Return the 0-based index of the word break following p most + immediately. 
+ + p defaults to self.pos; word boundaries are determined using + self.syntax_table.""" + if p is None: + p = self.pos + st = self.syntax_table + b = self.buffer + while p < len(b) and st.get(b[p], SYNTAX_WORD) <> SYNTAX_WORD: + p += 1 + while p < len(b) and st.get(b[p], SYNTAX_WORD) == SYNTAX_WORD: + p += 1 + return p + + def bol(self, p=None): + """Return the 0-based index of the line break preceding p most + immediately. + + p defaults to self.pos.""" + # XXX there are problems here. + if p is None: + p = self.pos + b = self.buffer + p -= 1 + while p >= 0 and b[p] <> '\n': + p -= 1 + return p + 1 + + def eol(self, p=None): + """Return the 0-based index of the line break following p most + immediately. + + p defaults to self.pos.""" + if p is None: + p = self.pos + b = self.buffer + while p < len(b) and b[p] <> '\n': + p += 1 + return p + + def get_arg(self, default=1): + """Return any prefix argument that the user has supplied, + returning `default' if there is None. `default' defaults + (groan) to 1.""" + if self.arg is None: + return default + else: + return self.arg + + def get_prompt(self, lineno, cursor_on_line): + """Return what should be in the left-hand margin for line + `lineno'.""" + if self.arg is not None and cursor_on_line: + return "(arg: %s) "%self.arg + if "\n" in self.buffer: + if lineno == 0: + return self._ps2 + elif lineno == self.buffer.count("\n"): + return self._ps4 + else: + return self._ps3 + else: + return self._ps1 + + def push_input_trans(self, itrans): + self.input_trans_stack.append(self.input_trans) + self.input_trans = itrans + + def pop_input_trans(self): + self.input_trans = self.input_trans_stack.pop() + + def pos2xy(self, pos): + """Return the x, y coordinates of position 'pos'.""" + # this *is* incomprehensible, yes. 
+ y = 0 + assert 0 <= pos <= len(self.buffer) + if pos == len(self.buffer): + y = len(self.screeninfo) - 1 + p, l2 = self.screeninfo[y] + return p + len(l2) - 1, y + else: + for p, l2 in self.screeninfo: + l = l2.count(1) + if l > pos: + break + else: + pos -= l + y += 1 + c = 0 + i = 0 + while c < pos: + c += l2[i] + i += 1 + while l2[i] == 0: + i += 1 + return p + i, y + + def insert(self, text): + """Insert 'text' at the insertion point.""" + self.buffer[self.pos:self.pos] = list(text) + self.pos += len(text) + self.dirty = 1 + + def update_cursor(self): + """Move the cursor to reflect changes in self.pos""" + self.cxy = self.pos2xy(self.pos) + self.console.move_cursor(*self.cxy) + + def after_command(self, cmd): + """This function is called to allow post command cleanup.""" + if getattr(cmd, "kills_digit_arg", 1): + if self.arg is not None: + self.dirty = 1 + self.arg = None + + def prepare(self): + """Get ready to run. Call restore when finished. You must not + write to the console in between the calls to prepare and + restore.""" + try: + self.console.prepare() + self.arg = None + self.screeninfo = [] + self.finished = 0 + del self.buffer[:] + self.pos = 0 + self.dirty = 1 + self.last_command = None + self._ps1, self._ps2, self._ps3, self._ps4 = \ + map(str, [self.ps1, self.ps2, self.ps3, self.ps4]) + except: + self.restore() + raise + + def last_command_is(self, klass): + if not self.last_command: + return 0 + return issubclass(klass, self.last_command) + + def restore(self): + """Clean up after a run.""" + self.console.restore() + + def finish(self): + """Called when a command signals that we're finished.""" + pass + + def error(self, msg="none"): + self.msg = "! " + msg + " " + self.dirty = 1 + self.console.beep() + + def update_screen(self): + if self.dirty: + self.refresh() + + def refresh(self): + """Recalculate and refresh the screen.""" + # this call sets up self.cxy, so call it first. 
+ screen = self.calc_screen() + self.console.refresh(screen, self.cxy) + self.dirty = 0 # forgot this for a while (blush) + + def do_cmd(self, cmd): + #print cmd + if isinstance(cmd[0], str): + cmd = self.commands.get(cmd[0], + commands.invalid_command)(self, cmd) + elif isinstance(cmd[0], type): + cmd = cmd[0](self, cmd) + + cmd.do() + + self.after_command(cmd) + + if self.dirty: + self.refresh() + else: + self.update_cursor() + + if not isinstance(cmd, commands.digit_arg): + self.last_command = cmd.__class__ + + self.finished = cmd.finish + if self.finished: + self.console.finish() + self.finish() + + def handle1(self, block=1): + """Handle a single event. Wait as long as it takes if block + is true (the default), otherwise return None if no event is + pending.""" + + if self.msg: + self.msg = '' + self.dirty = 1 + + while 1: + event = self.console.get_event(block) + if not event: # can only happen if we're not blocking + return None + + if event.evt == 'key': + self.input_trans.push(event) + elif event.evt == 'scroll': + self.refresh() + elif event.evt == 'resize': + self.refresh() + else: + pass + + cmd = self.input_trans.get() + + if cmd is None: + if block: + continue + else: + return None + + self.do_cmd(cmd) + return 1 + + def push_char(self, char): + self.console.push_char(char) + self.handle1(0) + + def readline(self): + """Read a line. 
The implementation of this method also shows + how to drive Reader if you want more control over the event + loop.""" + self.prepare() + try: + self.refresh() + while not self.finished: + self.handle1() + return self.get_buffer() + finally: + self.restore() + + def bind(self, spec, command): + self.keymap = self.keymap + ((spec, command),) + self.input_trans = input.KeymapTranslator( + self.keymap, + invalid_cls='invalid-key', + character_cls='self-insert') + + def get_buffer(self, encoding=None): + if encoding is None: + encoding = self.console.encoding + return u''.join(self.buffer).encode(self.console.encoding) + + def get_unicode(self): + """Return the current buffer as a unicode string.""" + return u''.join(self.buffer) + +def test(): + from pyrepl.unix_console import UnixConsole + reader = Reader(UnixConsole()) + reader.ps1 = "**> " + reader.ps2 = "/*> " + reader.ps3 = "|*> " + reader.ps4 = "\*> " + while reader.readline(): + pass + +if __name__=='__main__': + test() diff --git a/lib_pypy/pyrepl/simple_interact.py b/lib_pypy/pyrepl/simple_interact.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/simple_interact.py @@ -0,0 +1,64 @@ +# Copyright 2000-2010 Michael Hudson-Doyle +# Armin Rigo +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. 
+# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +"""This is an alternative to python_reader which tries to emulate +the CPython prompt as closely as possible, with the exception of +allowing multiline input and multiline history entries. +""" + +import sys +from pyrepl.readline import multiline_input, _error, _get_reader + +def check(): # returns False if there is a problem initializing the state + try: + _get_reader() + except _error: + return False + return True + +def run_multiline_interactive_console(mainmodule=None): + import code + if mainmodule is None: + import __main__ as mainmodule + console = code.InteractiveConsole(mainmodule.__dict__) + + def more_lines(unicodetext): + # ooh, look at the hack: + src = "#coding:utf-8\n"+unicodetext.encode('utf-8') + try: + code = console.compile(src, '', 'single') + except (OverflowError, SyntaxError, ValueError): + return False + else: + return code is None + + while 1: + try: + ps1 = getattr(sys, 'ps1', '>>> ') + ps2 = getattr(sys, 'ps2', '... 
') + try: + statement = multiline_input(more_lines, ps1, ps2) + except EOFError: + break + more = console.push(statement) + assert not more + except KeyboardInterrupt: + console.write("\nKeyboardInterrupt\n") + console.resetbuffer() diff --git a/lib_pypy/pyrepl/python_reader.py b/lib_pypy/pyrepl/python_reader.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/python_reader.py @@ -0,0 +1,392 @@ +# Copyright 2000-2007 Michael Hudson-Doyle +# Bob Ippolito +# Maciek Fijalkowski +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
+ +# one impressive collections of imports: +from pyrepl.completing_reader import CompletingReader +from pyrepl.historical_reader import HistoricalReader +from pyrepl import completing_reader, reader +from pyrepl import copy_code, commands, completer +from pyrepl import module_lister +import new, sys, os, re, code, traceback +import atexit, warnings +try: + import cPickle as pickle +except ImportError: + import pickle +try: + import imp + imp.find_module("twisted") + from twisted.internet import reactor + from twisted.internet.abstract import FileDescriptor +except ImportError: + default_interactmethod = "interact" +else: + default_interactmethod = "twistedinteract" + +CommandCompiler = code.CommandCompiler + +def eat_it(*args): + """this function eats warnings, if you were wondering""" + pass + +class maybe_accept(commands.Command): + def do(self): + r = self.reader + text = r.get_unicode() + try: + # ooh, look at the hack: + code = r.compiler("#coding:utf-8\n"+text.encode('utf-8')) + except (OverflowError, SyntaxError, ValueError): + self.finish = 1 + else: + if code is None: + r.insert("\n") + else: + self.finish = 1 + +from_line_prog = re.compile( + "^from\s+(?P[A-Za-z_.0-9]*)\s+import\s+(?P[A-Za-z_.0-9]*)") +import_line_prog = re.compile( + "^(?:import|from)\s+(?P[A-Za-z_.0-9]*)\s*$") + +def mk_saver(reader): + def saver(reader=reader): + try: + file = open(os.path.expanduser("~/.pythoni.hist"), "w") + except IOError: + pass + else: + pickle.dump(reader.history, file) + file.close() + return saver + +class PythonicReader(CompletingReader, HistoricalReader): + def collect_keymap(self): + return super(PythonicReader, self).collect_keymap() + ( + (r'\n', 'maybe-accept'), + (r'\M-\n', 'insert-nl')) + + def __init__(self, console, locals, + compiler=None): + super(PythonicReader, self).__init__(console) + self.completer = completer.Completer(locals) + st = self.syntax_table + for c in "._0123456789": + st[c] = reader.SYNTAX_WORD + self.locals = locals + if compiler 
is None: + self.compiler = CommandCompiler() + else: + self.compiler = compiler + try: + file = open(os.path.expanduser("~/.pythoni.hist")) + except IOError: + pass + else: + try: + self.history = pickle.load(file) + except: + self.history = [] + self.historyi = len(self.history) + file.close() + atexit.register(mk_saver(self)) + for c in [maybe_accept]: + self.commands[c.__name__] = c + self.commands[c.__name__.replace('_', '-')] = c + + def get_completions(self, stem): + b = self.get_unicode() + m = import_line_prog.match(b) + if m: + if not self._module_list_ready: + module_lister._make_module_list() + self._module_list_ready = True + + mod = m.group("mod") + try: + return module_lister.find_modules(mod) + except ImportError: + pass + m = from_line_prog.match(b) + if m: + mod, name = m.group("mod", "name") + try: + l = module_lister._packages[mod] + except KeyError: + try: + mod = __import__(mod, self.locals, self.locals, ['']) + return [x for x in dir(mod) if x.startswith(name)] + except ImportError: + pass + else: + return [x[len(mod) + 1:] + for x in l if x.startswith(mod + '.' 
+ name)] + try: + l = completing_reader.uniqify(self.completer.complete(stem)) + return l + except (NameError, AttributeError): + return [] + +class ReaderConsole(code.InteractiveInterpreter): + II_init = code.InteractiveInterpreter.__init__ + def __init__(self, console, locals=None): + if locals is None: + locals = {} + self.II_init(locals) + self.compiler = CommandCompiler() + self.compile = self.compiler.compiler + self.reader = PythonicReader(console, locals, self.compiler) + locals['Reader'] = self.reader + + def run_user_init_file(self): + for key in "PYREPLSTARTUP", "PYTHONSTARTUP": + initfile = os.environ.get(key) + if initfile is not None and os.path.exists(initfile): + break + else: + return + try: + execfile(initfile, self.locals, self.locals) + except: + etype, value, tb = sys.exc_info() + traceback.print_exception(etype, value, tb.tb_next) + + def execute(self, text): + try: + # ooh, look at the hack: + code = self.compile("# coding:utf8\n"+text.encode('utf-8'), + '', 'single') + except (OverflowError, SyntaxError, ValueError): + self.showsyntaxerror("") + else: + self.runcode(code) + sys.stdout.flush() + + def interact(self): + while 1: + try: # catches EOFError's and KeyboardInterrupts during execution + try: # catches KeyboardInterrupts during editing + try: # warning saver + # can't have warnings spewed onto terminal + sv = warnings.showwarning + warnings.showwarning = eat_it + l = unicode(self.reader.readline(), 'utf-8') + finally: + warnings.showwarning = sv + except KeyboardInterrupt: + print "KeyboardInterrupt" + else: + if l: + self.execute(l) + except EOFError: + break + except KeyboardInterrupt: + continue + + def prepare(self): + self.sv_sw = warnings.showwarning + warnings.showwarning = eat_it + self.reader.prepare() + self.reader.refresh() # we want :after methods... 
+ + def restore(self): + self.reader.restore() + warnings.showwarning = self.sv_sw + + def handle1(self, block=1): + try: + r = 1 + r = self.reader.handle1(block) + except KeyboardInterrupt: + self.restore() + print "KeyboardInterrupt" + self.prepare() + else: + if self.reader.finished: + text = self.reader.get_unicode() + self.restore() + if text: + self.execute(text) + self.prepare() + return r + + def tkfilehandler(self, file, mask): + try: + self.handle1(block=0) + except: + self.exc_info = sys.exc_info() + + # how the do you get this to work on Windows (without + # createfilehandler)? threads, I guess + def really_tkinteract(self): + import _tkinter + _tkinter.createfilehandler( + self.reader.console.input_fd, _tkinter.READABLE, + self.tkfilehandler) + + self.exc_info = None + while 1: + # dooneevent will return 0 without blocking if there are + # no Tk windows, 1 after blocking until an event otherwise + # so the following does what we want (this wasn't expected + # to be obvious). + if not _tkinter.dooneevent(_tkinter.ALL_EVENTS): + self.handle1(block=1) + if self.exc_info: + type, value, tb = self.exc_info + self.exc_info = None + raise type, value, tb + + def tkinteract(self): + """Run a Tk-aware Python interactive session. + + This function simulates the Python top-level in a way that + allows Tk's mainloop to run.""" + + # attempting to understand the control flow of this function + # without help may cause internal injuries. so, some + # explanation. + + # The outer while loop is there to restart the interaction if + # the user types control-c when execution is deep in our + # innards. I'm not sure this can't leave internals in an + # inconsistent state, but it's a good start. + + # then the inside loop keeps calling self.handle1 until + # _tkinter gets imported; then control shifts to + # self.really_tkinteract, above. 
+ + # this function can only return via an exception; we mask + # EOFErrors (but they end the interaction) and + # KeyboardInterrupts cause a restart. All other exceptions + # are likely bugs in pyrepl (well, 'cept for SystemExit, of + # course). + + while 1: + try: + try: + self.prepare() + try: + while 1: + if sys.modules.has_key("_tkinter"): + self.really_tkinteract() + # really_tkinteract is not expected to + # return except via an exception, but: + break + self.handle1() + except EOFError: + pass + finally: + self.restore() + except KeyboardInterrupt: + continue + else: + break + + def twistedinteract(self): + from twisted.internet import reactor + from twisted.internet.abstract import FileDescriptor + import signal + outerself = self + class Me(FileDescriptor): + def fileno(self): + """ We want to select on FD 0 """ + return 0 + + def doRead(self): + """called when input is ready""" + try: + outerself.handle1() + except EOFError: + reactor.stop() + + reactor.addReader(Me()) + reactor.callWhenRunning(signal.signal, + signal.SIGINT, + signal.default_int_handler) + self.prepare() + try: + reactor.run() + finally: + self.restore() + + + def cocoainteract(self, inputfilehandle=None, outputfilehandle=None): + # only call this when there's a run loop already going! 
+ # note that unlike the other *interact methods, this returns immediately + from cocoasupport import CocoaInteracter + self.cocoainteracter = CocoaInteracter.alloc().init(self, inputfilehandle, outputfilehandle) + + +def main(use_pygame_console=0, interactmethod=default_interactmethod, print_banner=True, clear_main=True): + si, se, so = sys.stdin, sys.stderr, sys.stdout + try: + if 0 and use_pygame_console: # pygame currently borked + from pyrepl.pygame_console import PyGameConsole, FakeStdin, FakeStdout + con = PyGameConsole() + sys.stderr = sys.stdout = FakeStdout(con) + sys.stdin = FakeStdin(con) + else: + from pyrepl.unix_console import UnixConsole + try: + import locale + except ImportError: + encoding = None + else: + if hasattr(locale, 'nl_langinfo') \ + and hasattr(locale, 'CODESET'): + encoding = locale.nl_langinfo(locale.CODESET) + elif os.environ.get('TERM_PROGRAM') == 'Apple_Terminal': + # /me whistles innocently... + code = int(os.popen( + "defaults read com.apple.Terminal StringEncoding" + ).read()) + if code == 4: + encoding = 'utf-8' + # More could go here -- and what's here isn't + # bulletproof. What would be? AppleScript? + # Doesn't seem to be possible. + else: + encoding = None + else: + encoding = None # so you get ASCII... + con = UnixConsole(0, 1, None, encoding) + if print_banner: + print "Python", sys.version, "on", sys.platform + print 'Type "help", "copyright", "credits" or "license" '\ + 'for more information.' 
+ sys.path.insert(0, os.getcwd()) + + if clear_main and __name__ != '__main__': + mainmod = new.module('__main__') + sys.modules['__main__'] = mainmod + else: + mainmod = sys.modules['__main__'] + + rc = ReaderConsole(con, mainmod.__dict__) + rc.reader._module_list_ready = False + rc.run_user_init_file() + getattr(rc, interactmethod)() + finally: + sys.stdin, sys.stderr, sys.stdout = si, se, so + +if __name__ == '__main__': + main() diff --git a/lib_pypy/pyrepl/pygame_console.py b/lib_pypy/pyrepl/pygame_console.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/pygame_console.py @@ -0,0 +1,353 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +# the pygame console is currently thoroughly broken. + +# there's a fundamental difference from the UnixConsole: here we're +# the terminal emulator too, in effect. This means, e.g., for pythoni +# we really need a separate process (or thread) to monitor for ^C +# during command execution and zap the executor process. Making this +# work on non-Unix is expected to be even more entertaining. 
+ +from pygame.locals import * +from pyrepl.console import Console, Event +from pyrepl import pygame_keymap +import pygame +import types + +lmargin = 5 +rmargin = 5 +tmargin = 5 +bmargin = 5 + +try: + bool +except NameError: + def bool(x): + return not not x + +modcolors = {K_LCTRL:1, + K_RCTRL:1, + K_LMETA:1, + K_RMETA:1, + K_LALT:1, + K_RALT:1, + K_LSHIFT:1, + K_RSHIFT:1} + +class colors: + fg = 250,240,230 + bg = 5, 5, 5 + cursor = 230, 0, 230 + margin = 5, 5, 15 + +class FakeStdout: + def __init__(self, con): + self.con = con + def write(self, text): + self.con.write(text) + def flush(self): + pass + +class FakeStdin: + def __init__(self, con): + self.con = con + def read(self, n=None): + # argh! + raise NotImplementedError + def readline(self, n=None): + from reader import Reader + try: + # this isn't quite right: it will clobber any prompt that's + # been printed. Not sure how to get around this... + return Reader(self.con).readline() + except EOFError: + return '' + +class PyGameConsole(Console): + """Attributes: + + (keymap), + (fd), + screen, + height, + width, + """ + + def __init__(self): + self.pygame_screen = pygame.display.set_mode((800, 600)) + pygame.font.init() + pygame.key.set_repeat(500, 30) + self.font = pygame.font.Font( + "/usr/X11R6/lib/X11/fonts/TTF/luximr.ttf", 15) + self.fw, self.fh = self.fontsize = self.font.size("X") + self.cursor = pygame.Surface(self.fontsize) + self.cursor.fill(colors.cursor) + self.clear() + self.curs_vis = 1 + self.height, self.width = self.getheightwidth() + pygame.display.update() + pygame.event.set_allowed(None) + pygame.event.set_allowed(KEYDOWN) + + def install_keymap(self, keymap): + """Install a given keymap. + + keymap is a tuple of 2-element tuples; each small tuple is a + pair (keyspec, event-name). 
The format for keyspec is + modelled on that used by readline (so read that manual for + now!).""" + self.k = self.keymap = pygame_keymap.compile_keymap(keymap) + + def char_rect(self, x, y): + return self.char_pos(x, y), self.fontsize + + def char_pos(self, x, y): + return (lmargin + x*self.fw, + tmargin + y*self.fh + self.cur_top + self.scroll) + + def paint_margin(self): + s = self.pygame_screen + c = colors.margin + s.fill(c, [0, 0, 800, tmargin]) + s.fill(c, [0, 0, lmargin, 600]) + s.fill(c, [0, 600 - bmargin, 800, bmargin]) + s.fill(c, [800 - rmargin, 0, lmargin, 600]) + + def refresh(self, screen, (cx, cy)): + self.screen = screen + self.pygame_screen.fill(colors.bg, + [0, tmargin + self.cur_top + self.scroll, + 800, 600]) + self.paint_margin() + + line_top = self.cur_top + width, height = self.fontsize + self.cxy = (cx, cy) + cp = self.char_pos(cx, cy) + if cp[1] < tmargin: + self.scroll = - (cy*self.fh + self.cur_top) + self.repaint() + elif cp[1] + self.fh > 600 - bmargin: + self.scroll += (600 - bmargin) - (cp[1] + self.fh) + self.repaint() + if self.curs_vis: + self.pygame_screen.blit(self.cursor, self.char_pos(cx, cy)) + for line in screen: + if 0 <= line_top + self.scroll <= (600 - bmargin - tmargin - self.fh): + if line: + ren = self.font.render(line, 1, colors.fg) + self.pygame_screen.blit(ren, (lmargin, + tmargin + line_top + self.scroll)) + line_top += self.fh + pygame.display.update() + + def prepare(self): + self.cmd_buf = '' + self.k = self.keymap + self.height, self.width = self.getheightwidth() + self.curs_vis = 1 + self.cur_top = self.pos[0] + self.event_queue = [] + + def restore(self): + pass + + def blit_a_char(self, linen, charn): + line = self.screen[linen] + if charn < len(line): + text = self.font.render(line[charn], 1, colors.fg) + self.pygame_screen.blit(text, self.char_pos(charn, linen)) + + def move_cursor(self, x, y): + cp = self.char_pos(x, y) + if cp[1] < tmargin or cp[1] + self.fh > 600 - bmargin: + 
self.event_queue.append(Event('refresh', '', '')) + else: + if self.curs_vis: + cx, cy = self.cxy + self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy)) + self.blit_a_char(cy, cx) + self.pygame_screen.blit(self.cursor, cp) + self.blit_a_char(y, x) + pygame.display.update() + self.cxy = (x, y) + + def set_cursor_vis(self, vis): + self.curs_vis = vis + if vis: + self.move_cursor(*self.cxy) + else: + cx, cy = self.cxy + self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy)) + self.blit_a_char(cy, cx) + pygame.display.update() + + def getheightwidth(self): + """Return (height, width) where height and width are the height + and width of the terminal window in characters.""" + return ((600 - tmargin - bmargin)/self.fh, + (800 - lmargin - rmargin)/self.fw) + + def tr_event(self, pyg_event): + shift = bool(pyg_event.mod & KMOD_SHIFT) + ctrl = bool(pyg_event.mod & KMOD_CTRL) + meta = bool(pyg_event.mod & (KMOD_ALT|KMOD_META)) + + try: + return self.k[(pyg_event.unicode, meta, ctrl)], pyg_event.unicode + except KeyError: + try: + return self.k[(pyg_event.key, meta, ctrl)], pyg_event.unicode + except KeyError: + return "invalid-key", pyg_event.unicode + + def get_event(self, block=1): + """Return an Event instance. Returns None if |block| is false + and there is no event pending, otherwise waits for the + completion of an event.""" + while 1: + if self.event_queue: + return self.event_queue.pop(0) + elif block: + pyg_event = pygame.event.wait() + else: + pyg_event = pygame.event.poll() + if pyg_event.type == NOEVENT: + return + + if pyg_event.key in modcolors: + continue + + k, c = self.tr_event(pyg_event) + self.cmd_buf += c.encode('ascii', 'replace') + self.k = k + + if not isinstance(k, types.DictType): + e = Event(k, self.cmd_buf, []) + self.k = self.keymap + self.cmd_buf = '' + return e + + def beep(self): + # uhh, can't be bothered now. + # pygame.sound.something, I guess. 
+ pass + + def clear(self): + """Wipe the screen""" + self.pygame_screen.fill(colors.bg) + #self.screen = [] + self.pos = [0, 0] + self.grobs = [] + self.cur_top = 0 + self.scroll = 0 + + def finish(self): + """Move the cursor to the end of the display and otherwise get + ready for end. XXX could be merged with restore? Hmm.""" + if self.curs_vis: + cx, cy = self.cxy + self.pygame_screen.fill(colors.bg, self.char_rect(cx, cy)) + self.blit_a_char(cy, cx) + for line in self.screen: + self.write_line(line, 1) + if self.curs_vis: + self.pygame_screen.blit(self.cursor, + (lmargin + self.pos[1], + tmargin + self.pos[0] + self.scroll)) + pygame.display.update() + + def flushoutput(self): + """Flush all output to the screen (assuming there's some + buffering going on somewhere)""" + # no buffering here, ma'am (though perhaps there should be!) + pass + + def forgetinput(self): + """Forget all pending, but not yet processed input.""" + while pygame.event.poll().type <> NOEVENT: + pass + + def getpending(self): + """Return the characters that have been typed but not yet + processed.""" + events = [] + while 1: + event = pygame.event.poll() + if event.type == NOEVENT: + break + events.append(event) + + return events + + def wait(self): + """Wait for an event.""" + raise Exception, "erp!" + + def repaint(self): + # perhaps we should consolidate grobs? 
+ self.pygame_screen.fill(colors.bg) + self.paint_margin() + for (y, x), surf, text in self.grobs: + if surf and 0 < y + self.scroll: + self.pygame_screen.blit(surf, (lmargin + x, + tmargin + y + self.scroll)) + pygame.display.update() + + def write_line(self, line, ret): + charsleft = (self.width*self.fw - self.pos[1])/self.fw + while len(line) > charsleft: + self.write_line(line[:charsleft], 1) + line = line[charsleft:] + if line: + ren = self.font.render(line, 1, colors.fg, colors.bg) + self.grobs.append((self.pos[:], ren, line)) + self.pygame_screen.blit(ren, + (lmargin + self.pos[1], + tmargin + self.pos[0] + self.scroll)) + else: + self.grobs.append((self.pos[:], None, line)) + if ret: + self.pos[0] += self.fh + if tmargin + self.pos[0] + self.scroll + self.fh > 600 - bmargin: + self.scroll = 600 - bmargin - self.pos[0] - self.fh - tmargin + self.repaint() + self.pos[1] = 0 + else: + self.pos[1] += self.fw*len(line) + + def write(self, text): + lines = text.split("\n") + if self.curs_vis: + self.pygame_screen.fill(colors.bg, + (lmargin + self.pos[1], + tmargin + self.pos[0] + self.scroll, + self.fw, self.fh)) + for line in lines[:-1]: + self.write_line(line, 1) + self.write_line(lines[-1], 0) + if self.curs_vis: + self.pygame_screen.blit(self.cursor, + (lmargin + self.pos[1], + tmargin + self.pos[0] + self.scroll)) + pygame.display.update() + + def flush(self): + pass diff --git a/lib_pypy/pyrepl/copy_code.py b/lib_pypy/pyrepl/copy_code.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/copy_code.py @@ -0,0 +1,73 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. 
+# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import new + +def copy_code_with_changes(codeobject, + argcount=None, + nlocals=None, + stacksize=None, + flags=None, + code=None, + consts=None, + names=None, + varnames=None, + filename=None, + name=None, + firstlineno=None, + lnotab=None): + if argcount is None: argcount = codeobject.co_argcount + if nlocals is None: nlocals = codeobject.co_nlocals + if stacksize is None: stacksize = codeobject.co_stacksize + if flags is None: flags = codeobject.co_flags + if code is None: code = codeobject.co_code + if consts is None: consts = codeobject.co_consts + if names is None: names = codeobject.co_names + if varnames is None: varnames = codeobject.co_varnames + if filename is None: filename = codeobject.co_filename + if name is None: name = codeobject.co_name + if firstlineno is None: firstlineno = codeobject.co_firstlineno + if lnotab is None: lnotab = codeobject.co_lnotab + return new.code(argcount, + nlocals, + stacksize, + flags, + code, + consts, + names, + varnames, + filename, + name, + firstlineno, + lnotab) + +code_attrs=['argcount', + 'nlocals', + 'stacksize', + 'flags', + 'code', + 'consts', + 'names', + 'varnames', + 'filename', + 'name', + 'firstlineno', + 'lnotab'] + + diff --git a/lib_pypy/pyrepl/fancy_termios.py b/lib_pypy/pyrepl/fancy_termios.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/fancy_termios.py @@ -0,0 +1,52 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# 
its documentation for any purpose is hereby granted without fee, +# provided that the above copyright notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +import termios + +class TermState: + def __init__(self, tuples): + self.iflag, self.oflag, self.cflag, self.lflag, \ + self.ispeed, self.ospeed, self.cc = tuples + def as_list(self): + return [self.iflag, self.oflag, self.cflag, self.lflag, + self.ispeed, self.ospeed, self.cc] + + def copy(self): + return self.__class__(self.as_list()) + +def tcgetattr(fd): + return TermState(termios.tcgetattr(fd)) + +def tcsetattr(fd, when, attrs): + termios.tcsetattr(fd, when, attrs.as_list()) + +class Term(TermState): + TS__init__ = TermState.__init__ + def __init__(self, fd=0): + self.TS__init__(termios.tcgetattr(fd)) + self.fd = fd + self.stack = [] + def save(self): + self.stack.append( self.as_list() ) + def set(self, when=termios.TCSANOW): + termios.tcsetattr(self.fd, when, self.as_list()) + def restore(self): + self.TS__init__(self.stack.pop()) + self.set() + diff --git a/lib_pypy/pyrepl/tests/basic.py b/lib_pypy/pyrepl/tests/basic.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pyrepl/tests/basic.py @@ -0,0 +1,115 @@ +# Copyright 2000-2004 Michael Hudson-Doyle +# +# All Rights Reserved +# +# +# Permission to use, copy, modify, and distribute this software and +# its documentation for any purpose is hereby granted without fee, +# provided that the above copyright 
notice appear in all copies and +# that both that copyright notice and this permission notice appear in +# supporting documentation. +# +# THE AUTHOR MICHAEL HUDSON DISCLAIMS ALL WARRANTIES WITH REGARD TO +# THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY +# AND FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, +# INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER +# RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF +# CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN +# CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +from pyrepl.console import Event +from pyrepl.tests.infrastructure import ReaderTestCase, EA, run_testcase + +class SimpleTestCase(ReaderTestCase): + + def test_basic(self): + self.run_test([(('self-insert', 'a'), ['a']), + ( 'accept', ['a'])]) + + def test_repeat(self): + self.run_test([(('digit-arg', '3'), ['']), + (('self-insert', 'a'), ['aaa']), + ( 'accept', ['aaa'])]) + + def test_kill_line(self): + self.run_test([(('self-insert', 'abc'), ['abc']), + ( 'left', None), + ( 'kill-line', ['ab']), + ( 'accept', ['ab'])]) + + def test_unix_line_discard(self): + self.run_test([(('self-insert', 'abc'), ['abc']), + ( 'left', None), + ( 'unix-word-rubout', ['c']), + ( 'accept', ['c'])]) + + def test_kill_word(self): + self.run_test([(('self-insert', 'ab cd'), ['ab cd']), + ( 'beginning-of-line', ['ab cd']), + ( 'kill-word', [' cd']), + ( 'accept', [' cd'])]) + + def test_backward_kill_word(self): + self.run_test([(('self-insert', 'ab cd'), ['ab cd']), + ( 'backward-kill-word', ['ab ']), + ( 'accept', ['ab '])]) + + def test_yank(self): + self.run_test([(('self-insert', 'ab cd'), ['ab cd']), + ( 'backward-kill-word', ['ab ']), + ( 'beginning-of-line', ['ab ']), + ( 'yank', ['cdab ']), + ( 'accept', ['cdab '])]) + + def test_yank_pop(self): + self.run_test([(('self-insert', 'ab cd'), ['ab cd']), + ( 'backward-kill-word', ['ab ']), + ( 'left', ['ab ']), + ( 
'backward-kill-word', [' ']), + ( 'yank', ['ab ']), + ( 'yank-pop', ['cd ']), + ( 'accept', ['cd '])]) + + def test_interrupt(self): + try: + self.run_test([( 'interrupt', [''])]) + except KeyboardInterrupt: + pass + else: + self.fail('KeyboardInterrupt got lost') + + # test_suspend -- hah + + def test_up(self): + self.run_test([(('self-insert', 'ab\ncd'), ['ab', 'cd']), + ( 'up', ['ab', 'cd']), + (('self-insert', 'e'), ['abe', 'cd']), + ( 'accept', ['abe', 'cd'])]) + + def test_down(self): + self.run_test([(('self-insert', 'ab\ncd'), ['ab', 'cd']), + ( 'up', ['ab', 'cd']), + (('self-insert', 'e'), ['abe', 'cd']), + ( 'down', ['abe', 'cd']), + (('self-insert', 'f'), ['abe', 'cdf']), + ( 'accept', ['abe', 'cdf'])]) + + def test_left(self): + self.run_test([(('self-insert', 'ab'), ['ab']), + ( 'left', ['ab']), + (('self-insert', 'c'), ['acb']), + ( 'accept', ['acb'])]) + + def test_right(self): + self.run_test([(('self-insert', 'ab'), ['ab']), + ( 'left', ['ab']), + (('self-insert', 'c'), ['acb']), + ( 'right', ['acb']), + (('self-insert', 'd'), ['acbd']), + ( 'accept', ['acbd'])]) + +def test(): + run_testcase(SimpleTestCase) + +if __name__ == '__main__': + test() From commits-noreply at bitbucket.org Mon Mar 21 17:27:31 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 17:27:31 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: skip stackless tests if greenlet is not importable Message-ID: <20110321162731.7A90B2A2002@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42818:d3c9efe4a578 Date: 2011-03-21 17:26 +0100 http://bitbucket.org/pypy/pypy/changeset/d3c9efe4a578/ Log: skip stackless tests if greenlet is not importable diff --git a/pypy/module/_stackless/test/conftest.py b/pypy/module/_stackless/test/conftest.py --- a/pypy/module/_stackless/test/conftest.py +++ b/pypy/module/_stackless/test/conftest.py @@ -2,6 +2,7 @@ import py.test def pytest_runtest_setup(item): + 
py.test.importorskip('greenlet') if sys.platform == 'win32': py.test.skip("stackless tests segfault on Windows") From commits-noreply at bitbucket.org Mon Mar 21 17:53:12 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 17:53:12 +0100 (CET) Subject: [pypy-svn] pypy default: first step in fixing the force-build script - kill the svn path handling Message-ID: <20110321165312.68EAB36C202@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42819:74e1a8c4d8b2 Date: 2011-03-21 17:50 +0100 http://bitbucket.org/pypy/pypy/changeset/74e1a8c4d8b2/ Log: first step in fixing the force-build script - kill the svn path handling diff --git a/pypy/tool/release/force-builds.py b/pypy/tool/release/force-builds.py --- a/pypy/tool/release/force-builds.py +++ b/pypy/tool/release/force-builds.py @@ -31,11 +31,9 @@ ] def main(): + #XXX: handle release tags + #XXX: handle validity checks branch = sys.argv[1] - if (not branch.startswith('/branch/') and not branch.startswith('/tag/') and - not branch.startswith('/release/')): - branch = '/branch/' + branch - lock = defer.DeferredLock() requests = [] def ebList(err): From commits-noreply at bitbucket.org Mon Mar 21 19:16:15 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 21 Mar 2011 19:16:15 +0100 (CET) Subject: [pypy-svn] pypy numpy-exp: Fix crappy merge Message-ID: <20110321181615.BB9C22A202B@codespeak.net> Author: Maciej Fijalkowski Branch: numpy-exp Changeset: r42821:d4efba8e8369 Date: 2011-03-21 12:16 -0600 http://bitbucket.org/pypy/pypy/changeset/d4efba8e8369/ Log: Fix crappy merge diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -33,7 +33,7 @@ "struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", "_bisect", "binascii", "_multiprocessing", '_warnings', - "_collections", , 'micronumpy'] + "_collections", 
'micronumpy'] )) translation_modules = default_modules.copy() From commits-noreply at bitbucket.org Mon Mar 21 19:35:24 2011 From: commits-noreply at bitbucket.org (fijal) Date: Mon, 21 Mar 2011 19:35:24 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: Add note about regexes Message-ID: <20110321183524.BE5C52A2002@codespeak.net> Author: Maciej Fijalkowski Branch: extradoc Changeset: r3380:f3716e7097da Date: 2011-03-21 12:35 -0600 http://bitbucket.org/pypy/extradoc/changeset/f3716e7097da/ Log: Add note about regexes diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -44,6 +44,12 @@ - the integer range analysis cannot deal with int_between, because it is lowered to uint arithmetic too early +- regular expressions are still not very efficient in cases. For example: + + re.search("b+", "a" * 1000 + "b") gets compiled to a resident call + re.search("(ab)+", "a" * 1000 + "b") almost doesn't get compiled and + gets very modest speedups with the JIT on (10-20%) + OPTIMIZATIONS ------------- From commits-noreply at bitbucket.org Mon Mar 21 20:09:05 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 20:09:05 +0100 (CET) Subject: [pypy-svn] pypy default: On windows, fix sys.executable on top of bin/py.py: sys.__dict__ must not be Message-ID: <20110321190905.96CA22A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42822:24918e8d21f9 Date: 2011-03-21 20:09 +0100 http://bitbucket.org/pypy/pypy/changeset/24918e8d21f9/ Log: On windows, fix sys.executable on top of bin/py.py: sys.__dict__ must not be realized before py.py startup code overrides sys.executable with its own value. 
This fixes a test in pypy/interpreter/test/test_zpy.py diff --git a/pypy/module/sys/__init__.py b/pypy/module/sys/__init__.py --- a/pypy/module/sys/__init__.py +++ b/pypy/module/sys/__init__.py @@ -110,7 +110,7 @@ if _WIN: from pypy.module.sys import vm w_handle = vm.get_dllhandle(space) - space.setattr(self, space.wrap("dllhandle"), w_handle) + space.setitem(self.w_dict, space.wrap("dllhandle"), w_handle) def getmodule(self, name): space = self.space From commits-noreply at bitbucket.org Mon Mar 21 20:28:08 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:28:08 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: First draft of a thank you psf blog post. Message-ID: <20110321192808.738B22A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3381:1bf6f25120ea Date: 2011-03-21 15:27 -0400 http://bitbucket.org/pypy/extradoc/changeset/1bf6f25120ea/ Log: First draft of a thank you psf blog post. diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst new file mode 100644 --- /dev/null +++ b/blog/draft/thank-you-psf.rst @@ -0,0 +1,15 @@ +A thank you to the PSF +====================== + +This year's PyCon was an incredible time, several members of the PyPy team were +there, and we'll be blogging more about our experienced in the coming days, +however for now we wanted to extend a thank to the Python Software Foundation +(PSF). + +As you may have heard, on Friday morning at PyCon Jesse Noller handed the PyPy +team a check for $10,000, on behalf of the PSF. This was in recognition of our +success in the past few years bringing PyPy from a research project to a fast, +compliant, production ready Python implementation, and to allow us to continue +our work on making it faster and more up to date with upstream version changes. + +Thanks you, PSF. 
From commits-noreply at bitbucket.org Mon Mar 21 20:28:08 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:28:08 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: added an xxx for an image. Message-ID: <20110321192808.E9A212A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3382:04acfd18528c Date: 2011-03-21 15:27 -0400 http://bitbucket.org/pypy/extradoc/changeset/04acfd18528c/ Log: added an xxx for an image. diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -12,4 +12,6 @@ compliant, production ready Python implementation, and to allow us to continue our work on making it faster and more up to date with upstream version changes. +.. image:: xxx.png + Thanks you, PSF. From commits-noreply at bitbucket.org Mon Mar 21 20:30:05 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:30:05 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: (alex, fijal) typo fix Message-ID: <20110321193005.51F4E2A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3383:df01a08f3942 Date: 2011-03-21 15:29 -0400 http://bitbucket.org/pypy/extradoc/changeset/df01a08f3942/ Log: (alex, fijal) typo fix diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -3,8 +3,8 @@ This year's PyCon was an incredible time, several members of the PyPy team were there, and we'll be blogging more about our experienced in the coming days, -however for now we wanted to extend a thank to the Python Software Foundation -(PSF). +however for now we wanted to extend a thank you to the Python Software +Foundation (PSF). As you may have heard, on Friday morning at PyCon Jesse Noller handed the PyPy team a check for $10,000, on behalf of the PSF. 
This was in recognition of our From commits-noreply at bitbucket.org Mon Mar 21 20:32:07 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:32:07 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: a link to the PSF Message-ID: <20110321193207.5533F2A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3384:1b331f3af03d Date: 2011-03-21 15:31 -0400 http://bitbucket.org/pypy/extradoc/changeset/1b331f3af03d/ Log: a link to the PSF diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -3,8 +3,8 @@ This year's PyCon was an incredible time, several members of the PyPy team were there, and we'll be blogging more about our experienced in the coming days, -however for now we wanted to extend a thank you to the Python Software -Foundation (PSF). +however for now we wanted to extend a thank you to the `Python Software +Foundation (PSF) `_. As you may have heard, on Friday morning at PyCon Jesse Noller handed the PyPy team a check for $10,000, on behalf of the PSF. This was in recognition of our From commits-noreply at bitbucket.org Mon Mar 21 20:38:06 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:38:06 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: another paragraph Message-ID: <20110321193806.90E322A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3385:a8c0c8fc5e0e Date: 2011-03-21 15:37 -0400 http://bitbucket.org/pypy/extradoc/changeset/a8c0c8fc5e0e/ Log: another paragraph diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -12,6 +12,11 @@ compliant, production ready Python implementation, and to allow us to continue our work on making it faster and more up to date with upstream version changes. 
+Beyond the large check, we're grateful for the endoresement this represents, +both of our work on PyPy, as well as all alternatve Python VMs. The PSF is +shifting its focus from representing just CPython to all of the Python +implementation, which we are very appreciative of. + .. image:: xxx.png Thanks you, PSF. From commits-noreply at bitbucket.org Mon Mar 21 20:42:50 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:42:50 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: typos Message-ID: <20110321194250.CAF6B2A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3386:2b563c7997ba Date: 2011-03-21 15:42 -0400 http://bitbucket.org/pypy/extradoc/changeset/2b563c7997ba/ Log: typos diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -12,8 +12,8 @@ compliant, production ready Python implementation, and to allow us to continue our work on making it faster and more up to date with upstream version changes. -Beyond the large check, we're grateful for the endoresement this represents, -both of our work on PyPy, as well as all alternatve Python VMs. The PSF is +Beyond the large check, we're grateful for the endorsement this represents, +both of our work on PyPy, as well as all alternative Python VMs. The PSF is shifting its focus from representing just CPython to all of the Python implementation, which we are very appreciative of. 
From commits-noreply at bitbucket.org Mon Mar 21 20:43:55 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:43:55 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: fix the tense Message-ID: <20110321194355.A0F212A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3387:480808daa38f Date: 2011-03-21 15:43 -0400 http://bitbucket.org/pypy/extradoc/changeset/480808daa38f/ Log: fix the tense diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -13,8 +13,8 @@ our work on making it faster and more up to date with upstream version changes. Beyond the large check, we're grateful for the endorsement this represents, -both of our work on PyPy, as well as all alternative Python VMs. The PSF is -shifting its focus from representing just CPython to all of the Python +both of our work on PyPy, as well as all alternative Python VMs. The PSF has +shifted its focus from representing just CPython to all of the Python implementation, which we are very appreciative of. .. 
image:: xxx.png From commits-noreply at bitbucket.org Mon Mar 21 20:56:17 2011 From: commits-noreply at bitbucket.org (lac) Date: Mon, 21 Mar 2011 20:56:17 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: 3 way merge Message-ID: <20110321195617.34DF82A2002@codespeak.net> Author: Laura Creighton Branch: extradoc Changeset: r3388:ad020dac5904 Date: 2011-03-21 20:55 +0100 http://bitbucket.org/pypy/extradoc/changeset/ad020dac5904/ Log: 3 way merge diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -1,22 +1,25 @@ A thank you to the PSF ====================== -This year's PyCon was an incredible time, several members of the PyPy team were -there, and we'll be blogging more about our experienced in the coming days, -however for now we wanted to extend a thank you to the `Python Software +This year's PyCon was an incredible time; several members of the PyPy team were +there, and we'll be blogging more about our experiences in the coming days. +However, we quickly wanted to extend a thank you to the `Python Software Foundation (PSF) `_. As you may have heard, on Friday morning at PyCon Jesse Noller handed the PyPy team a check for $10,000, on behalf of the PSF. This was in recognition of our -success in the past few years bringing PyPy from a research project to a fast, -compliant, production ready Python implementation, and to allow us to continue -our work on making it faster and more up to date with upstream version changes. +success over the past few years in bringing PyPy from a research project +to a fast, +compliant, production-ready Python implementation, and to allow us to continue +our work on making it faster and more up-to-date with upstream version changes. Beyond the large check, we're grateful for the endorsement this represents, -both of our work on PyPy, as well as all alternative Python VMs. 
The PSF has -shifted its focus from representing just CPython to all of the Python -implementation, which we are very appreciative of. +not only of our work on PyPy, but also of all alternatve Python VMs. +The PSF has +shifted its focus from representing just CPython to representing the Python +Language, reguardless of its implementation, something +we are very appreciative of. .. image:: xxx.png -Thanks you, PSF. +Thank you, PSF. From commits-noreply at bitbucket.org Mon Mar 21 20:58:21 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 21 Mar 2011 20:58:21 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: whitespace cleanup Message-ID: <20110321195821.7EDC42A2002@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3389:63e4617062b2 Date: 2011-03-21 15:58 -0400 http://bitbucket.org/pypy/extradoc/changeset/63e4617062b2/ Log: whitespace cleanup diff --git a/blog/draft/thank-you-psf.rst b/blog/draft/thank-you-psf.rst --- a/blog/draft/thank-you-psf.rst +++ b/blog/draft/thank-you-psf.rst @@ -8,17 +8,16 @@ As you may have heard, on Friday morning at PyCon Jesse Noller handed the PyPy team a check for $10,000, on behalf of the PSF. This was in recognition of our -success over the past few years in bringing PyPy from a research project -to a fast, -compliant, production-ready Python implementation, and to allow us to continue -our work on making it faster and more up-to-date with upstream version changes. +success over the past few years in bringing PyPy from a research project +to a fast, compliant, production-ready Python implementation, and to allow us +to continue our work on making it faster and more up-to-date with upstream +version changes. Beyond the large check, we're grateful for the endorsement this represents, -not only of our work on PyPy, but also of all alternatve Python VMs. 
-The PSF has -shifted its focus from representing just CPython to representing the Python -Language, reguardless of its implementation, something -we are very appreciative of. +not only of our work on PyPy, but also of all alternatve Python VMs. +The PSF has shifted its focus from representing just CPython to representing +the Python Language, reguardless of its implementation, something we are very +appreciative of. .. image:: xxx.png From commits-noreply at bitbucket.org Mon Mar 21 21:47:42 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:42 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: A branch to allow the astcompiler to work on opened files, in addition to strings. Message-ID: <20110321204742.D57E92A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42823:c3044717968b Date: 2011-03-21 21:45 +0100 http://bitbucket.org/pypy/pypy/changeset/c3044717968b/ Log: A branch to allow the astcompiler to work on opened files, in addition to strings. see issue402 From commits-noreply at bitbucket.org Mon Mar 21 21:47:43 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:43 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Revert part of 3b843b81c6e6, to respect the comment just above Message-ID: <20110321204743.AA0042A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42824:75f6b4394f19 Date: 2011-03-18 10:36 +0100 http://bitbucket.org/pypy/pypy/changeset/75f6b4394f19/ Log: Revert part of 3b843b81c6e6, to respect the comment just above diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -119,11 +119,11 @@ try: textsrc = recode_to_utf8(self.space, textsrc, enc) except OperationError, e: + space = self.space # if the codec is not found, LookupError is raised. 
we # check using 'is_w' not to mask potential IndexError or # KeyError - space = self.space - if e.match(space, space.w_LookupError): + if space.is_w(e.w_type, space.w_LookupError): raise error.SyntaxError("Unknown encoding: %s" % enc, filename=compile_info.filename) # Transform unicode errors into SyntaxError From commits-noreply at bitbucket.org Mon Mar 21 21:47:44 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:44 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Split PythonParser.parse_source(): after the source lines have been decoded, it now calls build_tree(). Message-ID: <20110321204744.4468E2A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42825:28e5aa4e238d Date: 2011-03-18 11:44 +0100 http://bitbucket.org/pypy/pypy/changeset/28e5aa4e238d/ Log: Split PythonParser.parse_source(): after the source lines have been decoded, it now calls build_tree(). This prepares for a future function parse_file() diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -139,19 +139,29 @@ self.grammar = pygram.python_grammar_no_print else: self.grammar = pygram.python_grammar + source_lines = textsrc.splitlines(True) + + if textsrc and textsrc[-1] == "\n": + compile_info.flags &= ~consts.PyCF_DONT_IMPLY_DEDENT + + if enc is not None: + compile_info.encoding = enc + + return self.build_tree(source_lines, compile_info) + + def build_tree(self, source_lines, compile_info): + """Builds the parse tree from a list of source lines""" # The tokenizer is very picky about how it wants its input. 
- source_lines = textsrc.splitlines(True) if source_lines and not source_lines[-1].endswith("\n"): source_lines[-1] += '\n' - if textsrc and textsrc[-1] == "\n": - flags &= ~consts.PyCF_DONT_IMPLY_DEDENT self.prepare(_targets[compile_info.mode]) tp = 0 try: try: - tokens = pytokenizer.generate_tokens(source_lines, flags) + tokens = pytokenizer.generate_tokens(source_lines, + compile_info.flags) for tp, value, lineno, column, line in tokens: if self.add_token(tp, value, lineno, column, line): break @@ -176,6 +186,4 @@ finally: # Avoid hanging onto the tree. self.root = None - if enc is not None: - compile_info.encoding = enc return tree From commits-noreply at bitbucket.org Mon Mar 21 21:47:47 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:47 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Override Parser.classify() to handle __future__.print_function, and remove python_grammar_no_print. Message-ID: <20110321204747.31DA82A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42827:5244713e15e6 Date: 2011-03-21 14:23 +0100 http://bitbucket.org/pypy/pypy/changeset/5244713e15e6/ Log: Override Parser.classify() to handle __future__.print_function, and remove python_grammar_no_print. Later, this may avoid the first pass to detect __future__ statements. 
diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -217,14 +217,15 @@ return self.build_tree(source_lines, compile_info) + def classify(self, token_type, value, *args): + if self.compile_info.flags & consts.CO_FUTURE_PRINT_FUNCTION: + if token_type == self.grammar.KEYWORD_TOKEN and value == 'print': + return self.grammar.token_ids[pygram.tokens.NAME] + return parser.Parser.classify(self, token_type, value, *args) + def build_tree(self, source_lines, compile_info): """Builds the parse tree from a list of source lines""" - if compile_info.flags & consts.CO_FUTURE_PRINT_FUNCTION: - self.grammar = pygram.python_grammar_no_print - else: - self.grammar = pygram.python_grammar - if source_lines and source_lines[-1]: last_line = source_lines[-1] if last_line: @@ -235,6 +236,7 @@ source_lines[-1] += '\n' self.prepare(_targets[compile_info.mode]) + self.compile_info = compile_info tp = 0 try: try: @@ -264,4 +266,5 @@ finally: # Avoid hanging onto the tree. self.root = None + self.compile_info = None return tree diff --git a/pypy/interpreter/pyparser/pygram.py b/pypy/interpreter/pyparser/pygram.py --- a/pypy/interpreter/pyparser/pygram.py +++ b/pypy/interpreter/pyparser/pygram.py @@ -19,9 +19,6 @@ python_grammar = _get_python_grammar() -python_grammar_no_print = python_grammar.shared_copy() -python_grammar_no_print.keyword_ids = python_grammar_no_print.keyword_ids.copy() -del python_grammar_no_print.keyword_ids["print"] class _Tokens(object): pass From commits-noreply at bitbucket.org Mon Mar 21 21:47:48 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:48 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Fix encoding detection when source starts with \n. 
Message-ID: <20110321204748.A131C2A202B@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42828:04a3c9a274ba Date: 2011-03-21 14:32 +0100 http://bitbucket.org/pypy/pypy/changeset/04a3c9a274ba/ Log: Fix encoding detection when source starts with \n. diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -38,27 +38,38 @@ """, info=info) assert tree.type == syms.file_input assert info.encoding == "iso-8859-1" + sentence = u"u'Die Männer ärgen sich!'" input = (u"# coding: utf-7\nstuff = %s" % (sentence,)).encode("utf-7") tree = self.parse(input, info=info) assert info.encoding == "utf-7" + input = "# coding: iso-8859-15\nx" self.parse(input, info=info) assert info.encoding == "iso-8859-15" + + input = "\n# coding: iso-8859-15\nx" + self.parse(input, info=info) + assert info.encoding == "iso-8859-15" + input = "\xEF\xBB\xBF# coding: utf-8\nx" self.parse(input, info=info) assert info.encoding == "utf-8" + input = "# coding: utf-8\nx" info.flags |= consts.PyCF_SOURCE_IS_UTF8 exc = py.test.raises(SyntaxError, self.parse, input, info=info).value info.flags &= ~consts.PyCF_SOURCE_IS_UTF8 assert exc.msg == "coding declaration in unicode string" + input = "\xEF\xBB\xBF# coding: latin-1\nx" exc = py.test.raises(SyntaxError, self.parse, input).value assert exc.msg == "UTF-8 BOM with non-utf8 coding cookie" + input = "# coding: not-here" exc = py.test.raises(SyntaxError, self.parse, input).value assert exc.msg == "Unknown encoding: not-here" + input = u"# coding: ascii\n\xe2".encode('utf-8') exc = py.test.raises(SyntaxError, self.parse, input).value if isinstance(self, TestPythonFileParser): diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -42,7 +42,7 @@ enc = 
_check_line_for_encoding(s1[:eol]) if enc: return enc - if eol: + if eol >= 0: if s2: s = s1 + s2 else: From commits-noreply at bitbucket.org Mon Mar 21 21:47:46 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:46 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: New function PythonParser.parse_file(), it accepts a "Stream" to provide source lines Message-ID: <20110321204746.048452A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42826:0b31f3c872f9 Date: 2011-03-18 17:25 +0100 http://bitbucket.org/pypy/pypy/changeset/0b31f3c872f9/ Log: New function PythonParser.parse_file(), it accepts a "Stream" to provide source lines diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -61,8 +61,13 @@ assert exc.msg == "Unknown encoding: not-here" input = u"# coding: ascii\n\xe2".encode('utf-8') exc = py.test.raises(SyntaxError, self.parse, input).value - assert exc.msg == ("'ascii' codec can't decode byte 0xc3 " - "in position 16: ordinal not in range(128)") + if isinstance(self, TestPythonFileParser): + # incremental decoder works line by line + assert exc.msg == ("'ascii' codec can't decode byte 0xc3 " + "in position 0: ordinal not in range(128)") + else: + assert exc.msg == ("'ascii' codec can't decode byte 0xc3 " + "in position 16: ordinal not in range(128)") def test_syntax_error(self): parse = self.parse @@ -144,3 +149,31 @@ self.parse('0b1101') self.parse('0b0l') py.test.raises(SyntaxError, self.parse, "0b112") + +class TestPythonFileParser(TestPythonParser): + def parse(self, source, mode="exec", info=None): + if info is None: + info = pyparse.CompileInfo("", mode) + + space = self.space + + from pypy.interpreter.error import OperationError + import StringIO + + class IOStream(pyparse.Stream): + def __init__(self, source): + 
self.stream = StringIO.StringIO(source) + def readline(self): + return self.stream.readline() + def recode_to_utf8(self, line, encoding): + try: + if encoding is None or encoding in ('utf-8', 'iso-8859-1'): + return line + return line.decode(encoding).encode('utf-8') + except LookupError, e: + raise OperationError(space.w_LookupError, + space.wrap(e.message)) + except UnicodeDecodeError, e: + raise SyntaxError(str(e)) # The one from pyparser.error! + + return self.parser.parse_file(IOStream(source), info) diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -12,6 +12,7 @@ def recode_to_utf8(space, text, encoding): return space.str_w(_recode_to_utf8(space, space.wrap(text), space.wrap(encoding))) + def _normalize_encoding(encoding): """returns normalized name for @@ -33,17 +34,25 @@ return 'iso-8859-1' return encoding -def _check_for_encoding(s): - eol = s.find('\n') +def _check_for_encoding(s1, s2): + eol = s1.find('\n') if eol < 0: - return _check_line_for_encoding(s) - enc = _check_line_for_encoding(s[:eol]) + enc = _check_line_for_encoding(s1) + else: + enc = _check_line_for_encoding(s1[:eol]) if enc: return enc - eol2 = s.find('\n', eol + 1) - if eol2 < 0: - return _check_line_for_encoding(s[eol + 1:]) - return _check_line_for_encoding(s[eol + 1:eol2]) + if eol: + if s2: + s = s1 + s2 + else: + s = s1 + eol2 = s.find('\n', eol + 1) + if eol2 < 0: + return _check_line_for_encoding(s[eol + 1:]) + return _check_line_for_encoding(s[eol + 1:eol2]) + elif s2: + return _check_line_for_encoding(s2) def _check_line_for_encoding(line): @@ -86,75 +95,144 @@ 'exec' : pygram.syms.file_input, } +class Stream(object): + "Pseudo-file object used by PythonParser.parse_file" + def readline(self): + raise NotImplementedError + def recode_to_utf8(self, text, encoding): + raise NotImplementedError + class PythonParser(parser.Parser): def __init__(self, space, 
grammar=pygram.python_grammar): parser.Parser.__init__(self, grammar) self.space = space + def _detect_encoding(self, text1, text2, compile_info): + "Detect source encoding from the beginning of the file" + if text1.startswith("\xEF\xBB\xBF"): + text1 = text1[3:] + compile_info.encoding = 'utf-8' + # If an encoding is explicitly given check that it is utf-8. + decl_enc = _check_for_encoding(text1, text2) + if decl_enc and decl_enc != "utf-8": + raise error.SyntaxError("UTF-8 BOM with non-utf8 coding cookie", + filename=compile_info.filename) + elif compile_info.flags & consts.PyCF_SOURCE_IS_UTF8: + compile_info.encoding = 'utf-8' + if _check_for_encoding(text1, text2) is not None: + raise error.SyntaxError("coding declaration in unicode string", + filename=compile_info.filename) + else: + compile_info.encoding = _normalize_encoding( + _check_for_encoding(text1, text2)) + return text1 + + def _decode_error(self, e, compile_info): + space = self.space + # if the codec is not found, LookupError is raised. we + # check using 'is_w' not to mask potential IndexError or + # KeyError + if space.is_w(e.w_type, space.w_LookupError): + return error.SyntaxError( + "Unknown encoding: %s" % compile_info.encoding, + filename=compile_info.filename) + # Transform unicode errors into SyntaxError + if e.match(space, space.w_UnicodeDecodeError): + e.normalize_exception(space) + w_message = space.str(e.get_w_value(space)) + return error.SyntaxError(space.str_w(w_message)) + def parse_source(self, textsrc, compile_info): """Main entry point for parsing Python source. Everything from decoding the source to tokenizing to building the parse tree is handled here. """ - # Detect source encoding. - enc = None - if textsrc.startswith("\xEF\xBB\xBF"): - textsrc = textsrc[3:] - enc = 'utf-8' - # If an encoding is explicitly given check that it is utf-8. 
- decl_enc = _check_for_encoding(textsrc) - if decl_enc and decl_enc != "utf-8": - raise error.SyntaxError("UTF-8 BOM with non-utf8 coding cookie", - filename=compile_info.filename) - elif compile_info.flags & consts.PyCF_SOURCE_IS_UTF8: - enc = 'utf-8' - if _check_for_encoding(textsrc) is not None: - raise error.SyntaxError("coding declaration in unicode string", - filename=compile_info.filename) - else: - enc = _normalize_encoding(_check_for_encoding(textsrc)) - if enc is not None and enc not in ('utf-8', 'iso-8859-1'): - try: - textsrc = recode_to_utf8(self.space, textsrc, enc) - except OperationError, e: - space = self.space - # if the codec is not found, LookupError is raised. we - # check using 'is_w' not to mask potential IndexError or - # KeyError - if space.is_w(e.w_type, space.w_LookupError): - raise error.SyntaxError("Unknown encoding: %s" % enc, - filename=compile_info.filename) - # Transform unicode errors into SyntaxError - if e.match(space, space.w_UnicodeDecodeError): - e.normalize_exception(space) - w_message = space.str(e.get_w_value(space)) - raise error.SyntaxError(space.str_w(w_message)) + textsrc = self._detect_encoding(textsrc, None, compile_info) + + enc = compile_info.encoding + if enc is not None and enc not in ('utf-8', 'iso-8859-1'): + try: + textsrc = recode_to_utf8(self.space, textsrc, enc) + except OperationError, e: + operror = self._decode_error(e, compile_info) + if operror: + raise operror + else: raise - flags = compile_info.flags - - if flags & consts.CO_FUTURE_PRINT_FUNCTION: - self.grammar = pygram.python_grammar_no_print - else: - self.grammar = pygram.python_grammar source_lines = textsrc.splitlines(True) - if textsrc and textsrc[-1] == "\n": - compile_info.flags &= ~consts.PyCF_DONT_IMPLY_DEDENT + return self.build_tree(source_lines, compile_info) - if enc is not None: - compile_info.encoding = enc + def parse_file(self, stream, compile_info): + assert isinstance(stream, Stream) + + firstline = stream.readline() + 
secondline = None + if firstline: + secondline = stream.readline() + if secondline: + firstline = self._detect_encoding( + firstline, secondline, compile_info) + else: + firstline = self._detect_encoding( + firstline, '', compile_info) + + enc = compile_info.encoding + if enc in ('utf-8', 'iso-8859-1'): + enc = None # No need to recode + + source_lines = [] + + if enc is None: + if firstline: + source_lines.append(firstline) + if secondline: + source_lines.append(secondline) + while True: + line = stream.readline() + if not line: + break + source_lines.append(line) + else: + try: + if firstline: + source_lines.append(stream.recode_to_utf8(firstline, enc)) + if secondline: + source_lines.append(stream.recode_to_utf8(secondline, enc)) + + while True: + line = stream.readline() + if not line: + break + source_lines.append(stream.recode_to_utf8(line, enc)) + except OperationError, e: + operror = self._decode_error(e, compile_info) + if operror: + raise operror + else: + raise return self.build_tree(source_lines, compile_info) def build_tree(self, source_lines, compile_info): """Builds the parse tree from a list of source lines""" - # The tokenizer is very picky about how it wants its input. - if source_lines and not source_lines[-1].endswith("\n"): - source_lines[-1] += '\n' + if compile_info.flags & consts.CO_FUTURE_PRINT_FUNCTION: + self.grammar = pygram.python_grammar_no_print + else: + self.grammar = pygram.python_grammar + + if source_lines and source_lines[-1]: + last_line = source_lines[-1] + if last_line: + if last_line[-1] == "\n": + compile_info.flags &= ~consts.PyCF_DONT_IMPLY_DEDENT + else: + # The tokenizer is very picky about how it wants its input. 
+ source_lines[-1] += '\n' self.prepare(_targets[compile_info.mode]) tp = 0 From commits-noreply at bitbucket.org Mon Mar 21 21:47:51 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 21:47:51 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Use the Python grammar to process __future__ imports, Message-ID: <20110321204751.075F62A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42829:158c353210eb Date: 2011-03-21 21:29 +0100 http://bitbucket.org/pypy/pypy/changeset/158c353210eb/ Log: Use the Python grammar to process __future__ imports, and get rid of the hand-made FutureAutomaton diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -1,306 +1,33 @@ -""" -This automaton is designed to be invoked on a Python source string -before the real parser starts working, in order to find all legal -'from __future__ import blah'. As soon as something is encountered that -would prevent more future imports, the analysis is aborted. -The resulting legal futures are avaliable in self.flags after the -pass has ended. +from pypy.interpreter.astcompiler import ast +from pypy.tool import stdlib___future__ as future -Invocation is through get_futures(src), which returns a field of flags, one per -found correct future import. +def get_futures(future_flags, tree): + flags = 0 -The flags can then be used to set up the parser. -All error detection is left to the parser. + if not isinstance(tree, (ast.Module, ast.Interactive)): + return flags, (0, 0) -The reason we are not using the regular lexer/parser toolchain is that -we do not want the overhead of generating tokens for entire files just -to find information that resides in the first few lines of the file. -Neither do we require sane error messages, as this job is handled by -the parser. 
+ if not tree.body: + return flags, (0, 0) -To make the parsing fast, especially when the module is translated to C, -the code has been written in a very serial fashion, using an almost -assembler like style. A further speedup could be achieved by replacing -the "in" comparisons with explicit numeric comparisons. -""" + found_docstring = False -from pypy.interpreter.astcompiler.consts import CO_GENERATOR_ALLOWED, \ - CO_FUTURE_DIVISION, CO_FUTURE_WITH_STATEMENT, CO_FUTURE_ABSOLUTE_IMPORT - -def get_futures(future_flags, source): - futures = FutureAutomaton(future_flags, source) - try: - futures.start() - except DoneException, e: - pass - return futures.flags, (futures.lineno, futures.col_offset) - -class DoneException(Exception): - pass - -whitespace = ' \t\f' -whitespace_or_newline = whitespace + '\n\r' -letters = 'ABCDEFGHIJKLMNOPQRSTUVWXYabcdefghijklmnopqrstuvwxyz_' -alphanumerics = letters + '1234567890' - -class FutureAutomaton(object): - """ - A future statement must appear near the top of the module. - The only lines that can appear before a future statement are: - - * the module docstring (if any), - * comments, - * blank lines, and - * other future statements. - - The features recognized by Python 2.5 are "generators", - "division", "nested_scopes" and "with_statement", "absolute_import". - "generators", "division" and "nested_scopes" are redundant - in 2.5 because they are always enabled. - - This module parses the input until it encounters something that is - not recognized as a valid future statement or something that may - precede a future statement. 
- """ - - def __init__(self, future_flags, string): - self.future_flags = future_flags - self.s = string - self.pos = 0 - self.current_lineno = 1 - self.lineno = -1 - self.line_start_pos = 0 - self.col_offset = 0 - self.docstring_consumed = False - self.flags = 0 - self.got_features = 0 - - def getc(self, offset=0): - try: - return self.s[self.pos + offset] - except IndexError: - raise DoneException - - def start(self): - c = self.getc() - if c in ("'", '"', "r", "u") and not self.docstring_consumed: - self.consume_docstring() - elif c == '\\' or c in whitespace_or_newline: - self.consume_empty_line() - elif c == '#': - self.consume_comment() - elif c == 'f': - self.consume_from() - else: - return - - def atbol(self): - self.current_lineno += 1 - self.line_start_pos = self.pos - - def consume_docstring(self): - self.docstring_consumed = True - if self.getc() == "r": - self.pos += 1 - if self.getc() == "u": - self.pos += 1 - endchar = self.getc() - if (self.getc() == self.getc(+1) and - self.getc() == self.getc(+2)): - self.pos += 3 - while 1: # Deal with a triple quoted docstring - if self.getc() == '\\': - self.pos += 2 - else: - c = self.getc() - if c != endchar: - self.pos += 1 - if c == '\n': - self.atbol() - elif c == '\r': - if self.getc() == '\n': - self.pos += 1 - self.atbol() - else: - self.pos += 1 - if (self.getc() == endchar and - self.getc(+1) == endchar): - self.pos += 2 - self.consume_empty_line() - break - - else: # Deal with a single quoted docstring - self.pos += 1 - while 1: - c = self.getc() - self.pos += 1 - if c == endchar: - self.consume_empty_line() - return - elif c == '\\': - # Deal with linefeeds - if self.getc() != '\r': - self.pos += 1 - else: - self.pos += 1 - if self.getc() == '\n': - self.pos += 1 - elif c in '\r\n': - # Syntax error - return - - def consume_continuation(self): - c = self.getc() - if c in '\n\r': - self.pos += 1 - self.atbol() - - def consume_empty_line(self): - """ - Called when the remainder of the line can only 
contain whitespace - and comments. - """ - while self.getc() in whitespace: - self.pos += 1 - if self.getc() == '#': - self.consume_comment() - elif self.getc() == ';': - self.pos += 1 - self.consume_whitespace() - self.start() - elif self.getc() in '\\': - self.pos += 1 - self.consume_continuation() - self.start() - elif self.getc() in '\r\n': - c = self.getc() - self.pos += 1 - if c == '\r': - if self.getc() == '\n': - self.pos += 1 - self.atbol() - else: - self.atbol() - self.start() - - def consume_comment(self): - self.pos += 1 - while self.getc() not in '\r\n': - self.pos += 1 - self.consume_empty_line() - - def consume_from(self): - col_offset = self.pos - self.line_start_pos - line = self.current_lineno - self.pos += 1 - if self.getc() == 'r' and self.getc(+1) == 'o' and self.getc(+2) == 'm': - self.docstring_consumed = True - self.pos += 3 - self.consume_mandatory_whitespace() - if self.s[self.pos:self.pos+10] != '__future__': - raise DoneException - self.pos += 10 - self.consume_mandatory_whitespace() - if self.s[self.pos:self.pos+6] != 'import': - raise DoneException - self.pos += 6 - self.consume_whitespace() - old_got = self.got_features - try: - if self.getc() == '(': - self.pos += 1 - self.consume_whitespace() - self.set_flag(self.get_name()) - # Set flag corresponding to name - self.get_more(paren_list=True) - else: - self.set_flag(self.get_name()) - self.get_more() - finally: - if self.got_features > old_got: - self.col_offset = col_offset - self.lineno = line - self.consume_empty_line() - - def consume_mandatory_whitespace(self): - if self.getc() not in whitespace + '\\': - raise DoneException - self.consume_whitespace() - - def consume_whitespace(self): - while 1: - c = self.getc() - if c in whitespace: - self.pos += 1 - continue - elif c == '\\': - self.pos += 1 - c = self.getc() - if c == '\n': - self.pos += 1 - self.atbol() - continue - elif c == '\r': - self.pos += 1 - if self.getc() == '\n': - self.pos += 1 - self.atbol() - else: - raise 
DoneException - else: - return - - def get_name(self): - if self.getc() not in letters: - raise DoneException - p = self.pos - try: - while self.getc() in alphanumerics: - self.pos += 1 - except DoneException: - # If there's any name at all, we want to call self.set_flag(). - # Something else while get the DoneException again. - if self.pos == p: - raise - end = self.pos - else: - end = self.pos - self.consume_whitespace() - return self.s[p:end] - - def get_more(self, paren_list=False): - if paren_list and self.getc() == ')': - self.pos += 1 - return - if (self.getc() == 'a' and - self.getc(+1) == 's' and - self.getc(+2) in whitespace): - self.get_name() - self.get_name() - self.get_more(paren_list=paren_list) - return - elif self.getc() != ',': - return - else: - self.pos += 1 - self.consume_whitespace() - if paren_list and self.getc() == ')': - self.pos += 1 - return # Handles trailing comma inside parenthesis - self.set_flag(self.get_name()) - self.get_more(paren_list=paren_list) - - def set_flag(self, feature): - self.got_features += 1 - try: - self.flags |= self.future_flags.compiler_features[feature] - except KeyError: - pass - -from codeop import PyCF_DONT_IMPLY_DEDENT -from pypy.interpreter.error import OperationError - -from pypy.tool import stdlib___future__ as future + for elem in tree.body: + if isinstance(elem, ast.ImportFrom): + if elem.module != '__future__': + break + for alias in elem.names: + name = alias.name + try: + flags |= future_flags.compiler_features[name] + except KeyError: + pass + elif isinstance(elem, ast.Expr): + if found_docstring: + break + if isinstance(elem.value, ast.Str): + found_docstring = True + return flags, (elem.lineno, elem.col_offset) class FutureFlags(object): diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py --- a/pypy/interpreter/pycompiler.py +++ b/pypy/interpreter/pycompiler.py @@ -140,11 +140,11 @@ def _compile_to_ast(self, source, info): space = self.space try: - f_flags, future_info = 
future.get_futures(self.future_flags, source) + parse_tree = self.parser.parse_source(source, info) + mod = astbuilder.ast_from_node(space, parse_tree, info) + f_flags, future_info = future.get_futures(self.future_flags, mod) info.last_future_import = future_info info.flags |= f_flags - parse_tree = self.parser.parse_source(source, info) - mod = astbuilder.ast_from_node(space, parse_tree, info) except parseerror.IndentationError, e: raise OperationError(space.w_IndentationError, e.wrap_info(space)) diff --git a/pypy/interpreter/pyparser/parser.py b/pypy/interpreter/pyparser/parser.py --- a/pypy/interpreter/pyparser/parser.py +++ b/pypy/interpreter/pyparser/parser.py @@ -182,3 +182,4 @@ self.stack[-1][2].children.append(node) else: self.root = node + return node diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -217,12 +217,51 @@ return self.build_tree(source_lines, compile_info) + def parse_future_import(self, node): + if node.type != self.grammar.symbol_ids['import_from']: + return + children = node.children + # from __future__ import ..., must have at least 4 children + if len(children) < 4: + return + if children[0].value != 'from': + return + if len(children[1].children) != 1: + return + if children[1].children[0].value != '__future__': + return + + child = children[3] + # child can be a star, a parenthesis or import_as_names + if child.type == pygram.tokens.STAR: + return + if child.type == pygram.tokens.LPAR: + child = children[4] + + for i in range(0, len(child.children), 2): + c = child.children[i] + if (len(c.children) >= 1 and + c.children[0].type == pygram.tokens.NAME): + name = c.children[0].value + + if name == 'print_function': + self.compile_info.flags |= consts.CO_FUTURE_PRINT_FUNCTION + elif name == 'with_statement': + self.compile_info.flags |= consts.CO_FUTURE_WITH_STATEMENT + elif name == 'unicode_literals': + 
self.compile_info.flags |= consts.CO_FUTURE_UNICODE_LITERALS + def classify(self, token_type, value, *args): if self.compile_info.flags & consts.CO_FUTURE_PRINT_FUNCTION: if token_type == self.grammar.KEYWORD_TOKEN and value == 'print': return self.grammar.token_ids[pygram.tokens.NAME] return parser.Parser.classify(self, token_type, value, *args) + def pop(self): + node = parser.Parser.pop(self) + self.parse_future_import(node) + return node + def build_tree(self, source_lines, compile_info): """Builds the parse tree from a list of source lines""" diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -161,6 +161,22 @@ self.parse('0b0l') py.test.raises(SyntaxError, self.parse, "0b112") + def test_future_import(self): + def parse_with_info(source): + info = pyparse.CompileInfo("", "exec", 0) + self.parse(source, info=info) + return info.flags + source = 'from __future__ import print_function' + assert parse_with_info(source) == consts.CO_FUTURE_PRINT_FUNCTION + source = 'from __future__ import print_function, with_statement' + assert parse_with_info(source) == (consts.CO_FUTURE_PRINT_FUNCTION | + consts.CO_FUTURE_WITH_STATEMENT) + source = 'from __future__ import (print_function,\nwith_statement)' + assert parse_with_info(source) == (consts.CO_FUTURE_PRINT_FUNCTION | + consts.CO_FUTURE_WITH_STATEMENT) + source = 'from __future__ import *' + assert parse_with_info(source) == 0 + class TestPythonFileParser(TestPythonParser): def parse(self, source, mode="exec", info=None): if info is None: From commits-noreply at bitbucket.org Mon Mar 21 22:26:30 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 22:26:30 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: close the subrepo-removal branch Message-ID: <20110321212630.EE49F2A2002@codespeak.net> Author: Ronny Pfannschmidt 
Branch: subrepo-removal Changeset: r42830:d58950c54ca9 Date: 2011-03-21 18:40 +0100 http://bitbucket.org/pypy/pypy/changeset/d58950c54ca9/ Log: close the subrepo-removal branch From commits-noreply at bitbucket.org Mon Mar 21 22:26:31 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 22:26:31 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: skip the rcoroutine tests if greenlet is missing Message-ID: <20110321212631.AD2AB2A2002@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42831:bc304bdae8e6 Date: 2011-03-21 21:53 +0100 http://bitbucket.org/pypy/pypy/changeset/bc304bdae8e6/ Log: skip the rcoroutine tests if greenlet is missing diff --git a/pypy/rlib/test/test_rcoroutine.py b/pypy/rlib/test/test_rcoroutine.py --- a/pypy/rlib/test/test_rcoroutine.py +++ b/pypy/rlib/test/test_rcoroutine.py @@ -1,13 +1,16 @@ """ testing coroutines at interprepter level """ - +import py import os from pypy import conftest; conftest.translation_test_so_skip_if_appdirect() from pypy.rlib.rcoroutine import make_coroutine_classes from pypy.translator.c.test.test_stackless import StacklessTest from pypy.translator.c import gc +def setup_module(mod): + py.test.importorskip('greenlet') + d = make_coroutine_classes(object) syncstate = d['syncstate'] Coroutine = d['Coroutine'] From commits-noreply at bitbucket.org Mon Mar 21 22:26:32 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 22:26:32 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: skip the libpypy/test_distributed tests if greenlet is missing Message-ID: <20110321212632.4D1C52A2002@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42832:491e770599a2 Date: 2011-03-21 22:11 +0100 http://bitbucket.org/pypy/pypy/changeset/491e770599a2/ Log: skip the libpypy/test_distributed tests if greenlet is missing diff --git a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py 
b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py --- a/pypy/module/test_lib_pypy/test_distributed/test_distributed.py +++ b/pypy/module/test_lib_pypy/test_distributed/test_distributed.py @@ -91,19 +91,22 @@ class AppTestDistributedTasklets(object): spaceconfig = {"objspace.std.withtproxy": True, "objspace.usemodules._stackless": True} + reclimit = sys.getrecursionlimit() + def setup_class(cls): + import py.test + py.test.importorskip('greenlet') #cls.space = gettestobjspace(**{"objspace.std.withtproxy": True, # "usemodules":("_stackless",)}) cls.w_test_env_ = cls.space.appexec([], """(): from distributed import test_env return (test_env,) """) - cls.reclimit = sys.getrecursionlimit() sys.setrecursionlimit(100000) def teardown_class(cls): sys.setrecursionlimit(cls.reclimit) - + def test_remote_protocol_call(self): def f(x, y): return x + y From commits-noreply at bitbucket.org Mon Mar 21 22:26:32 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Mon, 21 Mar 2011 22:26:32 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: skip the lib pypy stackless tests if greenlet is missing Message-ID: <20110321212632.D2B302A2002@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42833:d770660acf0a Date: 2011-03-21 22:26 +0100 http://bitbucket.org/pypy/pypy/changeset/d770660acf0a/ Log: skip the lib pypy stackless tests if greenlet is missing diff --git a/pypy/module/test_lib_pypy/test_stackless.py b/pypy/module/test_lib_pypy/test_stackless.py --- a/pypy/module/test_lib_pypy/test_stackless.py +++ b/pypy/module/test_lib_pypy/test_stackless.py @@ -3,6 +3,8 @@ class AppTest_Stackless: def setup_class(cls): + import py.test + py.test.importorskip('greenlet') space = gettestobjspace(usemodules=('_stackless', '_socket')) cls.space = space # cannot test the unpickle part on top of py.py From commits-noreply at bitbucket.org Mon Mar 21 23:21:21 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 
23:21:21 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the tests about PyThread_acquire_lock and PyThread_release_lock Message-ID: <20110321222121.99CDE2A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42834:00f07d6abcc0 Date: 2011-03-21 23:13 +0100 http://bitbucket.org/pypy/pypy/changeset/00f07d6abcc0/ Log: Fix the tests about PyThread_acquire_lock and PyThread_release_lock diff --git a/pypy/module/cpyext/include/pythread.h b/pypy/module/cpyext/include/pythread.h --- a/pypy/module/cpyext/include/pythread.h +++ b/pypy/module/cpyext/include/pythread.h @@ -0,0 +1,8 @@ +#ifndef Py_PYTHREAD_H +#define Py_PYTHREAD_H + +typedef void *PyThread_type_lock; +#define WAIT_LOCK 1 +#define NOWAIT_LOCK 0 + +#endif diff --git a/pypy/module/cpyext/thread.py b/pypy/module/cpyext/thread.py --- a/pypy/module/cpyext/thread.py +++ b/pypy/module/cpyext/thread.py @@ -1,13 +1,22 @@ from pypy.module.thread import ll_thread from pypy.module.cpyext.api import CANNOT_FAIL, cpython_api -from pypy.rpython.lltypesystem import rffi +from pypy.rpython.lltypesystem import lltype, rffi @cpython_api([], rffi.LONG, error=CANNOT_FAIL) def PyThread_get_thread_ident(space): return ll_thread.get_ident() +LOCKP = rffi.COpaquePtr(typedef='PyThread_type_lock') -# @cpython_api([ll_thread.TLOCKP, rffi.INT], rffi.INT, error=CANNOT_FAIL) -# def PyThread_acquire_lock(space, lock, waitflag): -# return ll_thread.Lock(lock).acquire(waitflag) + at cpython_api([LOCKP, rffi.INT], rffi.INT, error=CANNOT_FAIL) +def PyThread_acquire_lock(space, lock, waitflag): + lock = rffi.cast(ll_thread.TLOCKP, lock) + return ll_thread.c_thread_acquirelock(lock, waitflag) + + at cpython_api([LOCKP], lltype.Void) +def PyThread_release_lock(space, lock): + lock = rffi.cast(ll_thread.TLOCKP, lock) + ll_thread.c_thread_releaselock(lock) + + diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ 
-119,6 +119,7 @@ #include "pystate.h" #include "fileobject.h" #include "pysignals.h" +#include "pythread.h" // XXX This shouldn't be included here #include "structmember.h" diff --git a/pypy/module/cpyext/test/test_thread.py b/pypy/module/cpyext/test/test_thread.py --- a/pypy/module/cpyext/test/test_thread.py +++ b/pypy/module/cpyext/test/test_thread.py @@ -24,17 +24,15 @@ assert results[0][0] != results[1][0] - @py.test.mark.xfail def test_acquire_lock(self, space, api): assert hasattr(api, 'PyThread_acquire_lock') lock = allocate_ll_lock() - assert api.PyThread_acquire_lock(lock, space.w_int(0)) == 1 - assert api.PyThread_acquire_lock(lock, space.w_int(1)) == 0 + assert api.PyThread_acquire_lock(lock, 1) == 1 + assert api.PyThread_acquire_lock(lock, 0) == 0 - @py.test.mark.xfail def test_release_lock(self, space, api): assert hasattr(api, 'PyThread_acquire_lock') lock = allocate_ll_lock() - api.PyThread_acquire_lock(lock, space.w_int(0)) + api.PyThread_acquire_lock(lock, 1) api.PyThread_release_lock(lock) - assert api.PyThread_acquire_lock(lock, space.w_int(0)) == 1 + assert api.PyThread_acquire_lock(lock, 0) == 1 From commits-noreply at bitbucket.org Mon Mar 21 23:21:22 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 23:21:22 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: expose PyThread_allocate_lock, PyThread_free_lock Message-ID: <20110321222122.8CCCA2A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42835:82c910237508 Date: 2011-03-21 23:21 +0100 http://bitbucket.org/pypy/pypy/changeset/82c910237508/ Log: cpyext: expose PyThread_allocate_lock, PyThread_free_lock diff --git a/pypy/module/thread/ll_thread.py b/pypy/module/thread/ll_thread.py --- a/pypy/module/thread/ll_thread.py +++ b/pypy/module/thread/ll_thread.py @@ -111,7 +111,7 @@ c_thread_releaselock(self._lock) def __del__(self): - lltype.free(self._lock, flavor='raw', track_allocation=False) + free_ll_lock(self._lock) # 
____________________________________________________________ # @@ -138,6 +138,9 @@ raise error("out of resources") return ll_lock +def free_ll_lock(ll_lock): + lltype.free(ll_lock, flavor='raw', track_allocation=False) + def acquire_NOAUTO(ll_lock, flag): flag = rffi.cast(rffi.INT, int(flag)) res = c_thread_acquirelock_NOAUTO(ll_lock, flag) diff --git a/pypy/module/cpyext/thread.py b/pypy/module/cpyext/thread.py --- a/pypy/module/cpyext/thread.py +++ b/pypy/module/cpyext/thread.py @@ -9,6 +9,16 @@ LOCKP = rffi.COpaquePtr(typedef='PyThread_type_lock') + at cpython_api([], LOCKP) +def PyThread_allocate_lock(space): + lock = ll_thread.allocate_ll_lock() + return rffi.cast(LOCKP, lock) + + at cpython_api([LOCKP], lltype.Void) +def PyThread_free_lock(space, lock): + lock = rffi.cast(ll_thread.TLOCKP, lock) + ll_thread.free_ll_lock(lock) + @cpython_api([LOCKP, rffi.INT], rffi.INT, error=CANNOT_FAIL) def PyThread_acquire_lock(space, lock, waitflag): lock = rffi.cast(ll_thread.TLOCKP, lock) diff --git a/pypy/module/cpyext/test/test_thread.py b/pypy/module/cpyext/test/test_thread.py --- a/pypy/module/cpyext/test/test_thread.py +++ b/pypy/module/cpyext/test/test_thread.py @@ -26,13 +26,15 @@ def test_acquire_lock(self, space, api): assert hasattr(api, 'PyThread_acquire_lock') - lock = allocate_ll_lock() + lock = api.PyThread_allocate_lock() assert api.PyThread_acquire_lock(lock, 1) == 1 assert api.PyThread_acquire_lock(lock, 0) == 0 + api.PyThread_free_lock(lock) def test_release_lock(self, space, api): assert hasattr(api, 'PyThread_acquire_lock') - lock = allocate_ll_lock() + lock = api.PyThread_allocate_lock() api.PyThread_acquire_lock(lock, 1) api.PyThread_release_lock(lock) assert api.PyThread_acquire_lock(lock, 0) == 1 + api.PyThread_free_lock(lock) From commits-noreply at bitbucket.org Mon Mar 21 23:23:21 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Mon, 21 Mar 2011 23:23:21 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Fix all tests in 
pypy/interpreter; Message-ID: <20110321222321.EDAB12A2002@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42836:2e7094ba734b Date: 2011-03-21 23:23 +0100 http://bitbucket.org/pypy/pypy/changeset/2e7094ba734b/ Log: Fix all tests in pypy/interpreter; port test_automation tests as well diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -3,12 +3,13 @@ def get_futures(future_flags, tree): flags = 0 + pos = (-1, 0) if not isinstance(tree, (ast.Module, ast.Interactive)): - return flags, (0, 0) + return flags, pos if not tree.body: - return flags, (0, 0) + return flags, pos found_docstring = False @@ -22,12 +23,13 @@ flags |= future_flags.compiler_features[name] except KeyError: pass + pos = elem.lineno, elem.col_offset elif isinstance(elem, ast.Expr): if found_docstring: break if isinstance(elem.value, ast.Str): found_docstring = True - return flags, (elem.lineno, elem.col_offset) + return flags, pos class FutureFlags(object): diff --git a/pypy/interpreter/pyparser/test/test_futureautomaton.py b/pypy/interpreter/pyparser/test/test_futureautomaton.py --- a/pypy/interpreter/pyparser/test/test_futureautomaton.py +++ b/pypy/interpreter/pyparser/test/test_futureautomaton.py @@ -1,222 +1,198 @@ import py -import pypy.interpreter.pyparser.future as future +from pypy.interpreter.pyparser import future, pyparse +from pypy.interpreter.astcompiler import astbuilder from pypy.tool import stdlib___future__ as fut -def run(s): - f = future.FutureAutomaton(future.futureFlags_2_5, s) - try: - f.start() - except future.DoneException: - pass +class F(object): + pass + +def run(space, source): + parser = pyparse.PythonParser(space) + info = pyparse.CompileInfo("", "exec") + tree = parser.parse_source(source, info) + mod = astbuilder.ast_from_node(space, tree, info) + f = F() + f.flags, (f.lineno, f.col_offset) = \ + 
future.get_futures(future.futureFlags_2_5, mod) return f -def test_docstring(): +def test_docstring(space): s = '"Docstring\\" "\nfrom __future__ import division\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_DIVISION assert f.lineno == 2 assert f.col_offset == 0 -def test_comment(): +def test_comment(space): s = '# A comment about nothing ;\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.lineno == -1 assert f.col_offset == 0 -def test_tripledocstring(): +def test_tripledocstring(space): s = '''""" This is a docstring with line breaks in it. It even has a \n""" ''' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.lineno == -1 assert f.col_offset == 0 -def test_escapedquote_in_tripledocstring(): +def test_escapedquote_in_tripledocstring(space): s = '''""" This is a docstring with line breaks in it. \\"""It even has an escaped quote!""" ''' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.lineno == -1 assert f.col_offset == 0 -def test_empty_line(): +def test_empty_line(space): s = ' \t \f \n \n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.lineno == -1 assert f.col_offset == 0 -def test_from(): +def test_from(space): s = 'from __future__ import division\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_DIVISION assert f.lineno == 1 assert f.col_offset == 0 -def test_froms(): +def test_froms(space): s = 'from __future__ import division, generators, with_statement\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) assert f.lineno == 1 assert f.col_offset == 0 -def test_from_as(): +def test_from_as(space): s = 'from __future__ import division as b\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_DIVISION assert f.lineno == 1 assert f.col_offset == 0 
-def test_froms_as(): +def test_froms_as(space): s = 'from __future__ import division as b, generators as c\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) assert f.lineno == 1 assert f.col_offset == 0 -def test_from_paren(): +def test_from_paren(space): s = 'from __future__ import (division)\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_DIVISION assert f.lineno == 1 assert f.col_offset == 0 -def test_froms_paren(): +def test_froms_paren(space): s = 'from __future__ import (division, generators)\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) assert f.lineno == 1 assert f.col_offset == 0 -def test_froms_paren_as(): +def test_froms_paren_as(space): s = 'from __future__ import (division as b, generators,)\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED) assert f.lineno == 1 assert f.col_offset == 0 -def test_multiline(): +def test_multiline(space): s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,)\nfrom __future__ import with_statement\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) assert f.lineno == 4 assert f.col_offset == 0 -def test_windows_style_lineendings(): +def test_windows_style_lineendings(space): s = '"abc" #def\r\n #ghi\r\nfrom __future__ import (division as b, generators,)\r\nfrom __future__ import with_statement\r\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) assert f.lineno == 4 assert f.col_offset == 0 -def test_mac_style_lineendings(): +def test_mac_style_lineendings(space): s = '"abc" #def\r #ghi\rfrom 
__future__ import (division as b, generators,)\rfrom __future__ import with_statement\r' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) assert f.lineno == 4 assert f.col_offset == 0 -def test_semicolon(): +def test_semicolon(space): s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,); from __future__ import with_statement\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == (fut.CO_FUTURE_DIVISION | fut.CO_GENERATOR_ALLOWED | fut.CO_FUTURE_WITH_STATEMENT) assert f.lineno == 3 assert f.col_offset == 55 -def test_full_chain(): - s = '"abc" #def\n #ghi\nfrom __future__ import (division as b, generators,); from __future__ import with_statement\n' - flags, pos = future.get_futures(future.futureFlags_2_5, s) - assert flags == (fut.CO_FUTURE_DIVISION | - fut.CO_GENERATOR_ALLOWED | - fut.CO_FUTURE_WITH_STATEMENT) - assert pos == (3, 55) +def test_intervening_code(space): + s = 'from __future__ import (division as b, generators,)\nfrom sys import modules\nfrom __future__ import with_statement\n' + f = run(space, s) + assert f.flags & fut.CO_FUTURE_WITH_STATEMENT == 0 + assert f.lineno == 1 + assert f.col_offset == 0 -def test_intervening_code(): - s = 'from __future__ import (division as b, generators,)\nfrom sys import modules\nfrom __future__ import with_statement\n' - flags, pos = future.get_futures(future.futureFlags_2_5, s) - assert flags & fut.CO_FUTURE_WITH_STATEMENT == 0 - assert pos == (1, 0) - -def test_nonexisting(): +def test_nonexisting(space): s = 'from __future__ import non_existing_feature\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == 0 assert f.lineno == 1 assert f.col_offset == 0 -def test_from_import_abs_import(): +def test_from_import_abs_import(space): s = 'from __future__ import absolute_import\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert 
f.flags == fut.CO_FUTURE_ABSOLUTE_IMPORT assert f.lineno == 1 assert f.col_offset == 0 -def test_raw_doc(): +def test_raw_doc(space): s = 'r"Doc"\nfrom __future__ import with_statement\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT assert f.lineno == 2 assert f.col_offset == 0 -def test_unicode_doc(): +def test_unicode_doc(space): s = 'u"Doc"\nfrom __future__ import with_statement\n' - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT assert f.lineno == 2 assert f.col_offset == 0 -def test_raw_unicode_doc(): - s = 'ru"Doc"\nfrom __future__ import with_statement\n' - f = run(s) - assert f.pos == len(s) +def test_raw_unicode_doc(space): + s = 'ur"Doc"\nfrom __future__ import with_statement\n' + f = run(space, s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT -def test_continuation_line(): +def test_continuation_line(space): s = "\\\nfrom __future__ import with_statement\n" - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT assert f.lineno == 2 assert f.col_offset == 0 -def test_continuation_lines(): +def test_continuation_lines(space): s = "\\\n \t\\\nfrom __future__ import with_statement\n" - f = run(s) - assert f.pos == len(s) + f = run(space, s) assert f.flags == fut.CO_FUTURE_WITH_STATEMENT assert f.lineno == 3 assert f.col_offset == 0 @@ -224,14 +200,13 @@ # This looks like a bug in cpython parser # and would require extensive modifications # to future.py in order to emulate the same behaviour -def test_continuation_lines_raise(): +def test_continuation_lines_raise(space): py.test.skip("probably a CPython bug") s = " \\\n \t\\\nfrom __future__ import with_statement\n" try: - f = run(s) + f = run(space, s) except IndentationError, e: assert e.args == 'unexpected indent' - assert f.pos == len(s) assert f.flags == 0 assert f.lineno == -1 assert f.col_offset == 0 diff --git 
a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -226,7 +226,7 @@ return if children[0].value != 'from': return - if len(children[1].children) != 1: + if not children[1].children or len(children[1].children) != 1: return if children[1].children[0].value != '__future__': return From commits-noreply at bitbucket.org Tue Mar 22 00:16:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 00:16:57 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Remove duplicate Message-ID: <20110321231657.2425436C20A@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42837:ff651a2873a5 Date: 2011-03-21 23:51 +0100 http://bitbucket.org/pypy/pypy/changeset/ff651a2873a5/ Log: Remove duplicate diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py --- a/pypy/interpreter/pycompiler.py +++ b/pypy/interpreter/pycompiler.py @@ -119,7 +119,7 @@ raise OperationError(self.space.w_TypeError, self.space.wrap( "invalid node type")) - future_pos = misc.parse_future(node) + _, future_pos = future.get_futures(self.future_flags, node) info = pyparse.CompileInfo(filename, mode, flags, future_pos) return self._compile_ast(node, info) diff --git a/pypy/interpreter/astcompiler/misc.py b/pypy/interpreter/astcompiler/misc.py --- a/pypy/interpreter/astcompiler/misc.py +++ b/pypy/interpreter/astcompiler/misc.py @@ -27,33 +27,6 @@ _emit_syntax_warning(space, w_msg, w_filename, w_lineno, w_offset) -def parse_future(tree): - future_lineno = 0 - future_column = 0 - have_docstring = False - if isinstance(tree, ast.Module): - body = tree.body - elif isinstance(tree, ast.Interactive): - body = tree.body - else: - return 0, 0 - for stmt in body: - if isinstance(stmt, ast.Expr) and isinstance(stmt.value, ast.Str): - if have_docstring: - break - else: - have_docstring = True - elif isinstance(stmt, ast.ImportFrom): 
- if stmt.module == "__future__": - future_lineno = stmt.lineno - future_column = stmt.col_offset - else: - break - else: - break - return future_lineno, future_column - - class ForbiddenNameAssignment(Exception): def __init__(self, name, node): From commits-noreply at bitbucket.org Tue Mar 22 06:03:39 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 22 Mar 2011 06:03:39 +0100 (CET) Subject: [pypy-svn] pypy default: change a pair of range to xrange, to remove 1 guard each during annotation Message-ID: <20110322050339.C275B2A2002@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42838:c35afc91f814 Date: 2011-03-22 01:03 -0400 http://bitbucket.org/pypy/pypy/changeset/c35afc91f814/ Log: change a pair of range to xrange, to remove 1 guard each during annotation diff --git a/pypy/objspace/std/multimethod.py b/pypy/objspace/std/multimethod.py --- a/pypy/objspace/std/multimethod.py +++ b/pypy/objspace/std/multimethod.py @@ -516,7 +516,7 @@ test = 1 while True: self.ensure_length(test+len(array)) - for i in range(len(array)): + for i in xrange(len(array)): if not (array[i] == self.items[test+i] or array[i] == self.null_value or self.items[test+i] == self.null_value): diff --git a/pypy/objspace/flow/framestate.py b/pypy/objspace/flow/framestate.py --- a/pypy/objspace/flow/framestate.py +++ b/pypy/objspace/flow/framestate.py @@ -169,7 +169,7 @@ lst[i:i+1] = [tag] + vars def recursively_unflatten(space, lst): - for i in range(len(lst)-1, -1, -1): + for i in xrange(len(lst)-1, -1, -1): item = lst[i] if item in UNPICKLE_TAGS: unrollerclass, argcount = UNPICKLE_TAGS[item] From commits-noreply at bitbucket.org Tue Mar 22 13:46:11 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 13:46:11 +0100 (CET) Subject: [pypy-svn] pypy default: Fix large writes on win32 console Message-ID: <20110322124611.0E79B2A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42840:6a4033a99b58 Date: 2011-03-22 13:33 +0100 
http://bitbucket.org/pypy/pypy/changeset/6a4033a99b58/ Log: Fix large writes on win32 console diff --git a/py/_io/terminalwriter.py b/py/_io/terminalwriter.py --- a/py/_io/terminalwriter.py +++ b/py/_io/terminalwriter.py @@ -81,6 +81,9 @@ oldcolors = GetConsoleInfo(handle).wAttributes attr |= (oldcolors & 0x0f0) SetConsoleTextAttribute(handle, attr) + while len(text) > 32768: + file.write(text[:32768]) + text = text[32768:] file.write(text) SetConsoleTextAttribute(handle, oldcolors) else: From commits-noreply at bitbucket.org Tue Mar 22 17:54:11 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 17:54:11 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: "import" now compiles code from the opened stream, Message-ID: <20110322165411.62988282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42841:7c3c75181dff Date: 2011-03-22 13:32 +0100 http://bitbucket.org/pypy/pypy/changeset/7c3c75181dff/ Log: "import" now compiles code from the opened stream, and does not need to read() the whole source file first. 
diff --git a/pypy/module/_file/interp_file.py b/pypy/module/_file/interp_file.py --- a/pypy/module/_file/interp_file.py +++ b/pypy/module/_file/interp_file.py @@ -55,6 +55,19 @@ if stream.flushable(): getopenstreams(self.space)[stream] = None + def detach(self): + stream = self.stream + if stream is not None: + self.newlines = self.stream.getnewlines() + self.stream = None + self.fd = -1 + openstreams = getopenstreams(self.space) + try: + del openstreams[stream] + except KeyError: + pass + return stream + def check_not_dir(self, fd): try: st = os.fstat(fd) @@ -128,17 +141,8 @@ self.fdopenstream(stream, fd, mode) def direct_close(self): - space = self.space - stream = self.stream + stream = self.detach() if stream is not None: - self.newlines = self.stream.getnewlines() - self.stream = None - self.fd = -1 - openstreams = getopenstreams(self.space) - try: - del openstreams[stream] - except KeyError: - pass stream.close() def direct_fileno(self): @@ -460,6 +464,12 @@ file.file_fdopen(fd, mode, buffering) return space.wrap(file) +def from_stream(space, stream, mode): + file = W_File(space) + fd = stream.try_to_find_file_descriptor() + file.fdopenstream(stream, fd, mode) + return space.wrap(file) + def descr_file_closed(space, file): return space.wrap(file.stream is None) @@ -553,4 +563,4 @@ @unwrap_spec(file=W_File, encoding="str_or_None", errors="str_or_None") def set_file_encoding(space, file, encoding=None, errors=None): file.encoding = encoding - file.errors = errors \ No newline at end of file + file.errors = errors diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -472,7 +472,7 @@ try: if find_info.modtype == PY_SOURCE: load_source_module(space, w_modulename, w_mod, find_info.filename, - find_info.stream.readall()) + find_info.stream) return w_mod elif find_info.modtype == PY_COMPILED: magic = _r_long(find_info.stream) @@ -732,6 +732,12 @@ pycode = 
ec.compiler.compile(source, pathname, 'exec', 0) return pycode +def parse_source_file_module(space, pathname, stream): + """ Parse a source file and return the corresponding code object """ + ec = space.getexecutioncontext() + pycode = ec.compiler.compile_file(stream, pathname, 'exec', 0) + return pycode + def exec_code_module(space, w_mod, code_w): w_dict = space.getattr(w_mod, space.wrap('__dict__')) space.call_method(w_dict, 'setdefault', @@ -741,7 +747,7 @@ @jit.dont_look_inside -def load_source_module(space, w_modulename, w_mod, pathname, source, +def load_source_module(space, w_modulename, w_mod, pathname, source_stream, write_pyc=True): """ Load a source module from a given file and return its module @@ -769,7 +775,7 @@ stream.close() space.setattr(w_mod, w('__file__'), w(cpathname)) else: - code_w = parse_source_module(space, pathname, source) + code_w = parse_source_file_module(space, pathname, source_stream) if space.config.objspace.usepycfiles and write_pyc: write_compiled_module(space, code_w, cpathname, mode, mtime) diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -101,7 +101,7 @@ w_modname, w(importing.Module(space, w_modname)), filename, - stream.readall()) + stream) finally: stream.close() if space.config.objspace.usepycfiles: @@ -667,7 +667,7 @@ w_modulename, w_mod, pathname, - stream.readall()) + stream) finally: stream.close() assert w_mod is w_ret @@ -690,7 +690,7 @@ w_modulename, w_mod, pathname, - stream.readall(), + stream, write_pyc=False) finally: stream.close() @@ -709,7 +709,7 @@ w_modulename, w_mod, pathname, - stream.readall()) + stream) except OperationError: # OperationError("Syntax Error") pass @@ -730,7 +730,7 @@ w_modulename, w_mod, pathname, - stream.readall()) + stream) except OperationError: # OperationError("NameError", "global name 'unknown_name' is not defined") pass diff --git 
a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -99,8 +99,7 @@ w_mod = space.wrap(Module(space, w_modulename)) importing._prepare_module(space, w_mod, filename, None) - importing.load_source_module( - space, w_modulename, w_mod, filename, stream.readall()) + importing.load_source_module(space, w_modulename, w_mod, filename, stream) if space.is_w(w_file, space.w_None): stream.close() return w_mod diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -34,25 +34,19 @@ return 'iso-8859-1' return encoding -def _check_for_encoding(s1, s2): - eol = s1.find('\n') +def _check_for_encoding(s): + eol = s.find('\n') if eol < 0: - enc = _check_line_for_encoding(s1) + enc = _check_line_for_encoding(s) else: - enc = _check_line_for_encoding(s1[:eol]) + enc = _check_line_for_encoding(s[:eol]) if enc: return enc if eol >= 0: - if s2: - s = s1 + s2 - else: - s = s1 eol2 = s.find('\n', eol + 1) if eol2 < 0: return _check_line_for_encoding(s[eol + 1:]) return _check_line_for_encoding(s[eol + 1:eol2]) - elif s2: - return _check_line_for_encoding(s2) def _check_line_for_encoding(line): @@ -97,10 +91,48 @@ class Stream(object): "Pseudo-file object used by PythonParser.parse_file" + def readline(self): raise NotImplementedError - def recode_to_utf8(self, text, encoding): - raise NotImplementedError + + encoding = None + def set_encoding(self, encoding): + self.encoding = encoding + + def close(self): + pass + + +class StdStream(Stream): + def __init__(self, space, stream): + self.space = space + self.stream = stream + self.w_readline = None + self.w_file = None + + def readline(self): + if not self.w_readline: + return self.stream.readline() + else: + w_line = self.space.call_function(self.w_readline) + return self.space.unicode_w(w_line).encode('utf-8') + + def 
set_encoding(self, encoding): + self.encoding = encoding + self.w_readline = None + if encoding: + from pypy.module._codecs.interp_codecs import lookup_codec + from pypy.module._file import interp_file + space = self.space + w_codec_tuple = lookup_codec(space, encoding) + self.w_file = interp_file.from_stream(space, self.stream, 'r') + w_stream_reader = space.getitem(w_codec_tuple, space.wrap(2)) + w_reader = space.call_function(w_stream_reader, self.w_file) + self.w_readline = space.getattr(w_reader, space.wrap('readline')) + + def close(self): + if self.w_file: + self.w_file.detach() class PythonParser(parser.Parser): @@ -108,25 +140,25 @@ parser.Parser.__init__(self, grammar) self.space = space - def _detect_encoding(self, text1, text2, compile_info): + def _detect_encoding(self, text, lineno, compile_info): "Detect source encoding from the beginning of the file" - if text1.startswith("\xEF\xBB\xBF"): - text1 = text1[3:] + if lineno == 1 and text.startswith("\xEF\xBB\xBF"): + text = text[3:] compile_info.encoding = 'utf-8' # If an encoding is explicitly given check that it is utf-8. - decl_enc = _check_for_encoding(text1, text2) + decl_enc = _check_for_encoding(text) if decl_enc and decl_enc != "utf-8": raise error.SyntaxError("UTF-8 BOM with non-utf8 coding cookie", filename=compile_info.filename) elif compile_info.flags & consts.PyCF_SOURCE_IS_UTF8: compile_info.encoding = 'utf-8' - if _check_for_encoding(text1, text2) is not None: + if _check_for_encoding(text) is not None: raise error.SyntaxError("coding declaration in unicode string", filename=compile_info.filename) else: compile_info.encoding = _normalize_encoding( - _check_for_encoding(text1, text2)) - return text1 + _check_for_encoding(text)) + return text def _decode_error(self, e, compile_info): space = self.space @@ -149,7 +181,7 @@ Everything from decoding the source to tokenizing to building the parse tree is handled here. 
""" - textsrc = self._detect_encoding(textsrc, None, compile_info) + textsrc = self._detect_encoding(textsrc, 1, compile_info) enc = compile_info.encoding if enc is not None and enc not in ('utf-8', 'iso-8859-1'): @@ -169,51 +201,35 @@ def parse_file(self, stream, compile_info): assert isinstance(stream, Stream) - firstline = stream.readline() - secondline = None - if firstline: - secondline = stream.readline() - if secondline: - firstline = self._detect_encoding( - firstline, secondline, compile_info) - else: - firstline = self._detect_encoding( - firstline, '', compile_info) + source_lines = [] + + while len(source_lines) < 2: + line = stream.readline() + if not line: + break + line = self._detect_encoding( + line, 1, compile_info) + source_lines.append(line) + if compile_info.encoding is not None: + break enc = compile_info.encoding if enc in ('utf-8', 'iso-8859-1'): enc = None # No need to recode + stream.set_encoding(enc) - source_lines = [] - - if enc is None: - if firstline: - source_lines.append(firstline) - if secondline: - source_lines.append(secondline) + try: while True: line = stream.readline() if not line: break source_lines.append(line) - else: - try: - if firstline: - source_lines.append(stream.recode_to_utf8(firstline, enc)) - if secondline: - source_lines.append(stream.recode_to_utf8(secondline, enc)) - - while True: - line = stream.readline() - if not line: - break - source_lines.append(stream.recode_to_utf8(line, enc)) - except OperationError, e: - operror = self._decode_error(e, compile_info) - if operror: - raise operror - else: - raise + except OperationError, e: + operror = self._decode_error(e, compile_info) + if operror: + raise operror + else: + raise return self.build_tree(source_lines, compile_info) diff --git a/pypy/interpreter/pyparser/test/test_pyparse.py b/pypy/interpreter/pyparser/test/test_pyparse.py --- a/pypy/interpreter/pyparser/test/test_pyparse.py +++ b/pypy/interpreter/pyparser/test/test_pyparse.py @@ -191,12 +191,12 @@ def 
__init__(self, source): self.stream = StringIO.StringIO(source) def readline(self): - return self.stream.readline() - def recode_to_utf8(self, line, encoding): + line = self.stream.readline() + if self.encoding is None: + return line + try: - if encoding is None or encoding in ('utf-8', 'iso-8859-1'): - return line - return line.decode(encoding).encode('utf-8') + return line.decode(self.encoding).encode('utf-8') except LookupError, e: raise OperationError(space.w_LookupError, space.wrap(e.message)) diff --git a/pypy/interpreter/pycompiler.py b/pypy/interpreter/pycompiler.py --- a/pypy/interpreter/pycompiler.py +++ b/pypy/interpreter/pycompiler.py @@ -153,8 +153,32 @@ e.wrap_info(space)) return mod + def _compile_file_to_ast(self, stream, info): + space = self.space + try: + stream = pyparse.StdStream(space, stream) + parse_tree = self.parser.parse_file(stream, info) + stream.close() + mod = astbuilder.ast_from_node(space, parse_tree, info) + f_flags, future_info = future.get_futures(self.future_flags, mod) + info.last_future_import = future_info + info.flags |= f_flags + except parseerror.IndentationError, e: + raise OperationError(space.w_IndentationError, + e.wrap_info(space)) + except parseerror.SyntaxError, e: + raise OperationError(space.w_SyntaxError, + e.wrap_info(space)) + return mod + def compile(self, source, filename, mode, flags, hidden_applevel=False): info = pyparse.CompileInfo(filename, mode, flags, hidden_applevel=hidden_applevel) mod = self._compile_to_ast(source, info) return self._compile_ast(mod, info) + + def compile_file(self, stream, filename, mode, flags, hidden_applevel=False): + info = pyparse.CompileInfo(filename, mode, flags, + hidden_applevel=hidden_applevel) + mod = self._compile_file_to_ast(stream, info) + return self._compile_ast(mod, info) From commits-noreply at bitbucket.org Tue Mar 22 17:54:12 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 17:54:12 +0100 (CET) Subject: [pypy-svn] pypy 
compile-from-stream: Translation fixes Message-ID: <20110322165412.7DE7F282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42842:3b5978d60efd Date: 2011-03-22 17:49 +0100 http://bitbucket.org/pypy/pypy/changeset/3b5978d60efd/ Log: Translation fixes diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -5,19 +5,25 @@ flags = 0 pos = (-1, 0) - if not isinstance(tree, (ast.Module, ast.Interactive)): + if isinstance(tree, ast.Module): + stmts = tree.body + elif isinstance(tree, ast.Interactive): + stmts = tree.body + else: return flags, pos - if not tree.body: + if stmts is None: return flags, pos found_docstring = False - for elem in tree.body: + for elem in stmts: if isinstance(elem, ast.ImportFrom): if elem.module != '__future__': break for alias in elem.names: + if not isinstance(alias, ast.alias): + continue name = alias.name try: flags |= future_flags.compiler_features[name] diff --git a/pypy/module/_file/interp_file.py b/pypy/module/_file/interp_file.py --- a/pypy/module/_file/interp_file.py +++ b/pypy/module/_file/interp_file.py @@ -470,6 +470,10 @@ file.fdopenstream(stream, fd, mode) return space.wrap(file) +def detach_stream(space, w_file): + file = space.interp_w(W_File, w_file) + return file.detach() + def descr_file_closed(space, file): return space.wrap(file.stream is None) diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -2,6 +2,8 @@ from pypy.interpreter.error import OperationError from pypy.interpreter.pyparser import parser, pytokenizer, pygram, error from pypy.interpreter.astcompiler import consts +from pypy.interpreter.unicodehelper import PyUnicode_EncodeUTF8 + _recode_to_utf8 = gateway.applevel(r''' @@ -115,7 +117,8 @@ return self.stream.readline() else: w_line = 
self.space.call_function(self.w_readline) - return self.space.unicode_w(w_line).encode('utf-8') + return PyUnicode_EncodeUTF8(self.space, + self.space.unicode_w(w_line)) def set_encoding(self, encoding): self.encoding = encoding @@ -132,12 +135,15 @@ def close(self): if self.w_file: - self.w_file.detach() + from pypy.module._file import interp_file + interp_file.detach_stream(self.space, self.w_file) class PythonParser(parser.Parser): - def __init__(self, space, grammar=pygram.python_grammar): - parser.Parser.__init__(self, grammar) + IMPORT_FROM = pygram.python_grammar.symbol_ids['import_from'] + + def __init__(self, space): + parser.Parser.__init__(self, pygram.python_grammar) self.space = space def _detect_encoding(self, text, lineno, compile_info): @@ -234,7 +240,7 @@ return self.build_tree(source_lines, compile_info) def parse_future_import(self, node): - if node.type != self.grammar.symbol_ids['import_from']: + if node.type != self.IMPORT_FROM: return children = node.children # from __future__ import ..., must have at least 4 children @@ -256,9 +262,11 @@ for i in range(0, len(child.children), 2): c = child.children[i] - if (len(c.children) >= 1 and - c.children[0].type == pygram.tokens.NAME): - name = c.children[0].value + if (len(c.children) == 0 or + c.children[0].type != pygram.tokens.NAME): + continue + + name = c.children[0].value if name == 'print_function': self.compile_info.flags |= consts.CO_FUTURE_PRINT_FUNCTION From commits-noreply at bitbucket.org Tue Mar 22 18:07:11 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Tue, 22 Mar 2011 18:07:11 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: add greenlet to the docs since we dont include it any more Message-ID: <20110322170711.04186282B9D@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42843:3ad80bfd18fc Date: 2011-03-22 17:41 +0100 http://bitbucket.org/pypy/pypy/changeset/3ad80bfd18fc/ Log: add greenlet to the docs since we dont include it any 
more diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -39,13 +39,15 @@ [user at debian-box ~]$ sudo apt-get install \ gcc make python-dev libffi-dev pkg-config \ - libz-dev libbz2-dev libncurses-dev libexpat1-dev libssl-dev libgc-dev python-sphinx + libz-dev libbz2-dev libncurses-dev libexpat1-dev \ + libssl-dev libgc-dev python-sphinx python-greenlet On a Fedora box these are:: [user at fedora-or-rh-box ~]$ sudo yum install \ gcc make python-devel libffi-devel pkg-config \ - zlib-devel bzip2-devel ncurses-devel expat-devel openssl-devel gc-devel python-sphinx + zlib-devel bzip2-devel ncurses-devel expat-devel \ + openssl-devel gc-devel python-sphinx python-greenlet The above command lines are split with continuation characters, giving the necessary dependencies first, then the optional ones. @@ -57,6 +59,7 @@ * ``libssl-dev`` (for the optional ``_ssl`` module) * ``libgc-dev`` (for the Boehm garbage collector: only needed when translating with `--opt=0, 1` or `size`) * ``python-sphinx`` (for the optional documentation build) + * ``python-greenlet`` (for the optional stackless support in interpreted mode/testing) 2. Translation is somewhat time-consuming (30 min to over one hour) and RAM-hungry. 
If you have less than 1.5 GB of From commits-noreply at bitbucket.org Tue Mar 22 18:35:51 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Tue, 22 Mar 2011 18:35:51 +0100 (CET) Subject: [pypy-svn] pypy subrepo-removal: finish the subrepo-removal branch Message-ID: <20110322173551.1EFE5282B9D@codespeak.net> Author: Ronny Pfannschmidt Branch: subrepo-removal Changeset: r42844:45dd932f506c Date: 2011-03-22 18:32 +0100 http://bitbucket.org/pypy/pypy/changeset/45dd932f506c/ Log: finish the subrepo-removal branch From commits-noreply at bitbucket.org Tue Mar 22 18:35:52 2011 From: commits-noreply at bitbucket.org (RonnyPfannschmidt) Date: Tue, 22 Mar 2011 18:35:52 +0100 (CET) Subject: [pypy-svn] pypy default: merge subrepo-removal to default Message-ID: <20110322173552.0B0C1282B9D@codespeak.net> Author: Ronny Pfannschmidt Branch: Changeset: r42845:575aecded05b Date: 2011-03-22 18:34 +0100 http://bitbucket.org/pypy/pypy/changeset/575aecded05b/ Log: merge subrepo-removal to default diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl From commits-noreply at bitbucket.org Tue Mar 22 19:38:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 19:38:01 +0100 (CET) Subject: [pypy-svn] pypy default: (vincele) issue654: fix for test_incorrect_code_name in test_import.py Message-ID: <20110322183801.51E14282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42846:f0ac0762752d Date: 2011-03-22 19:31 +0100 http://bitbucket.org/pypy/pypy/changeset/f0ac0762752d/ Log: (vincele) issue654: fix for test_incorrect_code_name in test_import.py Code objects loaded from a .pyc file are updated to reflect 
the actual file name, regardless of the location of the file when it was originally compiled. diff --git a/pypy/module/imp/test/test_app.py b/pypy/module/imp/test/test_app.py --- a/pypy/module/imp/test/test_app.py +++ b/pypy/module/imp/test/test_app.py @@ -138,3 +138,58 @@ ) # Doesn't end up in there when run with -A assert sys.path_importer_cache.get(lib_pypy) is None + + def test_rewrite_pyc_check_code_name(self): + # This one is adapted from cpython's Lib/test/test_import.py + from os import chmod + from os.path import join + from sys import modules, path + from shutil import rmtree + from tempfile import mkdtemp + code = """if 1: + import sys + code_filename = sys._getframe().f_code.co_filename + module_filename = __file__ + constant = 1 + def func(): + pass + func_filename = func.func_code.co_filename + """ + + module_name = "unlikely_module_name" + dir_name = mkdtemp(prefix='pypy_test') + file_name = join(dir_name, module_name + '.py') + with open(file_name, "wb") as f: + f.write(code) + compiled_name = file_name + ("c" if __debug__ else "o") + chmod(file_name, 0777) + + # Setup + sys_path = path[:] + orig_module = modules.pop(module_name, None) + assert modules.get(module_name) == None + path.insert(0, dir_name) + + # Test + import py_compile + py_compile.compile(file_name, dfile="another_module.py") + __import__(module_name, globals(), locals()) + mod = modules.get(module_name) + + try: + # Ensure proper results + assert mod != orig_module + assert mod.module_filename == compiled_name + assert mod.code_filename == file_name + assert mod.func_filename == file_name + finally: + # TearDown + path[:] = sys_path + if orig_module is not None: + modules[module_name] = orig_module + else: + try: + del modules[module_name] + except KeyError: + pass + rmtree(dir_name, True) diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -10,6 +10,7 @@ from pypy.interpreter.error import 
OperationError, operationerrfmt from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.eval import Code +from pypy.interpreter.pycode import PyCode from pypy.rlib import streamio, jit, rposix from pypy.rlib.streamio import StreamErrors from pypy.rlib.rarithmetic import intmask @@ -774,10 +775,22 @@ if space.config.objspace.usepycfiles and write_pyc: write_compiled_module(space, code_w, cpathname, mode, mtime) + update_code_filenames(space, code_w, pathname, code_w.co_filename) exec_code_module(space, w_mod, code_w) return w_mod +def update_code_filenames(space, code_w, pathname, oldname): + assert isinstance(code_w, PyCode) + if code_w.co_filename != oldname: + return + + code_w.co_filename = pathname + constants = code_w.co_consts_w + for const in constants: + if const is not None and isinstance(const, PyCode): + update_code_filenames(space, const, pathname, oldname) + def _get_long(s): a = ord(s[0]) b = ord(s[1]) From commits-noreply at bitbucket.org Tue Mar 22 19:55:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 19:55:57 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Merge default Message-ID: <20110322185557.38CE3282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42847:ebea75507188 Date: 2011-03-22 19:43 +0100 http://bitbucket.org/pypy/pypy/changeset/ebea75507188/ Log: Merge default diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -10,6 +10,7 @@ from pypy.interpreter.error import 
OperationError, operationerrfmt from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.eval import Code +from pypy.interpreter.pycode import PyCode from pypy.rlib import streamio, jit, rposix from pypy.rlib.streamio import StreamErrors from pypy.rlib.rarithmetic import intmask @@ -780,10 +781,22 @@ if space.config.objspace.usepycfiles and write_pyc: write_compiled_module(space, code_w, cpathname, mode, mtime) + update_code_filenames(space, code_w, pathname, code_w.co_filename) exec_code_module(space, w_mod, code_w) return w_mod +def update_code_filenames(space, code_w, pathname, oldname): + assert isinstance(code_w, PyCode) + if code_w.co_filename != oldname: + return + + code_w.co_filename = pathname + constants = code_w.co_consts_w + for const in constants: + if const is not None and isinstance(const, PyCode): + update_code_filenames(space, const, pathname, oldname) + def _get_long(s): a = ord(s[0]) b = ord(s[1]) From commits-noreply at bitbucket.org Tue Mar 22 19:55:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 19:55:57 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Fix one test in module/_ast Message-ID: <20110322185557.D792B282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42848:8abe7f2848bc Date: 2011-03-22 19:50 +0100 http://bitbucket.org/pypy/pypy/changeset/8abe7f2848bc/ Log: Fix one test in module/_ast diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -35,6 +35,8 @@ break if isinstance(elem.value, ast.Str): found_docstring = True + else: + break return flags, pos class FutureFlags(object): From commits-noreply at bitbucket.org Tue Mar 22 19:55:58 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 19:55:58 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Use a slightly more robust code structure 
Message-ID: <20110322185558.6BD11282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42849:f3c8ea0715ef Date: 2011-03-22 19:53 +0100 http://bitbucket.org/pypy/pypy/changeset/f3c8ea0715ef/ Log: Use a slightly more robust code structure diff --git a/pypy/interpreter/pyparser/future.py b/pypy/interpreter/pyparser/future.py --- a/pypy/interpreter/pyparser/future.py +++ b/pypy/interpreter/pyparser/future.py @@ -30,13 +30,12 @@ except KeyError: pass pos = elem.lineno, elem.col_offset + continue elif isinstance(elem, ast.Expr): - if found_docstring: - break - if isinstance(elem.value, ast.Str): + if not found_docstring and isinstance(elem.value, ast.Str): found_docstring = True - else: - break + continue + break return flags, pos class FutureFlags(object): From commits-noreply at bitbucket.org Tue Mar 22 22:04:45 2011 From: commits-noreply at bitbucket.org (fijal) Date: Tue, 22 Mar 2011 22:04:45 +0100 (CET) Subject: [pypy-svn] pypy numpy-exp: in-progress. Start implementing vector operations. It's a bit annoying because Message-ID: <20110322210445.AAE8E282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: numpy-exp Changeset: r42850:e583fdf3b8b1 Date: 2011-03-22 15:03 -0600 http://bitbucket.org/pypy/pypy/changeset/e583fdf3b8b1/ Log: in-progress. Start implementing vector operations. It's a bit annoying because we don't (can't?) have a type representing sse vector while not jitted so a bit of dance is required. 
diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py --- a/pypy/jit/metainterp/history.py +++ b/pypy/jit/metainterp/history.py @@ -15,6 +15,7 @@ INT = 'i' REF = 'r' FLOAT = 'f' +VECTOR = 'F' HOLE = '_' VOID = 'v' @@ -508,6 +509,9 @@ def forget_value(self): raise NotImplementedError +class BoxVector(Box): + _attrs_ = () + class BoxInt(Box): type = INT _attrs_ = ('value',) diff --git a/pypy/jit/backend/llgraph/llimpl.py b/pypy/jit/backend/llgraph/llimpl.py --- a/pypy/jit/backend/llgraph/llimpl.py +++ b/pypy/jit/backend/llgraph/llimpl.py @@ -161,7 +161,6 @@ 'force_token' : ((), 'int'), 'call_may_force' : (('int', 'varargs'), 'intorptr'), 'guard_not_forced': ((), None), - 'sse_float_add' : (('int', 'int', 'int', 'int'), None), } # ____________________________________________________________ @@ -736,12 +735,6 @@ op_getarrayitem_raw_pure = op_getarrayitem_raw - def op_sse_float_add(self, arraydescr, array1, array2, arrayres, - index): - from pypy.jit.metainterp.blackhole import BlackholeInterpreter - return BlackholeInterpreter.bhimpl_sse_float_add.im_func(self.cpu, - arraydescr, array1, array2, arrayres, index) - def op_getfield_gc(self, fielddescr, struct): if fielddescr.typeinfo == REF: return do_getfield_gc_ptr(struct, fielddescr.ofs) diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -394,11 +394,6 @@ opimpl_getarrayitem_gc_r = _opimpl_getarrayitem_gc_any opimpl_getarrayitem_gc_f = _opimpl_getarrayitem_gc_any - @arguments("descr", "box", "box", "box", "box") - def opimpl_sse_float_add(self, arraydescr, array1, array2, arrayres, index): - return self.execute_with_descr(rop.SSE_FLOAT_ADD, arraydescr, array1, - array2, arrayres, index) - @arguments("box", "descr", "box") def _opimpl_getarrayitem_raw_any(self, arraybox, arraydescr, indexbox): return self.execute_with_descr(rop.GETARRAYITEM_RAW, diff --git 
a/pypy/jit/codewriter/test/test_jtransform.py b/pypy/jit/codewriter/test/test_jtransform.py --- a/pypy/jit/codewriter/test/test_jtransform.py +++ b/pypy/jit/codewriter/test/test_jtransform.py @@ -947,3 +947,7 @@ assert op1.args[1] == 'calldescr-%d' % effectinfo.EffectInfo.OS_ARRAYCOPY assert op1.args[2] == ListOfKind('int', [v3, v4, v5]) assert op1.args[3] == ListOfKind('ref', [v1, v2]) + +def test_vector_ops(): + TP = lltype.Array(lltype.Float, hints={'nolength': True}) + diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ b/pypy/rpython/lltypesystem/opimpl.py @@ -578,10 +578,6 @@ def op_shrink_array(array, smallersize): return False -def op_sse_float_add(arr1, arr2, arr_res, index): - arr_res[index] = arr1[index] + arr2[index] - arr_res[index + 1] = arr1[index + 1] + arr2[index + 1] - # ____________________________________________________________ def get_op_impl(opname): diff --git a/pypy/rlib/rvector.py b/pypy/rlib/rvector.py new file mode 100644 --- /dev/null +++ b/pypy/rlib/rvector.py @@ -0,0 +1,31 @@ + +from pypy.rpython.extregistry import ExtRegistryEntry + +class VectorContainer(object): + """ Class that is a container for multiple float/int objects. 
+ Can be represented at jit-level by a single register, like xmm + on x86 architecture + """ + +class FloatVectorContainer(VectorContainer): + """ A container for float values + """ + def __init__(self, val1, val2): + self.v1 = val1 + self.v2 = val2 + + def __repr__(self): + return '<FloatVectorContainer %s, %s>' % (self.v1, self.v2) + +def vector_float_read(arr, index): + return FloatVectorContainer(arr[index], arr[index + 1]) +vector_float_read.oopspec = 'vector_float_read(arr, index)' + +def vector_float_write(arr, index, container): + arr[index] = container.v1 + arr[index + 1] = container.v2 +vector_float_write.oopspec = 'vector_from_write(arr, index, container)' + +def vector_float_add(left, right): + return FloatVectorContainer(left.v1 + right.v1, left.v2 + right.v2) +vector_float_add.oopspec = 'vector_float_add(left, right)' diff --git a/pypy/jit/metainterp/executor.py b/pypy/jit/metainterp/executor.py --- a/pypy/jit/metainterp/executor.py +++ b/pypy/jit/metainterp/executor.py @@ -125,17 +125,6 @@ else: cpu.bh_setarrayitem_raw_i(arraydescr, array, index, itembox.getint()) -def do_sse_float_add(cpu, _, array1, array2, arrayres, indexbox, arraydescr): - onebox = do_getarrayitem_raw(cpu, _, array1, indexbox, arraydescr) - twobox = do_getarrayitem_raw(cpu, _, array2, indexbox, arraydescr) - res = onebox.getfloat() + twobox.getfloat() - do_setarrayitem_raw(cpu, _, arrayres, indexbox, BoxFloat(res), arraydescr) - indexbox = BoxInt(indexbox.getint() + 1) - onebox = do_getarrayitem_raw(cpu, _, array1, indexbox, arraydescr) - twobox = do_getarrayitem_raw(cpu, _, array2, indexbox, arraydescr) - res = onebox.getfloat() + twobox.getfloat() - do_setarrayitem_raw(cpu, _, arrayres, indexbox, BoxFloat(res), arraydescr) - def do_getfield_gc(cpu, _, structbox, fielddescr): struct = structbox.getref_base() if fielddescr.is_pointer_field(): diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py --- a/pypy/jit/metainterp/blackhole.py +++ b/pypy/jit/metainterp/blackhole.py @@
-1069,15 +1069,6 @@ def bhimpl_setarrayitem_raw_f(cpu, array, arraydescr, index, newvalue): cpu.bh_setarrayitem_raw_f(arraydescr, array, index, newvalue) - @arguments("cpu", "d", "i", "i", "i", "i") - def bhimpl_sse_float_add(cpu, arraydescr, array1, array2, array_res, index): - one = cpu.bh_getarrayitem_raw_f(arraydescr, array1, index) - two = cpu.bh_getarrayitem_raw_f(arraydescr, array2, index) - cpu.bh_setarrayitem_raw_f(arraydescr, array_res, index, one + two) - one = cpu.bh_getarrayitem_raw_f(arraydescr, array1, index + 1) - two = cpu.bh_getarrayitem_raw_f(arraydescr, array2, index + 1) - cpu.bh_setarrayitem_raw_f(arraydescr, array_res, index + 1, one + two) - # note, there is no 'r' here, since it can't happen @arguments("cpu", "r", "d", returns="i") diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -441,10 +441,6 @@ 'get_write_barrier_from_array_failing_case': LLOp(sideeffects=False), 'gc_get_type_info_group': LLOp(sideeffects=False), - # __________ vectorization ops _______ - - 'sse_float_add': LLOp(canrun=True), - # __________ GC operations __________ 'gc__collect': LLOp(canunwindgc=True), diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py --- a/pypy/jit/codewriter/jtransform.py +++ b/pypy/jit/codewriter/jtransform.py @@ -351,6 +351,8 @@ prepare = self._handle_jit_call elif oopspec_name.startswith('libffi_'): prepare = self._handle_libffi_call + elif oopspec_name.startswith('vector_'): + prepare = self._handle_vector_op else: prepare = self.prepare_builtin_call try: @@ -476,14 +478,6 @@ return self._do_builtin_call(op, 'raw_free', [op.args[0]], extra = (ARRAY,), extrakey = ARRAY) - def rewrite_op_sse_float_add(self, op): - ARRAY = op.args[0].concretetype.TO - arraydescr = self.cpu.arraydescrof(ARRAY) - kind = getkind(op.result.concretetype) - assert kind == 'void' - return 
SpaceOperation('sse_float_add', - [arraydescr] + op.args, op.result) - def rewrite_op_getarrayitem(self, op): ARRAY = op.args[0].concretetype.TO if self._array_of_voids(ARRAY): @@ -1359,6 +1353,17 @@ assert False, 'unsupported oopspec: %s' % oopspec_name return self._handle_oopspec_call(op, args, oopspecindex, extraeffect) + # ---------- + # vector ops + + def _handle_vector_op(self, op, oopspec_name, args): + if oopspec_name in ['vector_float_read', + 'vector_float_write', + 'vector_float_add']: + return SpaceOperation(oopspec_name, op.args, op.result) + else: + raise NotSupported(oopspec_name) + def rewrite_op_jit_force_virtual(self, op): return self._do_builtin_call(op) diff --git a/pypy/rlib/test/test_rvector.py b/pypy/rlib/test/test_rvector.py new file mode 100644 --- /dev/null +++ b/pypy/rlib/test/test_rvector.py @@ -0,0 +1,56 @@ + +from pypy.rlib.rvector import (vector_float_read, vector_float_write, + vector_float_add) +from pypy.rpython.lltypesystem import lltype +from pypy.rpython.test.test_llinterp import interpret + +TP = lltype.Array(lltype.Float, hints={'nolength': True}) + +class TestRVector(object): + def test_direct_add(self): + a = lltype.malloc(TP, 16, flavor='raw') + b = lltype.malloc(TP, 16, flavor='raw') + res = lltype.malloc(TP, 16, flavor='raw') + a[0] = 1.2 + a[1] = 1.3 + b[0] = 0.1 + b[1] = 0.3 + a[10] = 8.3 + a[11] = 8.1 + b[10] = 7.8 + b[11] = 7.6 + f1 = vector_float_read(a, 0) + f2 = vector_float_read(b, 0) + vector_float_write(res, 2, vector_float_add(f1, f2)) + assert res[2] == 1.2 + 0.1 + assert res[3] == 1.3 + 0.3 + f1 = vector_float_read(a, 10) + f2 = vector_float_read(b, 10) + vector_float_write(res, 8, vector_float_add(f1, f2)) + assert res[8] == 8.3 + 7.8 + assert res[9] == 8.1 + 7.6 + lltype.free(a, flavor='raw') + lltype.free(b, flavor='raw') + lltype.free(res, flavor='raw') + + def test_interpret(self): + def f(): + a = lltype.malloc(TP, 16, flavor='raw') + b = lltype.malloc(TP, 16, flavor='raw') + res = lltype.malloc(TP, 16, 
flavor='raw') + try: + a[0] = 1.2 + a[1] = 1.3 + b[0] = 0.1 + b[1] = 0.3 + f1 = vector_float_read(a, 0) + f2 = vector_float_read(b, 0) + vector_float_write(res, 8, vector_float_add(f1, f2)) + return res[8] * 100 + res[9] + finally: + lltype.free(a, flavor='raw') + lltype.free(b, flavor='raw') + lltype.free(res, flavor='raw') + + res = interpret(f, []) + assert res == f() diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5515,9 +5515,6 @@ # not obvious, because of the exception UnicodeDecodeError that # can be raised by ll_str2unicode() - - - ##class TestOOtype(OptimizeOptTest, OOtypeMixin): ## def test_instanceof(self): diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -466,7 +466,6 @@ 'SETARRAYITEM_RAW/3d', 'SETFIELD_GC/2d', 'SETFIELD_RAW/2d', - 'SSE_FLOAT_ADD/4d', 'STRSETITEM/3', 'UNICODESETITEM/3', #'RUNTIMENEW/1', # ootype operation From commits-noreply at bitbucket.org Tue Mar 22 23:45:11 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 23:45:11 +0100 (CET) Subject: [pypy-svn] pypy compile-from-stream: Fix a systematic crash in translated code Message-ID: <20110322224511.DF669282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: compile-from-stream Changeset: r42851:05a2c780a660 Date: 2011-03-22 23:29 +0100 http://bitbucket.org/pypy/pypy/changeset/05a2c780a660/ Log: Fix a systematic crash in translated code diff --git a/pypy/interpreter/pyparser/pyparse.py b/pypy/interpreter/pyparser/pyparse.py --- a/pypy/interpreter/pyparser/pyparse.py +++ b/pypy/interpreter/pyparser/pyparse.py @@ -220,7 +220,9 @@ break enc = compile_info.encoding - if enc in ('utf-8', 'iso-8859-1'): + if enc is None: + pass + elif enc in ('utf-8', 'iso-8859-1'): enc = None # No 
need to recode stream.set_encoding(enc) From commits-noreply at bitbucket.org Tue Mar 22 23:45:12 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Tue, 22 Mar 2011 23:45:12 +0100 (CET) Subject: [pypy-svn] pypy default: Try to remove thousands of warnings saying "prebuilt instance has no attribute 'co_filename' Message-ID: <20110322224512.84B96282B9D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42852:8560e39f47b1 Date: 2011-03-22 23:44 +0100 http://bitbucket.org/pypy/pypy/changeset/8560e39f47b1/ Log: Try to remove thousands of warnings saying "prebuilt instance has no attribute 'co_filename' diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -775,14 +775,16 @@ if space.config.objspace.usepycfiles and write_pyc: write_compiled_module(space, code_w, cpathname, mode, mtime) - update_code_filenames(space, code_w, pathname, code_w.co_filename) + update_code_filenames(space, code_w, pathname) exec_code_module(space, w_mod, code_w) return w_mod -def update_code_filenames(space, code_w, pathname, oldname): +def update_code_filenames(space, code_w, pathname, oldname=None): assert isinstance(code_w, PyCode) - if code_w.co_filename != oldname: + if oldname is None: + oldname = code_w.co_filename + elif code_w.co_filename != oldname: return code_w.co_filename = pathname From commits-noreply at bitbucket.org Wed Mar 23 00:37:10 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 00:37:10 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Merge default Message-ID: <20110322233710.988DD36C203@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42853:ba198d73c419 Date: 2011-03-22 17:33 -0600 http://bitbucket.org/pypy/pypy/changeset/ba198d73c419/ Log: Merge default diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- 
a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. 
diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. -The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. Our aim is of course to make sure that the communities way of working -is disturbed as little as possible and that contributing to PyPy still feels -fun and interesting (;-) but also to try to show to the EU as well as other -funded projects that open source ideas, tools and methods are really good ways -of running development projects. So the way PyPy as a project is being run - -distributed and agile - is something we think might be of use to other open -source development projects and commercial projects. 
- -Main methods for achieving this is: - - * Sprint driven development - * Sync meetings - -Main tools for achieving this is: - - * py.test - automated testing - * Subversion - version control - * Transparent communication and documentation (mailinglists, IRC, tutorials - etc etc) - - -Sprint driven development: --------------------------- - -What is a sprint and why are we sprinting? - -Originally the sprint methodology used in the Python community grew from -practices within Zope3 development. The definition of a sprint is "two-day or -three-day focused development session, in which developers pair off together -in a room and focus on building a particular subsystem". - -Other typical sprint factors: - - * no more than 10 people (although other projects as well as PyPy haven been - noted to have more than that. This is the recommendation and it is - probably based on the idea of having a critical mass of people who can - interact/communicate and work without adding the need for more than just - the absolute necessary coordination time. The sprints during 2005 and 2006 have - been having ca 13-14 people per sprint, the highest number of participants - during a PyPy sprint has been 24 developers) - - * a coach (the coach is the "manager" of the sprint, he/she sets the goals, - prepares, leads and coordinate the work and track progress and makes this - visible for the team. Important to note here - PyPy have never had coaches - in our sprints. Instead we hold short status meetings in the whole group, - decisions are made in the same way. So far this have worked well and we - still have been able to achieve tremendous results under stressed - conditions, releases and such like. What we do have is a local organizer, - often a developer living in the area and one more developer who prepares - and organizes sprint. They do not "manage" the sprint when its started - - their role is more of the logistic nature. 
This doesn't mean that we wont - have use for the coach technique or something similar in the future). - - * only coding (this is a tough one. There have been projects who have used - the sprinting method to just visionalize och gather input. PyPy have had a - similar brainstorming start up sprint. So far though this is the official - line although again, if you visit a PyPy sprint we are doing quite a lot - of other small activities in subgroups as well - planning sprints, - documentation, coordinating our EU deliverables and evaluation etc. But - don't worry - our main focus is programming ;-) - - * using XP techniques (mainly pairprogramming and unit testing - PyPy is - leaning heavily on these aspects). Pairing up core developers with people - with different levels of knowledge of the codebase have had the results - that people can quite quickly get started and join in the development. - Many of our participants (new to the project and the codebase) have - expressed how pairprogramming in combination with working on the automated - tests have been a great way of getting started. This is of course also a - dilemma because our core developers might have to pair up to solve some - extra hairy problems which affects the structure and effect of the other - pairs. - -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. 
- -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. 
If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. A permanent Internet connection is a must - - has the venue were the sprint is planned to be weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue is - being met should therefore have very good local connections or, preferably - live there. - -3. 
Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content is fully decided a sprint - announcement is being made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcements points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and between to track work. After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and updated to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. 
The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. - - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. - - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. 
The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! - -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? 
-+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden - Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo 
- Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/module/pypyjit/interp_jit.py b/pypy/module/pypyjit/interp_jit.py --- a/pypy/module/pypyjit/interp_jit.py +++ b/pypy/module/pypyjit/interp_jit.py @@ -6,7 +6,7 @@ from pypy.tool.pairtype import extendabletype from pypy.rlib.rarithmetic import r_uint, intmask from pypy.rlib.jit import JitDriver, hint, we_are_jitted, dont_look_inside -from pypy.rlib.jit import current_trace_length +from pypy.rlib.jit import current_trace_length, unroll_parameters import pypy.interpreter.pyopcode # for side-effects from pypy.interpreter.error import OperationError, operationerrfmt from pypy.interpreter.pycode import PyCode, CO_GENERATOR @@ -138,12 +138,17 @@ raise OperationError(space.w_ValueError, space.wrap("error in JIT parameters string")) for key, w_value in kwds_w.items(): - intval = space.int_w(w_value) - try: - pypyjitdriver.set_param(key, intval) - except ValueError: - raise operationerrfmt(space.w_TypeError, - "no JIT parameter '%s'", key) + if key == 'enable_opts': + pypyjitdriver.set_param('enable_opts', space.str_w(w_value)) + else: + intval = space.int_w(w_value) + for name, _ in 
unroll_parameters: + if name == key and name != 'enable_opts': + pypyjitdriver.set_param(name, intval) + break + else: + raise operationerrfmt(space.w_TypeError, + "no JIT parameter '%s'", key) @dont_look_inside def residual_call(space, w_callable, __args__): diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). 
- -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. 
diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. - -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. - -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. 
This belongs -to the harder parts. - -What is not meant by pickling? -.............................. - -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state im memory -is not what I consider a real solution. In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend of we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. - -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. - -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. 
- -For other types with are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. - -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ - -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return form the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. 
-Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. - -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. - -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. It can be made non-recursive if the map operation - creates its own frame to keep state. 
- -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. - -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. - -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. - -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. -This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. 
- -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. -If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. - -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. 
We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. - -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. - -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. 
-The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. - -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... - -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which not it not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. - -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable lifeness, it does not -track references which are known to be held by other objects. 
-Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. -The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the sketched problem of above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - -.. sectnum:: -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. - - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. - -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. - -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. - -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. 
- -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. - -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. - - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. - -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. - -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. 
- -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. - -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. _`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. _`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. _`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. 
_`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. _`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) - -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. 
- -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. 
_`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. _`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. 
_Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. 
By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. - - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... 
mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. - -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. 
There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. - -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. 
Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. - -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. 
- -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. 
- -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... - except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. 
Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. - -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. 
- -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. - -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. 
_`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. 
This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). - - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. 
The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. - - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. - - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. 
- - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). - - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. 
It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. ``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. _pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. 
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. 
- -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. 
To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. 
diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). 
diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: -.. sectnum:: - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. _`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. 
_`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. 
The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. - - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. 
_`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. 
_`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. - -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified.2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. 
Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. **This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). 
-As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. 
- -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. -- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). - -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. 
-This module is expected to be working and is included by default on Windows. diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,237 +0,0 @@ -PyPy, like any large project, has developed a jargon of its own. 
This -document gives brief definition of some of these terms and provides -links to more information. - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. - It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. _`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. 
_llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. _lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. 
_`promotion`: - -**promotion** - JIT_ terminology. *promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. _`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. 
_`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. _`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. 
Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. 
The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool objects. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must following the ``then`` or ``else`` branch of the ``if``. 
-So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this with doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulates the secret values needs to be -RPython. Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. 
The compiled -version is checked to satisfy the security constrains, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it. Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. 
This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are another variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. 
It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret". -Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. 
- -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. - -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. 
From the object space' point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. (Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. 
yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,59 +0,0 @@ - -The PyPy project aims at producing a flexible and fast Python_ -implementation. The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? `more...`_ - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. 
You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. _`Release 1.4`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. 
It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/jit/codewriter/longlong.py b/pypy/jit/codewriter/longlong.py --- a/pypy/jit/codewriter/longlong.py +++ b/pypy/jit/codewriter/longlong.py @@ -40,7 +40,7 @@ getfloatstorage = longlong2float.float2longlong getrealfloat = longlong2float.longlong2float - gethash = lambda xll: xll - (xll >> 32) + gethash = lambda xll: rarithmetic.intmask(xll - (xll >> 32)) is_longlong = lambda TYPE: (TYPE == lltype.SignedLongLong or TYPE == lltype.UnsignedLongLong) diff --git a/pypy/doc/jit/index.txt b/pypy/doc/jit/index.txt deleted file mode 100644 --- a/pypy/doc/jit/index.txt +++ /dev/null @@ -1,26 +0,0 @@ -======================================================================== - JIT documentation -======================================================================== - -:abstract: - - When PyPy is translated into an executable like ``pypy-c``, the - executable contains a full virtual machine that can optionally - include a Just-In-Time compiler. This JIT compiler is **generated - automatically from the interpreter** that we wrote in RPython. - - This JIT Compiler Generator can be applied on interpreters for any - language, as long as the interpreter itself is written in RPython - and contains a few hints to guide the JIT Compiler Generator. - - -Content ------------------------------------------------------------- - -- Overview_: motivating our approach - -- Notes_ about the current work in PyPy - - -.. _Overview: overview.html -.. _Notes: pyjitpl5.html diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -1,7 +1,7 @@ import py import sys from pypy.rlib.jit import JitDriver, we_are_jitted, hint, dont_look_inside -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_SIMPLE, loop_invariant +from pypy.rlib.jit import loop_invariant from pypy.rlib.jit import jit_debug, assert_green, AssertGreenFailed from pypy.rlib.jit import unroll_safe, current_trace_length from pypy.jit.metainterp.warmspot import ll_meta_interp, get_stats @@ -15,11 +15,11 @@ from pypy.jit.metainterp.typesystem import LLTypeHelper, OOTypeHelper from pypy.rpython.lltypesystem import lltype, llmemory from pypy.rpython.ootypesystem import ootype +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT def _get_jitcodes(testself, CPUClass, func, values, type_system, supports_longlong=False, **kwds): from pypy.jit.codewriter import support, codewriter - from pypy.jit.metainterp import simple_optimize class FakeJitCell: __compiled_merge_points = [] @@ -37,11 +37,8 @@ return self._cell _cell = FakeJitCell() - # pick the optimizer this way - optimize_loop = staticmethod(simple_optimize.optimize_loop) - optimize_bridge = staticmethod(simple_optimize.optimize_bridge) - trace_limit = sys.maxint + enable_opts = ALL_OPTS_DICT func._jit_unroll_safe_ = True rtyper = support.annotate(func, values, type_system=type_system) @@ -1176,7 +1173,7 @@ x += inst.foo n -= 1 return x - res = self.meta_interp(f, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [20], enable_opts='') assert res == f(20) self.check_loops(call=0) @@ 
-1379,8 +1376,7 @@ m = m >> 1 return x - res = self.meta_interp(f, [50, 1], - optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [50, 1], enable_opts='') assert res == 42 def test_set_param(self): @@ -2323,12 +2319,12 @@ res = self.meta_interp(f, [1, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert not res res = self.meta_interp(f, [0, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res class BaseLLtypeTests(BasicTests): @@ -2408,5 +2404,25 @@ self.meta_interp(main, []) + def test_enable_opts(self): + jitdriver = JitDriver(greens = [], reds = ['a']) + + class A(object): + def __init__(self, i): + self.i = i + + def f(): + a = A(0) + + while a.i < 10: + jitdriver.jit_merge_point(a=a) + jitdriver.can_enter_jit(a=a) + a = A(a.i + 1) + + self.meta_interp(f, []) + self.check_loops(new_with_vtable=0) + self.meta_interp(f, [], enable_opts='') + self.check_loops(new_with_vtable=1) + class TestLLtype(BaseLLtypeTests, LLJitMixin): pass diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. 
What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. - -The other approach ------------------- - -The general idea to solve handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. - - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes an usual -ootyped flowgraph and split the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). 
- - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). - -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). - -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. 
return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. - - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. 
result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only kind of ``Pair``, whose ``value`` and -``behaviour`` type are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. - -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performances. - - diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. 
_`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our hosts was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. 
_`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were hold at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. _`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There was over a hundred -attendants. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy will met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. _report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -.. 
_`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. _`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. 
_`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. -Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. 
-Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. _`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). -Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. 
_`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. -Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. _`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. 
We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. _`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. 
_`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. _`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. _page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 
7246.38 pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - 
real 0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -11,7 +11,7 @@ from pypy.rlib.objectmodel import we_are_translated from pypy.rlib import rgc from pypy.jit.backend.llsupport import symbolic -from pypy.jit.backend.x86.jump import remap_frame_layout +from pypy.jit.backend.x86.jump import remap_frame_layout_mixed from pypy.jit.codewriter import heaptracker, longlong from pypy.jit.codewriter.effectinfo import EffectInfo from pypy.jit.metainterp.resoperation import rop @@ -110,6 +110,12 @@ return StackLoc(i, get_ebp_ofs(i+1), 2, box_type) else: return StackLoc(i, get_ebp_ofs(i), 1, box_type) + @staticmethod + def frame_size(box_type): + if IS_X86_32 and box_type == FLOAT: + return 2 + else: + return 1 class RegAlloc(object): @@ -1195,16 +1201,17 @@ xmmtmploc = self.xrm.force_allocate_reg(box1, selected_reg=xmmtmp) # Part about non-floats # XXX we don't need a copy, we only just the original list - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations1 = [self.loc(op.getarg(i)) for 
i in range(op.numargs()) if op.getarg(i).type != FLOAT] assert tmploc not in nonfloatlocs - dst_locations = [loc for loc in nonfloatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, tmploc) + dst_locations1 = [loc for loc in nonfloatlocs if loc is not None] # Part about floats - src_locations = [self.loc(op.getarg(i)) for i in range(op.numargs()) + src_locations2 = [self.loc(op.getarg(i)) for i in range(op.numargs()) if op.getarg(i).type == FLOAT] - dst_locations = [loc for loc in floatlocs if loc is not None] - remap_frame_layout(assembler, src_locations, dst_locations, xmmtmp) + dst_locations2 = [loc for loc in floatlocs if loc is not None] + remap_frame_layout_mixed(assembler, + src_locations1, dst_locations1, tmploc, + src_locations2, dst_locations2, xmmtmp) self.rm.possibly_free_var(box) self.xrm.possibly_free_var(box1) self.possibly_free_vars_for_op(op) diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. thunk space or - another special space diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. _`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. 
diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. - -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,123 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. contents:: -.. sectnum:: - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. 
_`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. _`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. -We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. - -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. 
You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. - - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. _`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. 
See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). - -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. 
_`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. 
_`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/conftest.py b/pypy/doc/conftest.py deleted file mode 100644 --- a/pypy/doc/conftest.py +++ /dev/null @@ -1,29 +0,0 @@ -import py - -from pypy.config.makerestdoc import register_config_role -docdir = py.path.local(__file__).dirpath() - -pytest_plugins = "pypy.doc.pytest_restdoc" - -def pytest_addoption(parser): - group = parser.getgroup("pypy-doc options") - group.addoption('--pypy-doctests', action="store_true", - dest="pypy_doctests", default=False, - help="enable doctests in .txt files") - group.addoption('--generate-redirections', - action="store_true", dest="generateredirections", - default=True, help="Generate redirecting HTML files") - -def pytest_configure(config): - register_config_role(docdir) - -def pytest_doctest_prepare_content(content): - if not py.test.config.getvalue("pypy_doctests"): - py.test.skip("specify --pypy-doctests to run doctests") - l = [] - for line in content.split("\n"): - if line.find('>>>>') != -1: - line = "" - l.append(line) - return "\n".join(l) - diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. 
diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. 
-This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. 
-Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. - -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). - -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. 
- - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. - -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. - - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. - - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? 
- - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. 
diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Upto Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. -The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. 
- -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. 
Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. 
The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. 
-:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern = W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when 
space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. 
-This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. - -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. 
- -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. - -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. 
_rtyping: ../rtyper.html diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. 
We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.txt deleted file mode 100644 --- a/pypy/doc/discussion/finalizer-order.txt +++ /dev/null @@ -1,166 +0,0 @@ -Ordering finalizers in the SemiSpace GC -======================================= - -Goal ----- - -After a collection, the SemiSpace GC should call the finalizers on -*some* of the objects that have one and that have become unreachable. -Basically, if there is a reference chain from an object a to an object b -then it should not call the finalizer for b immediately, but just keep b -alive and try again to call its finalizer after the next collection. - -This basic idea fails when there are cycles. It's not a good idea to -keep the objects alive forever or to never call any of the finalizers. 
-The model we came up with is that in this case, we could just call the -finalizer of one of the objects in the cycle -- but only, of course, if -there are no other objects outside the cycle that has a finalizer and a -reference to the cycle. - -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. 
Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
- -In the MiniMark GC, the objects don't move (apart from when they are -copied out of the nursery), but we use the flag GCFLAG_VISITED to mark -objects that survive, so we can also have a single extra bit for -finalizers: - - ===== ============== ============================ - state GCFLAG_VISITED GCFLAG_FINALIZATION_ORDERING - ===== ============== ============================ - 0 no no - 1 no yes - 2 yes yes - 3 yes no - ===== ============== ============================ diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. _`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). 
Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). - -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). 
- -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. - -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. _`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. 
internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. 
- -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. 
PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. - -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. 
For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. 
- -The steps of the translation process can be summarized as follows: - -* The code object of each source function is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. _`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. 
It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. _`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. 
- - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. 
- - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). - -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. 
- -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... - ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) 
- -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. - -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: -.. sectnum:: - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. - -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. 
The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initialize appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows to view -the Virtual's machine bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. -they don't have registers but put object to and pull objects -from a stack. The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer a -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to an bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. 
-See `application level exceptions`_ for some more information -on ``OperationErrors``. - -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code. Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettingers -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). - -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. _`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. 
 _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. _`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls objects - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. 
In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. _Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. 
Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names pass to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the `'create_frame()`` method. With proper parser and compiler support this would -allow to create custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function. 
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). 
diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - * 
experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. - throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. 
_`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. 
- -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. 
- It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. 
_`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/jit/metainterp/resoperation.py b/pypy/jit/metainterp/resoperation.py --- a/pypy/jit/metainterp/resoperation.py +++ b/pypy/jit/metainterp/resoperation.py @@ -486,7 +486,6 @@ #'OOSEND', # ootype operation #'OOSEND_PURE', # ootype operation 'CALL_PURE/*d', # removed before it's passed to the backend - # CALL_PURE(result, func, arg_1,..,arg_n) '_CALL_LAST', '_CANRAISE_LAST', # ----- end of can_raise operations ----- diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -21,7 +21,7 @@ from pypy.rlib.objectmodel import specialize from pypy.jit.codewriter.jitcode import JitCode, SwitchDictDescr, MissingLiveness from pypy.jit.codewriter import heaptracker, longlong -from pypy.jit.metainterp.optimizeutil import RetraceLoop +from pypy.jit.metainterp.optimizeutil import RetraceLoop, args_dict_box, args_dict # ____________________________________________________________ @@ -1279,11 +1279,6 @@ self._addr2name_keys = [key for key, value in list_of_addr2name] self._addr2name_values = [value for key, value in list_of_addr2name] - def setup_jitdrivers_sd(self, optimizer): - if optimizer is not None: - for jd in self.jitdrivers_sd: - jd.warmstate.set_param_optimizer(optimizer) - def finish_setup(self, codewriter, optimizer=None): from pypy.jit.metainterp.blackhole import BlackholeInterpBuilder self.blackholeinterpbuilder = BlackholeInterpBuilder(codewriter, self) @@ -1297,7 +1292,6 @@ self.jitdrivers_sd = codewriter.callcontrol.jitdrivers_sd self.virtualref_info = codewriter.callcontrol.virtualref_info self.callinfocollection = codewriter.callcontrol.callinfocollection - self.setup_jitdrivers_sd(optimizer) # # store this information for fastpath of call_assembler # (only the paths that can actually be taken) @@ -1420,6 +1414,7 @@ self.free_frames_list = [] self.last_exc_value_box = None self.retracing_loop_from = None + self.call_pure_results = args_dict_box() def perform_call(self, jitcode, boxes, greenkey=None): # causes the metainterp to enter the given subfunction @@ -1427,10 +1422,13 @@ f.setup_call(boxes) raise ChangeFrame + def is_main_jitcode(self, jitcode): + return self.jitdriver_sd is not None and jitcode is self.jitdriver_sd.mainjitcode + def newframe(self, jitcode, greenkey=None): if jitcode.is_portal: self.in_recursion += 1 - if greenkey is not None: + if greenkey is not None 
and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (greenkey, len(self.history.operations))) if len(self.free_frames_list) > 0: @@ -1443,9 +1441,10 @@ def popframe(self): frame = self.framestack.pop() - if frame.jitcode.is_portal: + jitcode = frame.jitcode + if jitcode.is_portal: self.in_recursion -= 1 - if frame.greenkey is not None: + if frame.greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (None, len(self.history.operations))) # we save the freed MIFrames to avoid needing to re-create new @@ -1636,6 +1635,7 @@ warmrunnerstate = self.jitdriver_sd.warmstate if len(self.history.operations) > warmrunnerstate.trace_limit: greenkey_of_huge_function = self.find_biggest_function() + self.staticdata.stats.record_aborted(greenkey_of_huge_function) self.portal_trace_positions = None if greenkey_of_huge_function is not None: warmrunnerstate.disable_noninlinable_function( @@ -2283,7 +2283,9 @@ return resbox_as_const # not all constants (so far): turn CALL into CALL_PURE, which might # be either removed later by optimizeopt or turned back into CALL. - newop = op.copy_and_change(rop.CALL_PURE, args=[resbox_as_const]+op.getarglist()) + arg_consts = [a.constbox() for a in op.getarglist()] + self.call_pure_results[arg_consts] = resbox_as_const + newop = op.copy_and_change(rop.CALL_PURE, args=op.getarglist()) self.history.operations[-1] = newop return resbox diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? 
- [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? - [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? 
- [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... 
maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - [10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, 
level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. 
diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. 
Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. 
_`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. 
- -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. 
- -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. - -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. 
_`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. _`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. 
_`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. _`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. 
- -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. - -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. 
_feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. _`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. 
diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. 
You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. 
- -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. 
- -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: -.. sectnum:: - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. 
_`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. - -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). 
The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. - -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. 
- -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. include:: _ref.txt diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. _mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. 
PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. -It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. 
- -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. 
We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. 
- -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. 
It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. 
-That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. 
It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. 
Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) 
- -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live in `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``.
Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ``ovfcheck(x<<y)``. - -Using the Pylint checker ------------------------- - -Recent versions of Pylint (>=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will check for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -..
_astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. 
- - -Naming conventions ------------------ - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ----------------------- - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! The -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly an interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ---------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level.
In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. 
Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. -here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. 
- -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. 
- -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... - -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. 
- -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. -(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. 
For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. - -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. 
_`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. 
If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while your test code -runs at application level. If you need to use modules -you have to import them within the test function. - -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b": 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - ..
_`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. _`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. 
diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. 
for .NET: System.Collections.ArrayList) - - - external instances (e.g. for js: window, window.document) - - - external functions? (they are not needed for .NET and JVM, maybe - for js?) - -External objects should behave as much as possible as "internal -objects". - -Moreover, we want to preserve the possibility of *testing* RPython -programs on top of CPython if possible. For example, it should be -possible to RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as I know. - - -How to represent types ----------------------- - -First, some definitions: - - - high-level types are the types used by the annotator - (SomeInteger() & co.) - - - low-level types are the types used by the rtyper (Signed & co.) - - - platform-level types are the types used by the backends (e.g. int32 for - .NET) - -Usually, RPython types are described "top-down": we start from the -annotation, then the rtyper transforms the high-level types into -low-level types, then the backend transforms low-level types into -platform-level types. E.g. for .NET, SomeInteger() -> Signed -> int32. - -External objects are different: we *already* know the platform-level -types of our objects and we can't modify them. What we need to do is -to specify an annotation that after the high-level -> low-level -> -platform-level transformation will give us the correct types. - -For primitive types it is usually easy to find the correct annotation; -if we have an int32, we know that it's ootype is Signed and the -corresponding annotation is SomeInteger(). - -For non-primitive types such as classes, we must use a "bottom-up" -approach: first, we need a description of platform-level interface of -the class; then we construct the corresponding low-level type and -teach the backends how to treat such "external types". Finally, we -wrap the low-level types into special "external annotation". 
- -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... } - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... 
- -This is not straightforward because to make the flow objspace happy the -object which represent System.OverflowException must be a real Python -class that inherits from Exception. - -This means that the Python objects which represent external classes -must be Python classes itself, and that classes representing -exceptions must be special cased and made subclasses of Exception. - - -Inheritance -~~~~~~~~~~~ - -It would be nice to allow programmers to inherit from an external -class. Not sure about the implications, though. - -Callbacks -~~~~~~~~~ - -I know that they are an issue for JS, but I don't know how they are -currently implemented. - -Special methods/properties -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In .NET there are special methods that can be accessed using a special -syntax, for example indexer or properties. It would be nice to have in -RPython the same syntax as C#. - - -Implementation details ----------------------- - -The CLI backend use a similar approach right now, but it could be -necessary to rewrite a part of it. - -To represent low-level types, it uses NativeInstance, a subclass of -ootype.Instance that contains all the information needed by the -backend to reference the class (e.g., the namespace). It also supports -overloading. - -For annotations, it reuses SomeOOInstance, which is also a wrapper -around a low-level type but it has been designed for low-level -helpers. It might be saner to use another annotation not to mix apples -and oranges, maybe factoring out common code. - -I don't know whether and how much code can be reused from the existing -bltregistry. diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. 
diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. 
_`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. 
_`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. 
_`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. 
It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. 
Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. - -PyPy's ctypes implementation in its current state proves the -feasibility of implementing a module with the same interface and -behavior for PyPy as ctypes for CPython. - -PyPy's implementation internally uses `libffi`_ like CPython's ctypes. -In our implementation as much as possible of the code is written in -full Python, not RPython. 
In CPython's situation, the equivalent would -be to write as little as possible code in C. We essentially favored -rapid experimentation over worrying about speed for this first trial -implementation. This allowed to provide a working implementation with -a large part of ctypes features in 2 months real time. - -We reused the ``ctypes`` package version 1.0.2 as-is from CPython. We -implemented ``_ctypes`` which is a C module in CPython mostly in pure -Python based on a lower-level layer extension module ``_rawffi``. - -.. _`libffi`: http://sources.redhat.com/libffi/ - -Low-level part: ``_rawffi`` -============================ - -This PyPy extension module (``pypy/module/_rawffi``) exposes a simple interface -to create C objects (arrays and structures) and calling functions -in dynamic libraries through libffi. Freeing objects in most cases and making -sure that objects referring to each other are kept alive is responsibility of the higher levels. - -This module uses bindings to libffi which are defined in ``pypy/rlib/libffi.py``. - -We tried to keep this module as small as possible. It is conceivable -that other implementations (e.g. Jython) could use our ctypes -implementation by writing their version of ``_rawffi``. - -High-level parts -================= - -The reused ``ctypes`` package lives in ``lib_pypy/ctypes``. ``_ctypes`` -implementing the same interface as ``_ctypes`` in CPython is in -``lib_pypy/_ctypes``. - -Discussion and limitations -============================= - -Reimplementing ctypes features was in general possible. PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. 
C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. - -If one goes inside the checkout it is possible to run ``_rawffi`` tests with:: - - $ cd pypy - $ python test_all.py module/_rawffi/ - -The ctypes implementation test suite is derived from the tests for -ctypes 1.0.2, we have skipped some tests corresponding to not -implemented features or implementation details, we have also added -some tests. - -To run the test suite a compiled pypy-c is required with the proper configuration. 
To build the required pypy-c one should inside the checkout:: - - $ cd pypy/translator/goal - $ ./translate.py --text --batch --gc=generation targetpypystandalone.py - --withmod-_rawffi --allworkingmodules - -this should produce a pypy-c executable in the ``goal`` directory. - -To run the tests then:: - - $ cd ../../.. # back to pypy-trunk - $ ./pypy/translator/goal/pypy-c pypy/test_all.py lib/pypy1.2/lib_pypy/pypy_test/ctypes_tests - -There should be 36 skipped tests and all other tests should pass. - -Running application examples -============================== - -`pyglet`_ is known to run. We had some success also with pygame-ctypes which is not maintained anymore and with a snapshot of the experimental pysqlite-ctypes. We will only describe how to run the pyglet examples. - -pyglet -------- - -We tried pyglet checking it out from its repository at revision 1984. -For convenience a tarball of the checkout can also be found at: - -http://codespeak.net/~pedronis/pyglet-r1984.tgz - -From pyglet, the following examples are known to work: - - - opengl.py - - multiple_windows.py - - events.py - - html_label.py - - timer.py - - window_platform_event.py - - fixed_resolution.py - -The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.:: - - $ cd pyglet/ - $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py - - -they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave. - -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. 
_`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. _`stackless transform`: ../stackless.html diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. 
diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statistics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets.
diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. - -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? 
-------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! - -The mostly likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. - -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. 
PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). - -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. 
_`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. - -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. 
_`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. - -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. 
_`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! - - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. 
__: http://llvm.org/ - -What PyPy contains is, on the one hand, an non-soft static type -inferencer for RPython, which is a sublanguage that we defined just so -that it's possible and not too hard to do that; and on the other hand, -for the full Python language, we have an interpreter, and a JIT -generator which can produce a Just-In-Time Compiler from the -interpreter. The resulting JIT works for the full Python language in a -way that doesn't need type inference at all. - -For more motivation and details about our approach see also [D05.1]_, -section 3. - -.. [BRETT] Brett Cannon, - Localized Type Inference of Atomic Types in Python, - http://www.ocf.berkeley.edu/~bac/thesis.pdf - -.. [D05.1] Compiling Dynamic Language Implementations, - Report from the PyPy project to the E.U., - http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - -.. _`PyPy's RPython`: - ------------------------------- -What is this RPython language? ------------------------------- - -RPython is a restricted subset of the Python language. It is used for -implementing dynamic language interpreters within the PyPy framework. The -restrictions are to ensure that type inference (and so, ultimately, translation -to other languages) of RPython programs is possible. These restrictions only -apply after the full import happens, so at import time arbitrary Python code can -be executed. - -The property of "being RPython" always applies to a full program, not to single -functions or modules (the translation tool chain does a full program analysis). -"Full program" in the context of "being RPython" is all the code reachable from -an "entry point" function. The translation toolchain follows all calls -recursively and discovers what belongs to the program and what not. - -The restrictions that apply to programs to be RPython mostly limit the ability -of mixing types in arbitrary ways. 
RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. -To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. `Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. 
Early attempts were dropped because they -conflicted with refactorings that we needed in order to progress on the -rest of PyPy; the currently active developers of PyPy have different -priorities. If someone wants to start working in that direction I -imagine that he might get a (very little) bit of support from us, -though. - -Alternatively, it's possible to write a mixed-module, i.e. an extension -module for PyPy in RPython, which you can then import from your Python -program when it runs on top of PyPy. This is similar to writing a C -extension module for CPython in term of investment of effort (without -all the INCREF/DECREF mess, though). - ------------------------------------------------------- -What's the ``"NOT_RPYTHON"`` I see in some docstrings? ------------------------------------------------------- - -If you put "NOT_RPYTHON" into the docstring of a function and that function is -found while trying to translate an RPython program, the translation process -stops and reports this as an error. You can therefore mark functions as -"NOT_RPYTHON" to make sure that they are never analyzed. - - -------------------------------------------------------------------- -Couldn't we simply take a Python syntax tree and turn it into Lisp? -------------------------------------------------------------------- - -It's not necessarily nonsense, but it's not really The PyPy Way. It's -pretty hard, without some kind of type inference, to translate, say this -Python:: - - a + b - -into anything significantly more efficient than this Common Lisp:: - - (py:add a b) - -And making type inference possible is what RPython is all about. - -You could make ``#'py:add`` a generic function and see if a given CLOS -implementation is fast enough to give a useful speed (but I think the -coercion rules would probably drive you insane first). -- mwh - --------------------------------------------- -Do I have to rewrite my programs in RPython? --------------------------------------------- - -No. 
PyPy always runs your code in its own interpreter, which is a -full and compliant Python 2.5 interpreter. RPython_ is only the -language in which parts of PyPy itself are written and extension -modules for it. The answer to whether something needs to be written as -an extension module, apart from the "gluing to external libraries" reason, will -change over time as speed for normal Python code improves. - -------------------------- -Which backends are there? -------------------------- - -Currently, there are backends for C_, the CLI_, and the JVM_. -All of these can translate the entire PyPy interpreter. -To learn more about backends take a look at the `translation document`_. - -.. _C: translation.html#the-c-back-end -.. _CLI: cli-backend.html -.. _JVM: translation.html#genjvm -.. _`translation document`: translation.html - ----------------------- -How do I compile PyPy? ----------------------- - -See the `getting-started`_ guide. - -.. _`how do I compile my own interpreters`: - -------------------------------------- -How do I compile my own interpreters? -------------------------------------- - -Start from the example of -`pypy/translator/goal/targetnopstandalone.py`_, which you compile by -typing:: - - python translate.py targetnopstandalone - -You can have a look at intermediate C source code, which is (at the -moment) put in ``/tmp/usession-*/testing_1/testing_1.c``. Of course, -all the functions and stuff used directly and indirectly by your -``entry_point()`` function has to be RPython_. - - -.. _`RPython`: coding-guide.html#rpython -.. _`getting-started`: getting-started.html - -.. include:: _ref.txt - ----------------------------------------------------------- -Why does PyPy draw a Mandelbrot fractal while translating? ----------------------------------------------------------- - -Because it's fun. 
diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.txt deleted file mode 100644 --- a/pypy/doc/discussion/gc.txt +++ /dev/null @@ -1,77 +0,0 @@ - -*Note: this things are experimental and are being implemented on the -`io-improvements`_ branch* - -.. _`io-improvements`: http://codespeak.net/svn/pypy/branch/io-improvements - -============= -GC operations -============= - -This document tries to gather gc-related issues which are very recent -or in-development. Also, it tries to document needed gc refactorings -and expected performance of certain gc-related operations. - -Problem area -============ - -Since some of our gcs are moving, we at some point decided to simplify -the issue of having care of it by always copying the contents of -data that goes to C level. This yields a performance penalty, also -because some gcs does not move data around anyway. - -So we decided to introduce new operations which will simplify issues -regarding this. - -Pure gc operations -================== - -(All available from rlib.rgc) - -* can_move(p) - returns a flag telling whether pointer p will move. - useful for example when you want to know whether memcopy is safe. - -* malloc_nonmovable(TP, n=None) - tries to allocate non-moving object. - if it succeeds, it return an object, otherwise (for whatever reasons) - returns null pointer. Does not raise! (never) - -Usage patterns -============== - -Usually those functions are used via helpers located in rffi. For things like -os.write - first get_nonmovingbuffer(data) that will give you a pointer -suitable of passing to C and finally free_nonmovingbuffer. 
- -For os.read like usage - you first call alloc_buffer (that will allocate a -buffer of desired size passable to C) and afterwards create str_from_buffer, -finally calling keep_buffer_alive_until_here. - -String builder -============== - -In Python strings are immutable by design. In RPython this still holds true, -but since we cooperate with lower (C/POSIX) level, which has no notion of -strings, we use buffers. Typical use case is to use list of characters l and -then ''.join(l) in order to get string. This requires a lot of unnecessary -copying, which yields performance penalty for such operations as string -formatting. Hence the idea of string builder. String builder would be an -object to which you can append strings or characters and afterwards build it -to a string. Ideally, this set of operations would not contain any copying -whatsoever. - -Low level gc operations for string builder ------------------------------------------- - -* alloc_buffer(T, size) - allocates Array(nolength=True) with possibility - of later becoming of shape T - -* realloc_buffer(buf, newsize) - tries to shrink or enlarge buffer buf. Returns - new pointer (since it might involve copying) - -* build_buffer(T, buf) - creates a type T (previously passed to alloc_buffer) - from buffer. - -Depending on a gc, those might be implemented dumb (realloc always copies) -or using C-level realloc. Might be implemented also in whatever clever way -comes to mind. - diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.txt deleted file mode 100644 --- a/pypy/doc/config/translation.taggedpointers.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable tagged pointers. This option is mostly useful for the Smalltalk and -Prolog interpreters. For the Python interpreter the option -:config:`objspace.std.withsmallint` should be used.
diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ b/pypy/rpython/lltypesystem/opimpl.py @@ -227,6 +227,14 @@ assert isinstance(y, int) return x | y +def op_int_xor(x, y): + # used in computing hashes + if isinstance(x, AddressAsInt): x = llmemory.cast_adr_to_int(x.adr) + if isinstance(y, AddressAsInt): y = llmemory.cast_adr_to_int(y.adr) + assert isinstance(x, int) + assert isinstance(y, int) + return x ^ y + def op_int_mul(x, y): assert isinstance(x, (int, llmemory.AddressOffset)) assert isinstance(y, (int, llmemory.AddressOffset)) diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/pypy/doc/jit/_ref.txt b/pypy/doc/jit/_ref.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.profopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use GCC's profile-guided optimizations. This option specifies the -arguments with which to call pypy-c (and in general the translated -RPython program) to gather profile data.
Example for pypy-c: "-c 'from -richards import main;main(); from test import pystone; -pystone.main()'" diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: -.. sectnum:: - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. 
- - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... 
- >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". 
- -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. -You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. 
The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. - -* `pypy/annotation`_ contains the data model for the type annotation that - can be inferred about a graph. The graph "walker" that uses this is in - `pypy/annotation/annrpython.py`_. - -* `pypy/rpython`_ contains the code of the RPython typer. The typer transforms - annotated flow graphs in a way that makes them very similar to C code so - that they can be easy translated. The graph transformations are controlled - by the stuff in `pypy/rpython/rtyper.py`_. The object model that is used can - be found in `pypy/rpython/lltypesystem/lltype.py`_. For each RPython type - there is a file rxxxx.py that contains the low level functions needed for - this type. - -* `pypy/rlib`_ contains the RPython standard library, things that you can - use from rpython. - -.. _optionaltool: - - -Running PyPy's unit tests -------------------------- - -PyPy development always was and is still thorougly test-driven. -We use the flexible `py.test testing tool`_ which you can `install independently -`_ and use indepedently -from PyPy for other projects. - -The PyPy source tree comes with an inlined version of ``py.test`` -which you can invoke by typing:: - - python pytest.py -h - -This is usually equivalent to using an installed version:: - - py.test -h - -If you encounter problems with the installed version -make sure you have the correct version installed which -you can find out with the ``--version`` switch. - -Now on to running some tests. 
PyPy has many different test directories -and you can use shell completion to point at directories or files:: - - py.test pypy/interpreter/test/test_pyframe.py - - # or for running tests of a whole subdirectory - py.test pypy/interpreter/ - -See `py.test usage and invocations`_ for some more generic info -on how you can run tests. - -Beware trying to run "all" pypy tests by pointing to the root -directory or even the top level subdirectory ``pypy``. It takes -hours and uses huge amounts of RAM and is not recommended. - -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. 
If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. _`interpreter-level and app-level`: coding-guide.html#interpreter-level - -.. _`trace example`: - -Tracing bytecode and operations on objects -++++++++++++++++++++++++++++++++++++++++++ - -You can use the trace object space to monitor the interpretation -of bytecodes in connection with object space operations. To enable -it, set ``__pytrace__=1`` on the interactive PyPy console:: - - >>>> __pytrace__ = 1 - Tracing enabled - >>>> a = 1 + 2 - |- <<<< enter a = 1 + 2 @ 1 >>>> - |- 0 LOAD_CONST 0 (W_IntObject(1)) - |- 3 LOAD_CONST 1 (W_IntObject(2)) - |- 6 BINARY_ADD - |- add(W_IntObject(1), W_IntObject(2)) -> W_IntObject(3) - |- 7 STORE_NAME 0 (a) - |- hash(W_StringObject('a')) -> W_IntObject(-468864544) - |- int_w(W_IntObject(-468864544)) -> -468864544 - |-10 LOAD_CONST 2 () - |-13 RETURN_VALUE - |- <<<< leave a = 1 + 2 @ 1 >>>> - -Demos -------- - -The `demo/`_ directory contains examples of various aspects of PyPy, -ranging from running regular Python programs (that we used as compliance goals) -over experimental distribution mechanisms to examples translating -sufficiently static programs into low level code. - -Additional Tools for running (and hacking) PyPy ------------------------------------------------ - -We use some optional tools for developing PyPy. 
They are not required to run -the basic tests or to get an interactive PyPy prompt but they help to -understand and debug PyPy especially for the translation process. - -graphviz & pygame for flow graph viewing (highly recommended) -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -graphviz and pygame are both necessary if you -want to look at generated flow graphs: - - graphviz: http://www.graphviz.org/Download.php - - pygame: http://www.pygame.org/download.shtml - -CTypes on Python 2.4 -++++++++++++++++++++++++++++ - -`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users needs to -install it if they want to run low-level tests. See -the `download page of ctypes`_. - -.. _`download page of ctypes`: http://sourceforge.net/project/showfiles.php?group_id=71702 -.. _`ctypes`: http://starship.python.net/crew/theller/ctypes/ - -.. _`py.test`: - -py.test and the py lib -+++++++++++++++++++++++ - -The `py.test testing tool`_ drives all our testing needs. - -We use the `py library`_ for filesystem path manipulations, terminal -writing, logging and some other support functionality. - -You don't neccessarily need to install these two libraries because -we also ship them inlined in the PyPy source tree. - -Getting involved ------------------ - -PyPy employs an open development process. You are invited to join our -`pypy-dev mailing list`_ or look at the other `contact -possibilities`_. Usually we give out commit rights fairly liberally, so if you -want to do something with PyPy, you can become a committer. We are also doing -coding Sprints which are -separately announced and often happen around Python conferences such -as EuroPython or Pycon. Upcoming events are usually announced on `the blog`_. - -.. _`full Python interpreter`: getting-started-python.html -.. _`the blog`: http://morepypy.blogspot.com -.. _`pypy-dev mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`contact possibilities`: index.html - -.. 
_`py library`: http://pylib.org - -.. _`Spidermonkey`: http://www.mozilla.org/js/spidermonkey/ - -.. _`.NET Framework SDK 2.0`: http://msdn.microsoft.com/netframework/downloads/updates/default.aspx -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _clr: clr-module.html - -.. _`Dot Graphviz`: http://www.graphviz.org/ -.. _Pygame: http://www.pygame.org/ -.. _pyopcode.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyopcode.py -.. _eval.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/eval.py -.. _pyframe.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyframe.py -.. _function.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/function.py -.. _argument.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/argument.py -.. _baseobjspace.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/baseobjspace.py -.. _module.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/module.py -.. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py -.. _typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py -.. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py -.. _mailing lists: index.html -.. _documentation: docindex.html -.. _unit tests: coding-guide.html#test-design - -.. _`directory reference`: docindex.html#directory-reference - -.. 
include:: _ref.txt - diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. 
- -Extensible type system for llexternal -------------------------------------- - -llexternal allows the description of a C function, and conveys the same -information about the arguments as a C header. But this is often not enough. -For example, a parameter of type `int*` is converted to -`rffi.CArrayPtr(rffi.INT)`, but this information is not enough to use the -function. The parameter could be an array of int, a reference to a single value, -for input or output... - -A "type system" could hold this additional information, and automatically -generate some conversion code to ease the usage of the function from -rpython. For example:: - - # double frexp(double x, int *exp); - frexp = llexternal("frexp", [rffi.DOUBLE, OutPtr(rffi.int)], rffi.DOUBLE) - -`OutPtr` indicates that the parameter is output-only, which need not to be -initialized, and which *value* is returned to the caller. In rpython the call -becomes:: - - fraction, exponent = frexp(value) - -Also, we could imagine that one item in the llexternal argument list corresponds -to two parameters in C. Here, OutCharBufferN indicates that the caller will pass -a rpython string; the framework will pass buffer and length to the function:: - - # ssize_t write(int fd, const void *buf, size_t count); - write = llexternal("write", [rffi.INT, CharBufferAndSize], rffi.SSIZE_T) - -The rpython code that calls this function is very simple:: - - written = write(fd, data) - -compared with the present:: - - count = len(data) - buf = rffi.get_nonmovingbuffer(data) - try: - written = rffi.cast(lltype.Signed, os_write( - rffi.cast(rffi.INT, fd), - buf, rffi.cast(rffi.SIZE_T, count))) - finally: - rffi.free_nonmovingbuffer(data, buf) - -Typemaps are very useful for large APIs where the same conversions are needed in -many places. 
XXX example diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.txt deleted file mode 100644 --- a/pypy/doc/how-to-release.txt +++ /dev/null @@ -1,54 +0,0 @@ -Making a PyPy Release -======================= - -Overview ---------- - -As a meta rule setting up issues in the tracker for items here may help not -forgetting things. A set of todo files may also work. - -Check and prioritize all issues for the release, postpone some if necessary, -create new issues also as necessary. A meeting (or meetings) should be -organized to decide what things are priorities, should go in and work for -the release. - -An important thing is to get the documentation into an up-to-date state! - -Release Steps ----------------- - -* at code freeze make a release branch under - http://codepeak.net/svn/pypy/release/x.y(.z). 
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backend are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? - -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. 
The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. 
The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. - -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). 
This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. - - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. 
- -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. - - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. 
_Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. 
The __del__ methods are -called in "the right" order if they are on objects pointing to each -other, as in CPython, but unlike CPython, if there is a dead cycle of -objects referencing each other, their __del__ methods are called anyway; -CPython would instead put them into the list ``garbage`` of the ``gc`` -module. More information is available on the blog `[1]`__ `[2]`__. - -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-1.html -.. __: http://morepypy.blogspot.com/2008/02/python-finalizers-semantics-part-2.html - -Using the default GC called ``minimark``, the built-in function ``id()`` -works like it does in CPython. With other GCs it returns numbers that -are not real addresses (because an object can move around several times) -and calling it a lot can lead to performance problem. - -Note that if you have a long chain of objects, each with a reference to -the next one, and each with a __del__, PyPy's GC will perform badly. On -the bright side, in most other cases, benchmarks have shown that PyPy's -GCs perform much better than CPython's. - -Another difference is that if you add a ``__del__`` to an existing class it will -not be called:: - - >>>> class A(object): - .... pass - .... - >>>> A.__del__ = lambda self: None - __main__:1: RuntimeWarning: a __del__ method added to an existing type will not be called - - -Subclasses of built-in types ----------------------------- - -Officially, CPython has no rule at all for when exactly -overridden method of subclasses of built-in types get -implicitly called or not. As an approximation, these methods -are never called by other built-in methods of the same object. -For example, an overridden ``__getitem__()`` in a subclass of -``dict`` will not be called by e.g. the built-in ``get()`` -method. - -The above is true both in CPython and in PyPy. Differences -can occur about whether a built-in function or method will -call an overridden method of *another* object than ``self``. 
-In PyPy, they are generally always called, whereas not in -CPython. For example, in PyPy, ``dict1.update(dict2)`` -considers that ``dict2`` is just a general mapping object, and -will thus call overridden ``keys()`` and ``__getitem__()`` -methods on it. So the following code prints ``42`` on PyPy -but ``foo`` on CPython:: - - >>>> class D(dict): - .... def __getitem__(self, key): - .... return 42 - .... - >>>> - >>>> d1 = {} - >>>> d2 = D(a='foo') - >>>> d1.update(d2) - >>>> print d1['a'] - 42 - - -Ignored exceptions ------------------------ - -In many corner cases, CPython can silently swallow exceptions. -The precise list of when this occurs is rather long, even -though most cases are very uncommon. The most well-known -places are custom rich comparison methods (like \_\_eq\_\_); -dictionary lookup; calls to some built-in functions like -isinstance(). - -Unless this behavior is clearly present by design and -documented as such (as e.g. for hasattr()), in most cases PyPy -lets the exception propagate instead. - - -Miscellaneous -------------- - -* ``sys.setrecursionlimit()`` is ignored (and not needed) on - PyPy. On CPython it would set the maximum number of nested - calls that can occur before a RuntimeError is raised; on PyPy - overflowing the stack also causes RuntimeErrors, but the limit - is checked at a lower level. (The limit is currently hard-coded - at 768 KB, corresponding to roughly 1480 Python calls on - Linux.) - -* assignment to ``__class__`` is limited to the cases where it - works on CPython 2.5. On CPython 2.6 and 2.7 it works in a bit - more cases, which are not supported by PyPy so far. (If needed, - it could be supported, but then it will likely work in many - *more* case on PyPy than on CPython 2.6/2.7.) - - -.. 
include:: _ref.txt diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... 
- -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. 
- -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). 
- -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... 
A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). - -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). 
- -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. 
diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== -Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! 
- -Object Optimizations -==================== - -String Optimizations --------------------- - -String-Join Objects -+++++++++++++++++++ - -String-join objects are a different implementation of the Python ``str`` type, -They represent the lazy addition of several strings without actually performing -the addition (which involves copying etc.). When the actual value of the string -join object is needed, the addition is performed. This makes it possible to -perform repeated string additions in a loop without using the -``"".join(list_of_strings)`` pattern. - -You can enable this feature enable with the :config:`objspace.std.withstrjoin` -option. - -String-Slice Objects -++++++++++++++++++++ - -String-slice objects are another implementation of the Python ``str`` type. -They represent the lazy slicing of a string without actually performing the -slicing (which would involve copying). This is only done for slices of step -one. When the actual value of the string slice object is needed, the slicing -is done (although a lot of string methods don't make this necessary). This -makes string slicing a very efficient operation. It also saves memory in some -cases but can also lead to memory leaks, since the string slice retains a -reference to the original string (to make this a bit less likely, we don't -use lazy slicing when the slice would be much shorter than the original -string. There is also a minimum number of characters below which being lazy -is not saving any time over making the copy). - -You can enable this feature with the :config:`objspace.std.withstrslice` option. - -Ropes -+++++ - -Ropes are a general flexible string implementation, following the paper `"Ropes: -An alternative to Strings."`_ by Boehm, Atkinson and Plass. Strings are -represented as balanced concatenation trees, which makes slicing and -concatenation of huge strings efficient. 
- -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. _`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. 
Multi-dicts are a general way to do that for dictionaries: they -provide generic support for the switching of internal representations for -dicts. - -If you just enable multi-dicts, special representations for empty dictionaries, -for string-keyed dictionaries. In addition there are more specialized dictionary -implementations for various purposes (see below). - -This is now the default implementation of dictionaries in the Python interpreter. -option. - -Sharing Dicts -+++++++++++++ - -Sharing dictionaries are a special representation used together with multidicts. -This dict representation is used only for instance dictionaries and tries to -make instance dictionaries use less memory (in fact, in the ideal case the -memory behaviour should be mostly like that of using __slots__). - -The idea is the following: Most instances of the same class have very similar -attributes, and are even adding these keys to the dictionary in the same order -while ``__init__()`` is being executed. That means that all the dictionaries of -these instances look very similar: they have the same set of keys with different -values per instance. What sharing dicts do is store these common keys into a -common structure object and thus save the space in the individual instance -dicts: -the representation of the instance dict contains only a list of values. - -A more advanced version of sharing dicts, called *map dicts,* is available -with the :config:`objspace.std.withmapdict` option. - -Builtin-Shadowing -+++++++++++++++++ - -Usually the calling of builtins in Python requires two dictionary lookups: first -to see whether the current global dictionary contains an object with the same -name, then a lookup in the ``__builtin__`` dictionary. This is somehow -circumvented by storing an often used builtin into a local variable to get -the fast local lookup (which is a rather strange and ugly hack). - -The same problem is solved in a different way by "wary" dictionaries. 
They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). 
For this a special dict -representation is used together with multidicts. This dict representation is -used only for instance dictionaries. The instance dictionary tracks whether an -instance attribute shadows an attribute of its class. This makes method calls -slightly faster in the following way: When calling a method the first thing that -is checked is the class dictionary to find descriptors. Normally, when a method -is found, the instance dictionary is then checked for instance attributes -shadowing the class attribute. If we know that there is no shadowing (since -instance dict tells us that) we can save this lookup on the instance dictionary. - -*This was deprecated and is no longer available.* - - -Method Caching -++++++++++++++ - -Shadow tracking is also an important building block for the method caching -optimization. A method cache is introduced where the result of a method lookup -is stored (which involves potentially many lookups in the base classes of a -class). Entries in the method cache are stored using a hash computed from -the name being looked up, the call site (i.e. the bytecode object and -the current program counter), and a special "version" of the type where the -lookup happens (this version is incremented every time the type or one of its -base classes is changed). On subsequent lookups the cached version can be used, -as long as the instance did not shadow any of its classes attributes. - -You can enable this feature with the :config:`objspace.std.withmethodcache` -option. - -Interpreter Optimizations -========================= - -Special Bytecodes ------------------ - -.. _`lookup method call method`: - -LOOKUP_METHOD & CALL_METHOD -+++++++++++++++++++++++++++ - -An unusual feature of Python's version of object oriented programming is the -concept of a "bound method". While the concept is clean and powerful, the -allocation and initialization of the object is not without its performance cost. 
-We have implemented a pair of bytecodes that alleviate this cost. - -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
- -After pushing the arguments, the layout of the stack in the above -example is as follows (the stack grows upwards): - -+---------------------------------+ -| ``y`` *(2nd arg)* | -+---------------------------------+ -| ``x`` *(1st arg)* | -+---------------------------------+ -| ``obj`` *(im_self)* | -+---------------------------------+ -| ``function object`` *(im_func)* | -+---------------------------------+ - -The ``CALL_METHOD N`` bytecode emulates a bound method call by -inspecting the *im_self* entry in the stack below the ``N`` arguments: -if it is not None, then it is considered to be an additional first -argument in the call to the *im_func* object from the stack. - -You can enable this feature with the :config:`objspace.opcodes.CALL_METHOD` -option. - -.. _`call likely builtin`: - -CALL_LIKELY_BUILTIN -+++++++++++++++++++ - -An often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the builtins dictionary. PyPy approaches this problem at the -implementation level, with the introduction of the new ``CALL_LIKELY_BUILTIN`` -bytecode. This bytecode is produced by the compiler for a call whose target is -the name of a builtin. Since such a syntactic construct is very often actually -invoking the expected builtin at run-time, this information can be used to make -the call to the builtin directly, without going through any dictionary lookup. - -However, it can occur that the name is shadowed by a global name from the -current module. To catch this case, a special dictionary implementation for -multidicts is introduced, which is used for the dictionaries of modules. This -implementation keeps track of which builtin name is shadowed by it. 
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. 
From commits-noreply at bitbucket.org Wed Mar 23 00:56:57 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 00:56:57 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: merge default Message-ID: <20110322235657.81057282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42854:c245c8c42930 Date: 2011-03-22 17:56 -0600 http://bitbucket.org/pypy/pypy/changeset/c245c8c42930/ Log: merge default diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl From commits-noreply at bitbucket.org Wed Mar 23 01:15:05 2011 From: commits-noreply at bitbucket.org (Alex Perry) Date: Wed, 23 Mar 2011 01:15:05 +0100 (CET) Subject: [pypy-svn] pypy default: Warn about sys.prefix not being set, assure GnuMakefile has relative paths Message-ID: <20110323001505.11BB6282B9D@codespeak.net> Author: Alex Perry Branch: Changeset: r42855:b74c0e804773 Date: 2011-03-22 21:51 +0000 http://bitbucket.org/pypy/pypy/changeset/b74c0e804773/ Log: Warn about sys.prefix not being set, assure GnuMakefile has relative paths diff --git a/pypy/translator/platform/posix.py b/pypy/translator/platform/posix.py --- a/pypy/translator/platform/posix.py +++ b/pypy/translator/platform/posix.py @@ -113,11 +113,16 @@ m.eci = eci def pypyrel(fpath): - rel = py.path.local(fpath).relto(pypypath) + lpath = py.path.local(fpath) + rel = lpath.relto(pypypath) if rel: return os.path.join('$(PYPYDIR)', rel) - else: - return fpath + m_dir = m.makefile_dir + if m_dir == lpath: + return '.' + if m_dir.dirpath() == lpath: + return '..' 
+ return fpath rel_cfiles = [m.pathrel(cfile) for cfile in cfiles] rel_ofiles = [rel_cfile[:-2]+'.o' for rel_cfile in rel_cfiles] diff --git a/pypy/translator/goal/app_main.py b/pypy/translator/goal/app_main.py --- a/pypy/translator/goal/app_main.py +++ b/pypy/translator/goal/app_main.py @@ -205,7 +205,8 @@ if dirname == search: # not found! let's hope that the compiled-in path is ok print >> sys.stderr, ('debug: WARNING: library path not found, ' - 'using compiled-in sys.path') + 'using compiled-in sys.path ' + 'and sys.prefix will be unset') newpath = sys.path[:] break newpath = sys.pypy_initial_path(dirname) diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -229,6 +229,12 @@ ../../.. etc. +If the executable fails to find suitable libraries, it will report +``debug: WARNING: library path not found, using compiled-in sys.path`` +and then attempt to continue normally. If the default path is usable, +most code will be fine. However, the ``sys.prefix`` will be unset +and some existing libraries assume that this is never the case. + In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. 
Here's an example session setting up and using ``easy_install``:: $ cd PREFIX From commits-noreply at bitbucket.org Wed Mar 23 01:15:06 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 01:15:06 +0100 (CET) Subject: [pypy-svn] pypy default: merge default Message-ID: <20110323001506.282FB282B9D@codespeak.net> Author: Maciej Fijalkowski Branch: Changeset: r42856:e7967ab4df8d Date: 2011-03-22 18:13 -0600 http://bitbucket.org/pypy/pypy/changeset/e7967ab4df8d/ Log: merge default diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl diff --git a/pypy/doc/getting-started-python.rst b/pypy/doc/getting-started-python.rst --- a/pypy/doc/getting-started-python.rst +++ b/pypy/doc/getting-started-python.rst @@ -39,13 +39,15 @@ [user at debian-box ~]$ sudo apt-get install \ gcc make python-dev libffi-dev pkg-config \ - libz-dev libbz2-dev libncurses-dev libexpat1-dev libssl-dev libgc-dev python-sphinx + libz-dev libbz2-dev libncurses-dev libexpat1-dev \ + libssl-dev libgc-dev python-sphinx python-greenlet On a Fedora box these are:: [user at fedora-or-rh-box ~]$ sudo yum install \ gcc make python-devel libffi-devel pkg-config \ - zlib-devel bzip2-devel ncurses-devel expat-devel openssl-devel gc-devel python-sphinx + zlib-devel bzip2-devel ncurses-devel expat-devel \ + openssl-devel gc-devel python-sphinx python-greenlet The above command lines are split with continuation characters, giving the necessary dependencies first, then the optional ones. 
@@ -57,6 +59,7 @@ * ``libssl-dev`` (for the optional ``_ssl`` module) * ``libgc-dev`` (for the Boehm garbage collector: only needed when translating with `--opt=0, 1` or `size`) * ``python-sphinx`` (for the optional documentation build) + * ``python-greenlet`` (for the optional stackless support in interpreted mode/testing) 2. Translation is somewhat time-consuming (30 min to over one hour) and RAM-hungry. If you have less than 1.5 GB of From commits-noreply at bitbucket.org Wed Mar 23 01:58:44 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 01:58:44 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Minor tweaks in includes Message-ID: <20110323005844.D540736C211@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42857:a953ba795f6c Date: 2011-03-22 18:58 -0600 http://bitbucket.org/pypy/pypy/changeset/a953ba795f6c/ Log: Minor tweaks in includes diff --git a/pypy/translator/c/src/timer.h b/pypy/translator/c/src/timer.h --- a/pypy/translator/c/src/timer.h +++ b/pypy/translator/c/src/timer.h @@ -1,7 +1,7 @@ #ifndef PYPY_TIMER_H #define PYPY_TIMER_H -/* XXX Some overlap with the stuff in debug_print +/* XXX Some overlap with the stuff in asm_gcc_x86 */ #define OP_LL_READ_TIMESTAMP(v) v = pypy_read_timestamp(); diff --git a/pypy/translator/c/src/g_include.h b/pypy/translator/c/src/g_include.h --- a/pypy/translator/c/src/g_include.h +++ b/pypy/translator/c/src/g_include.h @@ -10,7 +10,6 @@ # include "traceback.h" # include "marshal.h" # include "eval.h" -# include "timer.h" #else # include # include @@ -39,6 +38,7 @@ #include "src/instrument.h" #include "src/asm.h" +#include "src/timer.h" /*** modules ***/ From commits-noreply at bitbucket.org Wed Mar 23 02:43:08 2011 From: commits-noreply at bitbucket.org (lac) Date: Wed, 23 Mar 2011 02:43:08 +0100 (CET) Subject: [pypy-svn] pypy default: remove extra blank line. This is really to check that I can commit things. 
Message-ID: <20110323014308.46BF8282BAD@codespeak.net> Author: Laura Creighton Branch: Changeset: r42858:0e170ebc2e13 Date: 2011-03-23 02:12 +0100 http://bitbucket.org/pypy/pypy/changeset/0e170ebc2e13/ Log: remove extra blank line. This is really to check that I can commit things. diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -4,7 +4,6 @@ .. contents:: - .. _`try out the translator`: Trying out the translator From commits-noreply at bitbucket.org Wed Mar 23 02:43:08 2011 From: commits-noreply at bitbucket.org (lac) Date: Wed, 23 Mar 2011 02:43:08 +0100 (CET) Subject: [pypy-svn] pypy default: delete obsolete material Message-ID: <20110323014308.E1ED2282BAD@codespeak.net> Author: Laura Creighton Branch: Changeset: r42859:04d276c92744 Date: 2011-03-23 02:42 +0100 http://bitbucket.org/pypy/pypy/changeset/04d276c92744/ Log: delete obsolete material diff --git a/pypy/doc/getting-started-dev.rst b/pypy/doc/getting-started-dev.rst --- a/pypy/doc/getting-started-dev.rst +++ b/pypy/doc/getting-started-dev.rst @@ -17,9 +17,7 @@ * Download and install Pygame_. - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). 
+ * Download and install `Dot Graphviz`_ To start the interactive translator shell do:: From commits-noreply at bitbucket.org Wed Mar 23 06:29:25 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 23 Mar 2011 06:29:25 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: a few new ideas Message-ID: <20110323052925.92F34282BAD@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3390:eb726ff3aa79 Date: 2011-03-23 01:29 -0400 http://bitbucket.org/pypy/extradoc/changeset/eb726ff3aa79/ Log: a few new ideas diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -50,10 +50,13 @@ re.search("(ab)+", "a" * 1000 + "b") almost doesn't get compiled and gets very modest speedups with the JIT on (10-20%) +- consider an automated way to take a function with a loop and generate a + JITable preamble and postamble with a call to the loop in the middle. + OPTIMIZATIONS ------------- -Things we can do mostly by editing optimizeopt.py: +Things we can do mostly by editing optimizeopt/: - getfields which result is never used never get removed (probable cause - they used to be as livevars in removed guards). also getfields which result @@ -77,6 +80,8 @@ Should be just a matter of synthesizing reverse operations in rewrite.py +- strlen result is not reused + PYTHON EXAMPLES --------------- From commits-noreply at bitbucket.org Wed Mar 23 07:50:07 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Wed, 23 Mar 2011 07:50:07 +0100 (CET) Subject: [pypy-svn] jitviewer default: handle unicode here. Message-ID: <20110323065007.7960D282BAD@codespeak.net> Author: Alex Gaynor Branch: Changeset: r112:0cd27965a3bd Date: 2011-03-23 02:49 -0400 http://bitbucket.org/pypy/jitviewer/changeset/0cd27965a3bd/ Log: handle unicode here. 
diff --git a/_jitviewer/display.py b/_jitviewer/display.py --- a/_jitviewer/display.py +++ b/_jitviewer/display.py @@ -5,7 +5,7 @@ """ A representation of a single line """ def __init__(self, line, in_loop, chunks=None): - self.line = line + self.line = line.decode("utf-8") self.in_loop = in_loop if chunks is None: self.chunks = [] From commits-noreply at bitbucket.org Wed Mar 23 10:33:23 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 10:33:23 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: kill this line which was resurrected by mistake during the merge Message-ID: <20110323093323.88500282BD6@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42861:bc73f43a0153 Date: 2011-03-22 21:09 +0100 http://bitbucket.org/pypy/pypy/changeset/bc73f43a0153/ Log: kill this line which was resurrected by mistake during the merge diff --git a/pypy/module/_rawffi/interp_rawffi.py b/pypy/module/_rawffi/interp_rawffi.py --- a/pypy/module/_rawffi/interp_rawffi.py +++ b/pypy/module/_rawffi/interp_rawffi.py @@ -250,7 +250,6 @@ def get_basic_ffi_type(self): raise NotImplementedError - @unwrap_spec('self', ObjSpace) def descr_get_ffi_type(self, space): # XXX: this assumes that you have the _ffi module enabled. 
In the long # term, probably we will move the code for build structures and arrays From commits-noreply at bitbucket.org Wed Mar 23 10:33:24 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 10:33:24 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix the unwrap specs after the merge Message-ID: <20110323093324.23756282BAD@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42862:af772f3391ac Date: 2011-03-22 21:22 +0100 http://bitbucket.org/pypy/pypy/changeset/af772f3391ac/ Log: fix the unwrap specs after the merge diff --git a/pypy/module/_ffi/interp_ffi.py b/pypy/module/_ffi/interp_ffi.py --- a/pypy/module/_ffi/interp_ffi.py +++ b/pypy/module/_ffi/interp_ffi.py @@ -212,7 +212,6 @@ floatval = libffi.longlong2float(llval) argchain.arg_longlong(floatval) - @unwrap_spec('self', ObjSpace, 'args_w') def call(self, space, args_w): self = jit.hint(self, promote=True) argchain = self.build_argchain(space, args_w) @@ -322,7 +321,6 @@ else: assert False - @unwrap_spec('self', ObjSpace) def getaddr(self, space): """ Return the physical address in memory of the function @@ -340,7 +338,7 @@ restype = unwrap_ffitype(space, w_restype, allow_void=True) return argtypes_w, argtypes, w_restype, restype - at unwrap_spec(ObjSpace, W_Root, r_uint, str, W_Root, W_Root) + at unwrap_spec(addr=r_uint, name=str) def descr_fromaddr(space, w_cls, addr, name, w_argtypes, w_restype): argtypes_w, argtypes, w_restype, restype = unpack_argtypes(space, w_argtypes, @@ -387,7 +385,7 @@ return W_FuncPtr(func, argtypes_w, w_restype) - @unwrap_spec('self', ObjSpace, str) + @unwrap_spec(name=str) def getaddressindll(self, space, name): try: address_as_uint = rffi.cast(lltype.Unsigned, @@ -397,7 +395,7 @@ "No symbol %s found in library %s", name, self.name) return space.wrap(address_as_uint) - at unwrap_spec(name=str) + at unwrap_spec(name='str_or_None') def descr_new_cdll(space, w_type, name): return space.wrap(W_CDLL(space, name)) From commits-noreply at 
bitbucket.org Wed Mar 23 10:33:24 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 10:33:24 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix translation which started to fail because rffi.VOIDP now it's != rffi.CHARP Message-ID: <20110323093324.B7642282BAD@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42863:94f22d127bb1 Date: 2011-03-23 09:35 +0100 http://bitbucket.org/pypy/pypy/changeset/94f22d127bb1/ Log: fix translation which started to fail because rffi.VOIDP now it's != rffi.CHARP diff --git a/pypy/rlib/libffi.py b/pypy/rlib/libffi.py --- a/pypy/rlib/libffi.py +++ b/pypy/rlib/libffi.py @@ -440,7 +440,7 @@ def _free_buffers(self, ll_result, ll_args): if ll_result: - self._free_buffer_maybe(ll_result, self.restype) + self._free_buffer_maybe(rffi.cast(rffi.VOIDP, ll_result), self.restype) for i in range(len(self.argtypes)): argtype = self.argtypes[i] self._free_buffer_maybe(ll_args[i], argtype) From commits-noreply at bitbucket.org Wed Mar 23 13:58:00 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 13:58:00 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: manually re-apply this changeset (limited to function.py); test_callbacks.test_callback_void passes again Message-ID: <20110323125800.9661F2A202D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42864:0dec6052e015 Date: 2011-03-23 11:59 +0100 http://bitbucket.org/pypy/pypy/changeset/0dec6052e015/ Log: manually re-apply this changeset (limited to function.py); test_callbacks.test_callback_void passes again changeset: 41736:b6997048efb8 user: Amaury Forgeot d'Arc date: Wed Feb 09 15:50:44 2011 +0100 summary: Don't print a TypeError when a "void f()" callback function returns None. 
diff --git a/lib_pypy/_ctypes/__init__.py b/lib_pypy/_ctypes/__init__.py --- a/lib_pypy/_ctypes/__init__.py +++ b/lib_pypy/_ctypes/__init__.py @@ -4,7 +4,7 @@ from _ctypes.primitive import _SimpleCData from _ctypes.pointer import _Pointer, _cast_addr from _ctypes.pointer import POINTER, pointer, _pointer_type_cache -from _ctypes.function import CFuncPtr, call_function +from _ctypes.function import CFuncPtr#, call_function from _ctypes.dll import dlopen from _ctypes.structure import Structure from _ctypes.array import Array diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -140,6 +140,8 @@ # A callback into python self.callable = argument ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) + if self._restype_ is None: + ffires = None self._ptr = _rawffi.CallbackPtr(self._wrap_callable(argument, self.argtypes), ffiargs, ffires, self._flags_) From commits-noreply at bitbucket.org Wed Mar 23 13:58:01 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 13:58:01 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: manually re-apply the following changesets: Message-ID: <20110323125801.2B42E2A202D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42865:a7c273787e2a Date: 2011-03-23 13:43 +0100 http://bitbucket.org/pypy/pypy/changeset/a7c273787e2a/ Log: manually re-apply the following changesets: changeset: 42574:b16c3795183e user: tav date: Sun Mar 13 18:58:12 2011 +0000 summary: Added kwargs/extended paramflags support and tests to ctypes.CFUNCTYPE. changeset: 42530:04456b424578 parent: 42528:0d2a71cb22e1 user: tav date: Sat Mar 12 18:09:08 2011 +0000 summary: Improved ctypes.CFUNCTYPE prototype and paramflags handling. 
diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -9,11 +9,24 @@ import sys import traceback + # XXX this file needs huge refactoring I fear PARAMFLAG_FIN = 0x1 PARAMFLAG_FOUT = 0x2 PARAMFLAG_FLCID = 0x4 +PARAMFLAG_COMBINED = PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID + +VALID_PARAMFLAGS = ( + 0, + PARAMFLAG_FIN, + PARAMFLAG_FIN | PARAMFLAG_FOUT, + PARAMFLAG_FIN | PARAMFLAG_FLCID + ) + +WIN64 = sys.platform == 'win32' and sys.maxint == 2**63 - 1 + + def get_com_error(errcode, riid, pIunk): "Win32 specific: build a COM Error exception" # XXX need C support code @@ -59,10 +72,11 @@ def _getargtypes(self): return self._argtypes_ + def _setargtypes(self, argtypes): self._ptr = None if argtypes is None: - self._argtypes_ = None + self._argtypes_ = () else: for i, argtype in enumerate(argtypes): if not hasattr(argtype, 'from_param'): @@ -76,20 +90,68 @@ self._argtypes_ = list(argtypes) argtypes = property(_getargtypes, _setargtypes) + def _getparamflags(self): + return self._paramflags + + def _setparamflags(self, paramflags): + if paramflags is None or not self._argtypes_: + self._paramflags = None + return + if not isinstance(paramflags, tuple): + raise TypeError("paramflags must be a tuple or None") + if len(paramflags) != len(self._argtypes_): + raise ValueError("paramflags must have the same length as argtypes") + for idx, paramflag in enumerate(paramflags): + paramlen = len(paramflag) + name = default = None + if paramlen == 1: + flag = paramflag[0] + elif paramlen == 2: + flag, name = paramflag + elif paramlen == 3: + flag, name, default = paramflag + else: + raise TypeError( + "paramflags must be a sequence of (int [,string [,value]]) " + "tuples" + ) + if not isinstance(flag, int): + raise TypeError( + "paramflags must be a sequence of (int [,string [,value]]) " + "tuples" + ) + _flag = flag & PARAMFLAG_COMBINED + if _flag == PARAMFLAG_FOUT: + typ = 
self._argtypes_[idx] + if getattr(typ, '_ffiargshape', None) not in ('P', 'z', 'Z'): + raise TypeError( + "'out' parameter %d must be a pointer type, not %s" + % (idx+1, type(typ).__name__) + ) + elif _flag not in VALID_PARAMFLAGS: + raise TypeError("paramflag value %d not supported" % flag) + self._paramflags = paramflags + + paramflags = property(_getparamflags, _setparamflags) + + def _getrestype(self): return self._restype_ + def _setrestype(self, restype): self._ptr = None if restype is int: from ctypes import c_int restype = c_int - if not isinstance(restype, _CDataMeta) and not restype is None and \ - not callable(restype): - raise TypeError("Expected ctypes type, got %s" % (restype,)) + if not (isinstance(restype, _CDataMeta) or restype is None or + callable(restype)): + raise TypeError("restype must be a type, a callable, or None") self._restype_ = restype + def _delrestype(self): self._ptr = None del self._restype_ + restype = property(_getrestype, _setrestype, _delrestype) def _geterrcheck(self): @@ -127,17 +189,25 @@ self.name = None self._objects = {keepalive_key(0):self} self._needs_free = True - argument = None - if len(args) == 1: - argument = args[0] - if isinstance(argument, (int, long)): - # direct construction from raw address + # Empty function object -- this is needed for casts + if not args: + self._set_address(0) + return + + argsl = list(args) + argument = argsl.pop(0) + + # Direct construction from raw address + if isinstance(argument, (int, long)) and not argsl: self._set_address(argument) argshapes, resshape = self._ffishapes(self._argtypes_, self._restype_) self._ptr = self._getfuncptr_fromaddress(argshapes, resshape) - elif callable(argument): - # A callback into python + return + + + # A callback into python + if callable(argument) and not argsl: self.callable = argument ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) if self._restype_ is None: @@ -146,33 +216,40 @@ self.argtypes), ffiargs, ffires, self._flags_) 
self._buffer = self._ptr.byptr() - elif isinstance(argument, tuple) and len(argument) == 2: - # function exported from a shared library + return + + # Function exported from a shared library + if isinstance(argument, tuple) and len(argument) == 2: import ctypes - self.name, self.dll = argument - if isinstance(self.dll, str): + self.name, dll = argument + if isinstance(dll, str): self.dll = ctypes.CDLL(self.dll) - # we need to check dll anyway + else: + self.dll = dll + if argsl: + self.paramflags = argsl.pop(0) + if argsl: + raise TypeError("Unknown constructor %s" % (args,)) + # We need to check dll anyway ptr = self._getfuncptr([], ctypes.c_int) self._set_address(ptr.getaddr()) + return - elif (sys.platform == 'win32' and - len(args) >= 2 and isinstance(args[0], (int, long))): - # A COM function call, by index + # A COM function call, by index + if (sys.platform == 'win32' and isinstance(argument, (int, long)) + and argsl): ffiargs, ffires = self._ffishapes(self._argtypes_, self._restype_) - self._com_index = args[0] + 0x1000 - self.name = args[1] - if len(args) > 2: - self._paramflags = args[2] - # XXX ignored iid = args[3] + self._com_index = argument + 0x1000 + self.name = argsl.pop(0) + if argsl: + self.paramflags = argsl.pop(0) + if argsl: + self._com_iid = argsl.pop(0) + if argsl: + raise TypeError("Unknown constructor %s" % (args,)) + return - elif len(args) == 0: - # Empty function object. 
- # this is needed for casts - self._set_address(0) - return - else: - raise TypeError("Unknown constructor %s" % (args,)) + raise TypeError("Unknown constructor %s" % (args,)) def _wrap_callable(self, to_call, argtypes): def f(*args): @@ -182,16 +259,17 @@ return to_call(*args) return f - def __call__(self, *args): + def __call__(self, *args, **kwargs): + argtypes = self._argtypes_ if self.callable is not None: - if len(args) == len(self._argtypes_): + if len(args) == len(argtypes): pass elif self._flags_ & _rawffi.FUNCFLAG_CDECL: - if len(args) < len(self._argtypes_): - plural = len(self._argtypes_) > 1 and "s" or "" + if len(args) < len(argtypes): + plural = len(argtypes) > 1 and "s" or "" raise TypeError( "This function takes at least %d argument%s (%s given)" - % (len(self._argtypes_), plural, len(args))) + % (len(argtypes), plural, len(args))) else: # For cdecl functions, we allow more actual arguments # than the length of the argtypes tuple. @@ -205,7 +283,7 @@ # check that arguments are convertible ## XXX Not as long as ctypes.cast is a callback function with ## py_object arguments... 
- ## self._convert_args(self._argtypes_, args) + ## self._convert_args(self._argtypes_, args, {}) try: res = self.callable(*args) @@ -217,11 +295,17 @@ if self._restype_ is not None: return res return - argtypes = self._argtypes_ + + if argtypes is None: + argtypes = [] if self._com_index: assert False, 'TODO2' from ctypes import cast, c_void_p, POINTER + if not args: + raise ValueError( + "native COM method call without 'this' parameter" + ) thisarg = cast(args[0], POINTER(POINTER(c_void_p))).contents argtypes = [c_void_p] + list(argtypes) args = list(args) @@ -229,9 +313,7 @@ else: thisarg = None - if argtypes is None: - argtypes = [] - newargs, argtypes = self._convert_args(argtypes, args) + newargs, argtypes, outargs = self._convert_args(argtypes, args, kwargs) funcptr = self._getfuncptr(argtypes, self._restype_, thisarg) result = self._call_funcptr(funcptr, *newargs) @@ -239,13 +321,20 @@ if self._errcheck_: v = self._errcheck_(result, self, args) # If the errcheck funtion failed, let it throw - # If the errcheck function returned callargs unchanged, + # If the errcheck function returned newargs unchanged, # continue normal processing. # If the errcheck function returned something else, # use that as result. if v is not args: result = v - return result + + #return result + if not outargs: + return result + if len(outargs) == 1: + return outargs[0] + return tuple(outargs) + def _call_funcptr(self, funcptr, *newargs): @@ -303,6 +392,10 @@ if self._flags_ & _rawffi.FUNCFLAG_CDECL: raise + # Win64 has no stdcall calling conv, so it should also not have the + # name mangling of it. 
+ if WIN64: + raise # For stdcall, try mangled names: # funcname -> _funcname@ # where n is 0, 4, 8, 12, ..., 128 @@ -348,60 +441,81 @@ return cobj._to_ffi_param(), type(cobj) - def _convert_args(self, argtypes, args): + def _convert_args(self, argtypes, args, kwargs, marker=object()): newargs = [] + outargs = [] newargtypes = [] - consumed = 0 + total = len(args) + paramflags = self._paramflags + + if self._com_index: + inargs_idx = 1 + else: + inargs_idx = 0 + + if not paramflags and total < len(argtypes): + raise TypeError("not enough arguments") for i, argtype in enumerate(argtypes): - defaultvalue = None - if i > 0 and self._paramflags is not None: - paramflag = self._paramflags[i-1] - if len(paramflag) == 2: - idlflag, name = paramflag - elif len(paramflag) == 3: - idlflag, name, defaultvalue = paramflag - else: - idlflag = 0 - idlflag &= (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID) - - if idlflag in (0, PARAMFLAG_FIN): - pass - elif idlflag == PARAMFLAG_FOUT: - import ctypes - val = argtype._type_() - wrapped = (val, ctypes.byref(val)) - newargs.append(wrapped._to_ffi_param()) - newargtypes.append(type(wrapped)) - continue - elif idlflag == PARAMFLAG_FIN | PARAMFLAG_FLCID: - # Always taken from defaultvalue if given, - # else the integer 0. 
- val = defaultvalue - if val is None: + flag = 0 + name = None + defval = marker + if paramflags: + paramflag = paramflags[i] + paramlen = len(paramflag) + name = None + if paramlen == 1: + flag = paramflag[0] + elif paramlen == 2: + flag, name = paramflag + elif paramlen == 3: + flag, name, defval = paramflag + flag = flag & PARAMFLAG_COMBINED + if flag == PARAMFLAG_FIN | PARAMFLAG_FLCID: + val = defval + if val is marker: val = 0 newarg, newargtype = self._conv_param(argtype, val) newargs.append(newarg) newargtypes.append(newargtype) - continue + elif flag in (0, PARAMFLAG_FIN): + if inargs_idx < total: + val = args[inargs_idx] + inargs_idx += 1 + elif kwargs and name in kwargs: + val = kwargs[name] + inargs_idx += 1 + elif defval is not marker: + val = defval + elif name: + raise TypeError("required argument '%s' missing" % name) + else: + raise TypeError("not enough arguments") + newarg, newargtype = self._conv_param(argtype, val) + newargs.append(newarg) + newargtypes.append(newargtype) + elif flag == PARAMFLAG_FOUT: + if defval is not marker: + outargs.append(defval) + newarg, newargtype = self._conv_param(argtype, defval) + else: + import ctypes + val = argtype._type_() + outargs.append(val) + newarg = ctypes.byref(val) + newargtype = type(newarg) + newargs.append(newarg) + newargtypes.append(newargtype) else: - raise NotImplementedError( - "paramflags = %s" % (self._paramflags[i-1],)) - - if consumed < len(args): - arg = args[consumed] - elif defaultvalue is not None: - arg = defaultvalue + raise ValueError("paramflag %d not yet implemented" % flag) else: - raise TypeError("Not enough arguments") - - try: - newarg, newargtype = self._conv_param(argtype, arg) - except (UnicodeError, TypeError, ValueError), e: - raise ArgumentError(str(e)) - newargs.append(newarg) - newargtypes.append(newargtype) - consumed += 1 + try: + newarg, newargtype = self._conv_param(argtype, args[i]) + except (UnicodeError, TypeError, ValueError), e: + raise ArgumentError(str(e)) + 
newargs.append(newarg) + newargtypes.append(newargtype) + inargs_idx += 1 if len(newargs) < len(args): extra = args[len(newargs):] @@ -412,7 +526,7 @@ raise ArgumentError(str(e)) newargs.append(newarg) newargtypes.append(newargtype) - return newargs, newargtypes + return newargs, newargtypes, outargs def _wrap_result(self, restype, result): @@ -472,35 +586,6 @@ else: retval = self._wrap_result(restype, result) - results = [] - if self._paramflags: - for obj, paramflag in zip(argsandobjs[1:], self._paramflags): - if len(paramflag) == 2: - idlflag, name = paramflag - elif len(paramflag) == 3: - idlflag, name, defaultvalue = paramflag - else: - idlflag = 0 - idlflag &= (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID) - - if idlflag in (0, PARAMFLAG_FIN): - pass - elif idlflag == PARAMFLAG_FOUT: - val = obj.__ctypes_from_outparam__() - results.append(val) - elif idlflag == PARAMFLAG_FIN | PARAMFLAG_FLCID: - pass - else: - raise NotImplementedError( - "paramflags = %s" % (paramflag,)) - - if results: - if len(results) == 1: - return results[0] - else: - return tuple(results) - - # No output parameter, return the actual function result. 
return retval def __nonzero__(self): From commits-noreply at bitbucket.org Wed Mar 23 13:58:03 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 13:58:03 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: manually re-apply the following changeset: Message-ID: <20110323125803.0F7CC2A202D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42866:acde19d2e8a9 Date: 2011-03-23 13:49 +0100 http://bitbucket.org/pypy/pypy/changeset/acde19d2e8a9/ Log: manually re-apply the following changeset: changeset: 42636:869b900efc11 user: Amaury Forgeot d'Arc date: Mon Mar 14 13:53:10 2011 +0100 summary: Implement _ctypes.call_function() and fix on test in test_random_things diff --git a/lib_pypy/_ctypes/__init__.py b/lib_pypy/_ctypes/__init__.py --- a/lib_pypy/_ctypes/__init__.py +++ b/lib_pypy/_ctypes/__init__.py @@ -4,7 +4,7 @@ from _ctypes.primitive import _SimpleCData from _ctypes.pointer import _Pointer, _cast_addr from _ctypes.pointer import POINTER, pointer, _pointer_type_cache -from _ctypes.function import CFuncPtr#, call_function +from _ctypes.function import CFuncPtr, call_function from _ctypes.dll import dlopen from _ctypes.structure import Structure from _ctypes.array import Array diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -33,6 +33,13 @@ from _ctypes import COMError return COMError(errcode, None, None) +def call_function(func, args): + "Only for debugging so far: So that we can call CFunction instances" + funcptr = CFuncPtr(func) + funcptr.restype = int + return funcptr(*args) + + class CFuncPtrType(_CDataMeta): # XXX write down here defaults and such things @@ -168,6 +175,8 @@ errcheck = property(_geterrcheck, _seterrcheck, _delerrcheck) def _ffishapes(self, args, restype): + if args is None: + args = [] argtypes = [arg._ffiargshape for arg in args] if restype is not None: if not isinstance(restype, SimpleType): From 
commits-noreply at bitbucket.org Wed Mar 23 13:58:03 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 13:58:03 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: manually re-apply the following changeset: Message-ID: <20110323125803.EFF9F2A202D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42867:6da06b14d77e Date: 2011-03-23 13:55 +0100 http://bitbucket.org/pypy/pypy/changeset/6da06b14d77e/ Log: manually re-apply the following changeset: changeset: 42637:6981cdab2ad4 user: Amaury Forgeot d'Arc date: Mon Mar 14 13:56:51 2011 +0100 summary: COM methods are boolean True. Fix one test in test_pointers.py diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -598,7 +598,7 @@ return retval def __nonzero__(self): - return bool(self._buffer[0]) + return self._com_index is not None or bool(self._buffer[0]) def __del__(self): if self._needs_free: From commits-noreply at bitbucket.org Wed Mar 23 13:58:04 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 23 Mar 2011 13:58:04 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix test Message-ID: <20110323125804.D45322A202D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42868:0b2e8fcbcf54 Date: 2011-03-23 13:56 +0100 http://bitbucket.org/pypy/pypy/changeset/0b2e8fcbcf54/ Log: fix test diff --git a/pypy/module/test_lib_pypy/ctypes_tests/test_numbers.py b/pypy/module/test_lib_pypy/ctypes_tests/test_numbers.py --- a/pypy/module/test_lib_pypy/ctypes_tests/test_numbers.py +++ b/pypy/module/test_lib_pypy/ctypes_tests/test_numbers.py @@ -157,7 +157,7 @@ def test_float_from_address(self): - from _rawffi import Array + from array import array for t in float_types: if t is c_longdouble: # no support for 'g' in the array module continue @@ -168,7 +168,6 @@ a[0] = 2.3456e17 assert v.value == a[0] assert type(v) is t - a.free() def test_char_from_address(self): from ctypes 
import c_char From commits-noreply at bitbucket.org Wed Mar 23 15:08:20 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 23 Mar 2011 15:08:20 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: import blog posts into latex. will need tons of tweaking, but it's a start Message-ID: <20110323140820.364B936C20F@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3391:20a682f0b923 Date: 2011-03-23 14:17 +0100 http://bitbucket.org/pypy/extradoc/changeset/20a682f0b923/ Log: import blog posts into latex. will need tons of tweaking, but it's a start diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/paper.tex @@ -0,0 +1,1198 @@ +%\documentclass{acm_proc_article-sp} +\documentclass{sig-alternate} + +\usepackage{ifthen} +\usepackage{fancyvrb} +\usepackage{color} +\usepackage{ulem} +\usepackage{xspace} +\usepackage[utf8]{inputenc} + +\makeatletter +\def\PY at reset{\let\PY at it=\relax \let\PY at bf=\relax% + \let\PY at ul=\relax \let\PY at tc=\relax% + \let\PY at bc=\relax \let\PY at ff=\relax} +\def\PY at tok#1{\csname PY at tok@#1\endcsname} +\def\PY at toks#1+{\ifx\relax#1\empty\else% + \PY at tok{#1}\expandafter\PY at toks\fi} +\def\PY at do#1{\PY at bc{\PY at tc{\PY at ul{% + \PY at it{\PY at bf{\PY at ff{#1}}}}}}} +\def\PY#1#2{\PY at reset\PY at toks#1+\relax+\PY at do{#2}} + +\def\PY at tok@gd{\def\PY at bc##1{\fcolorbox[rgb]{0.80,0.00,0.00}{1.00,0.80,0.80}{##1}}} +\def\PY at tok@gu{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}} +\def\PY at tok@gt{\def\PY at tc##1{\textcolor[rgb]{0.60,0.80,0.40}{##1}}} +\def\PY at tok@gs{\let\PY at bf=\textbf} +\def\PY at tok@gr{\def\PY at tc##1{\textcolor[rgb]{1.00,0.00,0.00}{##1}}} +\def\PY at tok@cm{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} +\def\PY at tok@vg{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} +\def\PY at tok@m{\def\PY at 
tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@mh{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@cs{\let\PY at bf=\textbf\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} +\def\PY at tok@ge{\let\PY at it=\textit} +\def\PY at tok@vc{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} +\def\PY at tok@il{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@go{\def\PY at tc##1{\textcolor[rgb]{0.67,0.67,0.67}{##1}}} +\def\PY at tok@cp{\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,0.60}{##1}}} +\def\PY at tok@gi{\def\PY at bc##1{\fcolorbox[rgb]{0.00,0.80,0.00}{0.80,1.00,0.80}{##1}}} +\def\PY at tok@gh{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}} +\def\PY at tok@ni{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,0.60}{##1}}} +\def\PY at tok@nl{\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}} +\def\PY at tok@nn{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.80,1.00}{##1}}} +\def\PY at tok@no{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.00}{##1}}} +\def\PY at tok@na{\def\PY at tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}} +\def\PY at tok@nb{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}} +\def\PY at tok@nc{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.67,0.53}{##1}}} +\def\PY at tok@nd{\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}} +\def\PY at tok@ne{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.80,0.00,0.00}{##1}}} +\def\PY at tok@nf{\def\PY at tc##1{\textcolor[rgb]{0.80,0.00,1.00}{##1}}} +\def\PY at tok@si{\def\PY at tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}} +\def\PY at tok@s2{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@vi{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} +\def\PY at tok@nt{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}} +\def\PY at tok@nv{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} 
+\def\PY at tok@s1{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@gp{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.00,0.60}{##1}}} +\def\PY at tok@sh{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@ow{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.00,0.00}{##1}}} +\def\PY at tok@sx{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@bp{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}} +\def\PY at tok@c1{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} +\def\PY at tok@kc{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@c{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} +\def\PY at tok@mf{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@err{\def\PY at tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}\def\PY at bc##1{\colorbox[rgb]{1.00,0.67,0.67}{##1}}} +\def\PY at tok@kd{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@ss{\def\PY at tc##1{\textcolor[rgb]{1.00,0.80,0.20}{##1}}} +\def\PY at tok@sr{\def\PY at tc##1{\textcolor[rgb]{0.20,0.67,0.67}{##1}}} +\def\PY at tok@mo{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@mi{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} +\def\PY at tok@kn{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@o{\def\PY at tc##1{\textcolor[rgb]{0.33,0.33,0.33}{##1}}} +\def\PY at tok@kr{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@s{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@kp{\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@w{\def\PY at tc##1{\textcolor[rgb]{0.73,0.73,0.73}{##1}}} +\def\PY at tok@kt{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.47,0.53}{##1}}} +\def\PY at tok@sc{\def\PY at 
tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@sb{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@k{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} +\def\PY at tok@se{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} +\def\PY at tok@sd{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} + +\def\PYZbs{\char`\\} +\def\PYZus{\char`\_} +\def\PYZob{\char`\{} +\def\PYZcb{\char`\}} +\def\PYZca{\char`\^} +% for compatibility with earlier versions +\def\PYZat{@} +\def\PYZlb{[} +\def\PYZrb{]} +\makeatother + + +\ifthenelse{\isundefined{\hypersetup}}{ + \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref} +}{} +\hypersetup{ + pdftitle={Controlling the Tracing of an Interpreter With Hints, Part 1: Controlling the Extent of Tracing}, +} + +\newboolean{showcomments} +\setboolean{showcomments}{false} +\ifthenelse{\boolean{showcomments}} + {\newcommand{\nb}[2]{ + \fbox{\bfseries\sffamily\scriptsize#1} + {\sf\small$\blacktriangleright$\textit{#2}$\blacktriangleleft$} + } + \newcommand{\version}{\emph{\scriptsize$-$Id: main.tex 19055 2008-06-05 11:20:31Z cfbolz $-$}} + } + {\newcommand{\nb}[2]{} + \newcommand{\version}{} + } + +\newcommand\cfbolz[1]{\nb{CFB}{#1}} +\newcommand\toon[1]{\nb{TOON}{#1}} +\newcommand\anto[1]{\nb{ANTO}{#1}} +\newcommand\arigo[1]{\nb{AR}{#1}} +\newcommand\fijal[1]{\nb{FIJAL}{#1}} +\newcommand{\commentout}[1]{} + +\newcommand\ie{i.e.,\xspace} +\newcommand\eg{e.g.,\xspace} + +\normalem + +\let\oldcite=\cite + +\renewcommand\cite[1]{\ifthenelse{\equal{#1}{XXX}}{[citation~needed]}{\oldcite{#1}}} + +% compressing itemize env, in case we need it +\newenvironment{zitemize}% zero - line spacing itemize environment + {\begin{list}{--}{ + \setlength{\itemsep}{0 pt} + \setlength{\parsep}{0 pt} + \setlength{\topsep} {0 pt} }}% the end stuff + {\end{list}} + + +\begin{document} + +\title{XXX in a Tracing JIT Compiler for Efficient Dynamic 
Languages} + +\numberofauthors{4} +\author{ +\alignauthor Carl Friedrich Bolz\\ + \affaddr{University of Düsseldorf}\\ + \affaddr{STUPS Group}\\ + \affaddr{Germany}\\ + \email{cfbolz at gmx.de} +\alignauthor XXX + \affaddr{XXX}\\ + \email{XXX} +} +\conferenceinfo{ICOOOLPS}{'09 Genova, Italy} +\CopyrightYear{2009} +\crdata{978-1-60558-541-3/09/07} + +\maketitle + +\category{D.3.4}{Programming Languages}{Processors}[code generation, +incremental compilers, interpreters, run-time environments] + +\begin{abstract} + + +\end{abstract} + + +\section{Introduction} + + +\section{The PyPy Project} +\label{sect:pypy} + +XXX + + +\section{Tracing JIT Compilers} +\label{sect:tracing} + +XXX + +\section{Controlling The Extent of Tracing} + +The question I was asked most often during my recent \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{US trip} was how exactly +the hints work that interpreter authors can use to improve the execution speed +of the programs running on their interpreters. Since those hints are not really +documented all that well, I decided to write blog posts about them. This is the +first one. + + +%___________________________________________________________________________ + +\subsection{Background} + +First, let's recap some basics: PyPy's approach to implementing dynamic +languages is to write an interpreter for +the language in RPython. This interpreter can be translated to C and then +further to machine code. The interpreter consists of code in the form of a +large number of generated C functions and some data. Similarly, the user +program consists of functions in the language the interpreter executes. + +As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a +meta-tracer. 
Since we want to re-use our tracer for a variety of languages, we +don't trace the execution of the user program, but instead trace the execution +of the \emph{interpreter} that is running the program. This means that the traces +don't contain the bytecodes of the language in question, but RPython-level +operations that the interpreter did to execute the program. + +On the other hand, the loops that are traced by the tracer are the loops in the +user program. This means that the tracer stops tracing after one iteration of +the loop in the user function that is being considered. At this point, it can +have traced many iterations of the interpreter main loop. + +Here's a diagram of this process: + +\begin{figure*} +\includegraphics[scale=0.5]{figures/trace-levels} +\caption{The levels involved in tracing} +\label{fig:trace-levels} +\end{figure*} + +On the left you see the levels of execution. The CPU executes the binary of +PyPy's Python interpreter, which consists of RPython functions that have been +compiled first to C, then to machine code. Some of these functions contain +loops, others don't. The interpreter runs a Python program written by a +programmer (the user). If the tracer is used, it traces operations on the level +of the interpreter. However, the extent of the trace is determined by the loops +in the user program. + + +%___________________________________________________________________________ + +\subsection{How Far Should Tracing Go} + +When the tracer encounters a function call at the interpreter level, e.g. the +interpreter main loop calling a helper function, it can do one of two things: + +\begin{enumerate} +\item it can trace into the helper function, effectively inlining it into the trace. + +\item it can not trace into the function and instead record a call to that function +as an operation in the trace. Such a call operation in the trace is sometimes +called \emph{residual call}. 
+\end{enumerate} + +As a default, the tracer will try to trace into the helper because that will +give more information to the optimizer, allowing it to do a better job. This is +particularly important for the allocation removal optimization, because if a +freshly allocated object is passed as an argument to a residual call, its +allocation cannot be optimized away. + +There is a problem however if the helper function itself contains a loop. The +tracer records the linear sequence of operations that are being executed. Thus +when it encounters a loop on the interpreter level it records all the +operations of every iteration of the loop itself, with the net effect of +unrolling it. The only places where the tracer stops and tries to close the +trace is in the main loop of the interpreter. When the tracer encounters the +main loop, it also checks whether the original user loop has been closed, and +thus whether it can stop tracing. + +For most helper functions in the interpreter that contain loops, fully +unrolling does not make sense. If a loop is unrolled, the trace is specific to +the number of iteration that was seen during tracing. If the trace is later +executed with a different number of iterations, the trace will be left via a +guard failure, which is inefficient. Therefore the default behaviour of the +tracer is to never trace into a function on the interpreter level that contains +a loop, but to trace into all non-looping helper functions. + +This default behaviour is essentially a heuristic, but one that usually makes +sense. We want to produce just enough traces to make the resulting code +efficient, but not more. Therefore we trace as much as possible (everything by +default) except the functions which loops where tracing would produce code that +is less general than it could be. + +As an example for a helper with a loop, take string concatenation. It loops over +the characters of both arguments and copies them over into the result string. 
It +does not make sense to unroll the loops in this function. If we do that, +the resulting trace can only be used for strings of the length that was seen +during tracing. In practise, the string lengths are usually different each run, +meaning that the trace with unrolling is not run to completion in most cases. + + +%___________________________________________________________________________ + +\subsection{Influencing the Default Behaviour} + +Sometimes the default behaviour is not actually what is wanted. This is +something the interpreter author has to decide, usually by looking at the traces +that are produced and deciding that they should be improved. There are two ways +in which the default is wrong: +% +\begin{itemize} + +\item \textbf{false negatives:} if a helper function that \textbf{does} contain a loop should +be traced into, unrolling the loop. + +\item \textbf{false positives:} if a helper function that \textbf{does not} contain a loop is +inlined into the trace, but the interpreter author decides that this is not +helpful. + +\end{itemize} + +If the interpreter author finds false negatives or false positives, she can fix +that by applying a hint to the tracer. These hints take the form of function +decorators (which both live in the \texttt{pypy.rlib.jit} module). In the next two +subsections I will describe these two function decorators and their use. + + +%___________________________________________________________________________ + +\subsubsection{Unrolling Functions With Loops} + +The first decorator, used to fix false negatives, is the \texttt{unroll\_safe} +decorator. It is used to tell the tracer to always trace into a function that +has a loop, effectively unrolling the loop. This decorator should be used only +if the loop in the helper function is expected to always run for the same number +of iterations. 
This sounds like a strong restriction, in practise this is less +severe: The number of iterations needs to only be the same \emph{in the context where +the helper functions is traced from}. + +It is easiest to understand this condition via an example. Let's look at the +\texttt{BUILD\_TUPLE} bytecode in Python. It takes one argument, the length \texttt{n} of +the tuple being built. The bytecode pops \texttt{n} arguments from the stack, turns +them into a tuple and pushes that tuple on the stack. Thus the function that +implements \texttt{BUILD\_TUPLE} in PyPy's Python interpreter calls a helper +\texttt{popvalues} which pops \texttt{n} values from the stack and returns them in a list. +This helper is implemented with a loop and would thus not be traced into by +default. The loop in the helper can run for very different numbers of +iterations, because it is used in a variety of places. However, for every +concrete \texttt{BUILD\_TUPLE} bytecode, the argument will be constant. Therefore it +is safe (and even necessary) to annotate \texttt{popvalues} with the \texttt{unroll\_safe} +decorator. + +A different example is the implementation of the \texttt{isinstance} builtin. It is +used to check whether an object \texttt{a} is an instance of a class \texttt{B} like +this: \texttt{isinstance(a, B)}. The second argument of the function can also be a +tuple of classes to check whether an object is an instance of one of a number of +classes: \texttt{isinstance(a, (A, B, C, D))}. To implement this second case, the +implementation of \texttt{isinstance} contains a loop iterating over the elements of +the tuple. The number of loop iterations can vary, but is usually fixed for each +individual call site which typically just lists a few classes in the source +code. Therefore it is also safe to annotate the implementation of \texttt{isinstance} +with the \texttt{unroll\_safe} decorator. 
+ + +%___________________________________________________________________________ + +\subsubsection{Preventing the Tracing of Functions} + +The second decorator \texttt{dont\_look\_inside} is used to fix false positives. It +tells the JIT to never trace into the decorated function and just always produce +a residual call instead. This decorator is in many ways less important than the +unrolling one (except for a special situation that I will describe in a +follow-up post). It is used if tracing into a function is not expected to yield +any speed benefits, because the optimizer will not be able to improve it much. +This is often the case if the called helper function does not contain any +``dynamic'' behaviour. In such a situation it is better to just leave the function +call in the trace, because that produces less code. + +An example would be the import mechanism in Python. It's very unlikely that any +performance improvement can be had by turning part of it into assembler. +Therefore we hide it from the tracer by annotating them with +\texttt{dont\_look\_inside}. + + +%___________________________________________________________________________ + +\subsection{Conclusion} + +In this post we discussed two hints that can be used to control precisely which +parts of the interpreter should be meta-traced. If these hints are used +carefully, this can go a long way to making the interpreter produce traces that +contain exactly the interesting part of the execution, and will contain calls to +the functions that can not be optimized by tracing techniques. + +In the next part of this series I will discuss a different set of hints that can +be used to strongly optimize traces. + + +% Document title +\section{Controlling the Tracing of an Interpreter With Hints, Part 2: Controlling Optimization} + +This is part 2 of a series on how to speed up an interpreter written with PyPy +by adding JIT hints to the interpreter. 
Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control the +extent of tracing}. In this post I will describe how to add hints that +influence the optimizer. If applied correctly these techniques can give +really big speedups by pre-computing parts of what happens at runtime. On the other +hand, if applied incorrectly they might lead to code bloat, thus making the +resulting program actually slower. + + +%___________________________________________________________________________ + +\subsection{Background} + +Before sending the trace to the backend to produce actual machine code, it is +optimized. The optimizer applies a number of techniques to remove or reduce +the number of operations: most of these are well known \href{http://en.wikipedia.org/wiki/Compiler_optimization\#Optimization_techniques}{compiler optimization +techniques}, with the difference that it is easier to apply them in a tracing +JIT because it only has to deal with linear traces. Among the techniques: +% +\begin{itemize} + +\item \href{http://en.wikipedia.org/wiki/Constant_folding}{constant folding} + +\item \href{http://en.wikipedia.org/wiki/Common_subexpression_elimination}{common subexpression elimination} + +\item allocation removal, as described in the paper that I recently \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{presented at +PEPM} + +\item store/load propagation + +\item \href{http://morepypy.blogspot.com/2011/01/loop-invariant-code-motion.html}{loop invariant code motion} + +\end{itemize} + +In some places it turns out that if the interpreter author rewrites some parts +of the interpreter with these optimizations in mind the traces that are produced +by the optimizer can be vastly improved. + +In this post I will describe two hints that allow the interpreter author to +increase the optimization opportunities for constant folding. 
For constant +folding to work, two conditions need +to be met: +% +\begin{itemize} + +\item the arguments of an operation actually need to all be constant, +i.e. statically known by the optimizer + +\item the operation needs to be \emph{pure}, i.e. always yield the same result given +the same arguments. + +\end{itemize} + +The PyPy JIT generator automatically detects the majority of these conditions. +However, for the cases in which the automatic detection does not work, the +interpreter author can apply \textbf{hints} to improve the optimization +opportunities. There is one kind of hint for both of the conditions above. + +\textbf{Note}: These hints are written by an interpreter developer and applied to the +RPython source of the interpreter. Normal Python users will never see them. + + +%___________________________________________________________________________ + +\subsection{Where Do All the Constants Come From} + +It is worth clarifying what is a ``constant'' in this context. A variable of +the trace is said to be constant if its value is statically known by the +optimizer. + +The simplest example of constants are literal values. For example, if in the +RPython source code we have a line like \texttt{y = x + 1}, the second operand will +be a constant in the trace. + +However, the optimizer can statically know the value of a variable even if it +is not a constant in the original source code. For example, consider the +following fragment of RPython code: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{if} \PY{n}{x} \PY{o}{==} \PY{l+m+mi}{4}\PY{p}{:} + \PY{n}{y} \PY{o}{=} \PY{n}{y} \PY{o}{+} \PY{n}{x} +\end{Verbatim} + +If the fragment is traced with \texttt{x} being \texttt{4}, the following trace is +produced: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(x~==~4)\\ +y~=~y~+~x +} +\end{quote} + +In the trace above, the value of \texttt{x} is statically known thanks to the +guard. Remember that a guard is a runtime check. 
The above trace will run to +completion when \texttt{x == 4}. If the check fails, execution of the trace is +stopped and the interpreter continues to run. + +There are cases in which it is useful to turn an arbitrary variable +into a constant value. This process is called \emph{promotion} and it is an old idea +in partial evaluation (it's called ``the trick'' there). Promotion is also heavily +used by \href{http://psyco.sourceforge.net/}{Psyco} and by all older versions of PyPy's JIT. Promotion is a technique +that only works well in JIT compilers, in +static compilers it is significantly less applicable. + +Promotion is essentially a tool for trace specialization. In some places in the +interpreter it would be very useful if a variable were constant, even though it +could have different values in practice. In such a place, promotion is used. The +typical reason to do that is if there is +a lot of computation depending on the value of that variable. + +Let's make this more concrete. If we trace a call to the following function: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{def} \PY{n+nf}{f1}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} + \PY{n}{z} \PY{o}{=} \PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} + \PY{k}{return} \PY{n}{z} \PY{o}{+} \PY{n}{y} +\end{Verbatim} + +We get a trace that looks like this: +% +\begin{quote}{\ttfamily \raggedright \noindent +v1~=~x~*~2\\ +z~=~v1~+~1\\ +v2~=~z~+~y\\ +return(v2) +} +\end{quote} + +Observe how the first two operations could be constant-folded if the value of +\texttt{x} were known. Let's assume that the value of \texttt{x} can vary, but does so +rarely, i.e. only takes a few different values at runtime. 
If this is the +case, we can add a hint to promote \texttt{x}, like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{def} \PY{n+nf}{f2}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} + \PY{n}{x} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{z} \PY{o}{=} \PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} + \PY{k}{return} \PY{n}{z} \PY{o}{+} \PY{n}{y} +\end{Verbatim} + +The meaning of this hint is that the tracer should pretend that \texttt{x} is a +constant +in the code that follows. When just running the code, the function has no +effect, as it simply returns its first argument. When tracing, some extra work +is done. Let's assume that this changed function is traced with +the arguments \texttt{4} and \texttt{8}. The trace will be the same, except for one +operation at the beginning: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(x~==~4)\\ +v1~=~x~*~2\\ +z~=~v1~+~1\\ +v2~=~z~+~y\\ +return(v2) +} +\end{quote} + +The promotion is turned into a \texttt{guard} operation in the trace. The guard +captures the value of \texttt{x} as it was at runtime. From the point of view of the +optimizer, this guard is not any different than the one produced by the \texttt{if} +statement in the example above. After the guard, the rest of the trace can +assume that \texttt{x} is equal to \texttt{4}, meaning that the optimizer will turn this +trace into: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(x~==~4)\\ +v2~=~9~+~y\\ +return(v2) +} +\end{quote} + +Notice how the first two arithmetic operations were constant folded. The hope is +that the guard is executed quicker than the multiplication and the addition that +was now optimized away. + +If this trace is executed with values of \texttt{x} other than \texttt{4}, the guard will +fail, and execution will continue in the interpreter. If the guard fails often +enough, a new trace will be started from the guard. 
This other trace will +capture a different value of \texttt{x}. If it is e.g. \texttt{2}, then the optimized +trace looks like this: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(x~==~2)\\ +v2~=~5~+~y\\ +return(v2) +} +\end{quote} + +This new trace will be attached to the guard instruction of the first trace. If +\texttt{x} takes on even more values, a new trace will eventually be made for all of them, +linking them into a chain. This is clearly not desirable, so we should promote +only variables that don't vary much. However, adding a promotion hint will never produce wrong +results. It might just lead to too much assembler code. + +Promoting integers, as in the examples above, is not used that often. +However, the internals of dynamic language interpreters often +have values that are variable but vary little in the context of parts of a user +program. An example would be the types of variables in a user function. Even +though in principle the argument to a Python function could be any Python type, +in practice the argument types tend to not vary often. Therefore it is possible to +promote the types. In the next blog post I will give a complete example of how +this works. + + +%___________________________________________________________________________ + +\subsection{Declaring New Pure Operations} + +In the last section we saw a way to turn arbitrary variables into constants. All +pure operations on these constants can be constant-folded. This works great for +constant folding of simple types, e.g. integers. Unfortunately, in the context of an +interpreter for a dynamic +language, most operations actually manipulate objects, not simple types. The +operations on objects are often not pure and might even have side-effects. If +one reads a field out of a constant reference to an object this cannot +necessarily be folded away because the object can be mutated. Therefore, another +hint is needed. 
+ +As an example, take the following class: + +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{A}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{=} \PY{n}{x} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n}{y} + + \PY{k}{def} \PY{n+nf}{f}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{val}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{compute}\PY{p}{(}\PY{p}{)} \PY{o}{+} \PY{n}{val} + + \PY{k}{def} \PY{n+nf}{compute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} +\end{Verbatim} + +Tracing the call \texttt{a.f(10)} of some instance of \texttt{A} yields the following +trace (note how the call to \texttt{compute} is inlined): +% +\begin{quote}{\ttfamily \raggedright \noindent +x~=~a.x\\ +v1~=~x~*~2\\ +v2~=~v1~+~1\\ +v3~=~v2~+~val\\ +a.y~=~v3 +} +\end{quote} + +In this case, adding a promote of \texttt{self} in the \texttt{f} method to get rid of the +computation of the first few operations does not help. Even if \texttt{a} is a +constant reference to an object, reading the \texttt{x} field does not necessarily +always yield the same value. To solve this problem, there is another annotation, +which lets the interpreter author communicate invariants to the optimizer. In +this case, she could decide that the \texttt{x} field of instances of \texttt{A} is +immutable, and therefore \texttt{compute} +is a pure function. To communicate this, there is a \texttt{purefunction} decorator. 
+If the code in \texttt{compute} should be constant-folded away, we would change the +class as follows: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{A}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{=} \PY{n}{x} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n}{y} + + \PY{k}{def} \PY{n+nf}{f}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{val}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{compute}\PY{p}{(}\PY{p}{)} \PY{o}{+} \PY{n}{val} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{compute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} +\end{Verbatim} + +Now the trace will look like this: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(a~==~0xb73984a8)\\ +v1~=~compute(a)\\ +v2~=~v1~+~val\\ +a.y~=~v2 +} +\end{quote} + +Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used +during tracing. The call to \texttt{compute} is not inlined, so that the optimizer +has a chance to see it. Since \texttt{compute} function is marked as pure, and its +argument +is a constant reference, the call will be removed by the optimizer. The final +trace looks like this: +% +\begin{quote}{\ttfamily \raggedright \noindent +guard(a~==~0xb73984a8)\\ +v2~=~9~+~val\\ +a.y~=~v2 +} +\end{quote} + +(assuming that the \texttt{x} field's value is \texttt{4}). + +On the one hand, the \texttt{purefunction} annotation is very powerful. It can be +used to constant-fold arbitrary parts of the computation in the interpreter. 
+However, the annotation also gives you ample opportunity to mess things up. If a +function is annotated to be pure, but is not really, the optimizer can produce +subtly wrong code. Therefore, a lot of care has to be taken when using this +annotation. + + +%___________________________________________________________________________ + +\subsubsection{Observably Pure Functions} + +Why can't we simply write an analysis to find out that the \texttt{x} fields of the +\texttt{A} instances is immutable and deduce that \texttt{compute} is a pure function, +since it only reads the \texttt{x} field and does not have side effects? This might +be possible in this particular case, but in practice the functions that are +annotated with the \texttt{purefunction} decorator are usually more complex. +The easiest example for this is that of a function that uses memoization to +cache its results. If you analyze this function, it looks like the function has +side effects, because it changes the memoizing dictionary. However, because this side +effect is not externally visible, the function from the outside is pure. This is +a property that is not easily detectable by analysis. Therefore, the purity +of this function needs to be annotated. + + +%___________________________________________________________________________ + +\subsubsection{Immutable Fields} + +One of the most common cases of pure functions is reading immutable +values out of objects. Since this is so common, we have special syntactic sugar +for it. A RPython class can have a class attribute \texttt{\_immutable\_fields\_} set to +a list of strings, listing the fields that cannot be changed. This is equivalent +to using getters and annotating them with \texttt{purefunction}. + + +%___________________________________________________________________________ + +\subsection{Conclusion} + +In this blog post I explained two more hints that can be used in the source code +of the interpreter. 
They are used to influence what the optimizer does with the +trace. I realize the examples given here are a bit too small, in the next +installment I will give a worked-out example that puts all the pieces together. + +\section{Controlling the Tracing of an Interpreter With Hints, Part 3: Putting Things Together} + +This is part 3 of the series on how to speed up an interpreter written with +PyPy by adding JIT hints to the interpreter. Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control +the extent of tracing}. Part 2 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with_15.html}{influence the optimizer with +promotion and pure functions}. In this post I describe a worked-out example of +a small object model for a dynamic language and how to make it efficient using +the hints described in the previous posts. + + +%___________________________________________________________________________ + +\subsection{A Simple Object Model} + +To implement a dynamic language efficiently, the operations on its objects need +to be fast. Most dynamic languages have object models that are made by using +dictionaries everywhere. Let's look at an example of how the JIT can be made to +optimize such operations. + +For the purpose of this blog post we will use a very simple and bare-bones +object model that just supports very simple classes and instances, without any +inheritance or any fancy features. The model has classes, which contain methods. +Instances have a class. Instances have their own attributes. When looking up an +attribute on an instance, the instances attributes are searched. If the +attribute is not found there, the class' attributes are searched. 
+ +To implement this object model, we could use the following RPython code as part +of the interpreter source code: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{k}{def} \PY{n+nf}{instantiate}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n}{Instance}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + + +\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} 
\PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + + \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{try}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} +\end{Verbatim} + +In this straightforward implementation the methods and attributes are just +stored in dictionaries on the classes/instances. While this object model is very +simple it already contains all the hard parts of Python's object model. Both +instances and classes can have arbitrary fields, and they are changeable at +any time. Moreover, instances can change their class after they have been +created. + +When using this object model in +an interpreter, a huge amount of time will be spent doing lookups in these +dictionaries. To make the language efficient using a tracing JIT, we need to +find a way to get rid of these dictionary lookups somehow. 
+ +Let's assume we trace through code that sums three attributes, such as: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} \PY{o}{+} \PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} \PY{o}{+} \PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\end{Verbatim} + +The trace could look like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{c}{# inst.getattr("a")} +\PY{n}{attributes1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} +\PY{n}{result1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result1} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} + +\PY{c}{# inst.getattr("b")} +\PY{n}{attributes2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} +\PY{n}{v1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{v1} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods1} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} +\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} + +\PY{c}{# inst.getattr("c")} +\PY{n}{attributes3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} +\PY{n}{v3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{v3} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods2} \PY{o}{=} 
\PY{n}{cls1}\PY{o}{.}\PY{n}{methods}
I've \href{http://morepypy.blogspot.com/2010/11/efficiently-implementing-python-objects.html}{written about maps +before}, so I won't explain them fully again. + +The rewritten \texttt{Instance} class using maps looks like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:} + \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} + \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)} + \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{newmap} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} + + +\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} + +\PY{k}{class} 
\PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]} + + \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value} + \PY{k}{return} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{try}\PY{p}{:} + \PY{k}{return} 
\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} +\end{Verbatim} + +Instances no longer use dictionaries to store their fields. Instead, they have a +reference to a map, which maps field names to indexes into a storage list. The +storage list contains the actual field values. The maps are shared between +objects with the same layout. Therefore they have to be immutable, which means +that their \texttt{getindex} method is a pure function. When a new attribute is added +to an instance, a new map needs to be chosen, which is done with the +\texttt{new\_map\_with\_additional\_attribute} method on the previous map. Now that we have +introduced maps, it is safe to promote the map everywhere, because we assume +that the number of different instance layouts is small. + +With this changed instance implementation, the trace we had above changes to the +following, where \texttt{0xb74af4a8} is the memory address of the Map instance that +has been promoted: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{c}{# inst.getattr("a")} +\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index1} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index1} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} +\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{n}{index1}\PY{p}{]} + +\PY{c}{# inst.getattr("b")} +\PY{n}{map2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map2} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index2} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map2}\PY{p}{,} 
\PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index2} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods1} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} +\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} + +\PY{c}{# inst.getattr("c")} +\PY{n}{map3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map3} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index3} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index3} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods2} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} +\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} + +\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} +\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} +\end{Verbatim} + +The calls to \texttt{Map.getindex} can be optimized away, because they are calls to +a pure function and they have constant arguments. That means that \texttt{index1/2/3} +are constant and the guards on them can be removed. All but the first guard on +the map will be optimized away too, because the map cannot have changed in +between. 
The optimized trace looks like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{c}{# inst.getattr("a")} +\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} +\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{l+m+mi}{0}\PY{p}{]} + +\PY{c}{# inst.getattr("b")} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{methods} +\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} + +\PY{c}{# inst.getattr("c")} +\PY{n}{cls2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{methods2} \PY{o}{=} \PY{n}{cls2}\PY{o}{.}\PY{n}{methods} +\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} + +\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} +\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} +\end{Verbatim} + +The index \texttt{0} that is used to read out of the \texttt{storage} array is the result +of the constant-folded \texttt{getindex} call. This trace is already much better than +the original one. Now we are down from five dictionary lookups to just two. + + +%___________________________________________________________________________ + +\subsection{Versioning of Classes} + +Instances were optimized making the assumption that the total number of +Instance layouts is small compared to the number of instances. For classes we +will make an even stronger assumption. We simply assume that it is rare for +classes to change at all. 
This is not totally reasonable (sometimes classes contain +counters or similar things) but for this simple example it is good enough. + +What we would really like is if the \texttt{Class.find\_method} method were pure. +But it cannot be, because it is always possible to change the class itself. +Every time the class changes, \texttt{find\_method} can potentially return a +new value. + +Therefore, we give every class a version number, which is increased every time a +class gets changed (i.e., the content of the \texttt{methods} dictionary changes). +This means that the result of \texttt{methods.get()} for a given \texttt{(name, +version)} pair will always be the same, i.e. it is a pure operation. To help +the JIT to detect this case, we factor it out in a helper method which is +explicitly marked as \texttt{@purefunction}. The refactored \texttt{Class} looks like +this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{VersionTag}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{pass} + +\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{version} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{,} 
\PY{n}{version}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} +\end{Verbatim} + +What is interesting here is that \texttt{\_find\_method} takes the \texttt{version} +argument but it does not use it at all. Its only purpose is to make the call +pure (because when the version number changes, the result of the call might be +different than the previous one). 
+ +The trace with this new class implementation looks like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{c}{# inst.getattr("a")} +\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index1} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index1} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} +\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{n}{index1}\PY{p}{]} + +\PY{c}{# inst.getattr("b")} +\PY{n}{map2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map2} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index2} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index2} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{guard}\PY{p}{(}\PY{n}{cls1} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} +\PY{n}{version1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{version} +\PY{n}{guard}\PY{p}{(}\PY{n}{version1} \PY{o}{==} \PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} +\PY{n}{result2} \PY{o}{=} \PY{n}{Class}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{cls}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{,} \PY{n}{version1}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} +\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} + +\PY{c}{# inst.getattr("c")} +\PY{n}{map3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map3} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{index3} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{index3} \PY{o}{==} 
\PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} +\PY{n}{cls2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{guard}\PY{p}{(}\PY{n}{cls2} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} +\PY{n}{version2} \PY{o}{=} \PY{n}{cls2}\PY{o}{.}\PY{n}{version} +\PY{n}{guard}\PY{p}{(}\PY{n}{version2} \PY{o}{==} \PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} +\PY{n}{result3} \PY{o}{=} \PY{n}{Class}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{cls}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{,} \PY{n}{version2}\PY{p}{)} +\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} + +\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} +\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} +\end{Verbatim} + +The calls to \texttt{Class.\_find\_method} can now be optimized away, also the +promotion of the class and the version, except for the first one. The final +optimized trace looks like this: +\begin{Verbatim}[commandchars=\\\{\}] +\PY{c}{# inst.getattr("a")} +\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} +\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} +\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} +\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{l+m+mi}{0}\PY{p}{]} + +\PY{c}{# inst.getattr("b")} +\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} +\PY{n}{guard}\PY{p}{(}\PY{n}{cls1} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} +\PY{n}{version1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{version} +\PY{n}{guard}\PY{p}{(}\PY{n}{version1} \PY{o}{==} \PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} +\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{l+m+mi}{41} + +\PY{c}{# inst.getattr("c")} +\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{l+m+mi}{17} +\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} +\end{Verbatim} + +The constants \texttt{41} and \texttt{17} are the results of the folding of the +\texttt{\_find\_method`} calls. This final trace is now very good. It no longer performs any +dictionary lookups. 
Instead it contains several guards. The first guard +checks that the map is still the same. This guard will fail if the same +code is executed with an instance that has another layout. The second guard +checks that the class of \texttt{inst} is still the same. It will fail if the trace is +executed with an instance of another class. The third guard checks that the +class did not change since the trace was produced. It will fail if somebody +calls the \texttt{change\_method} method on the class. + + +%___________________________________________________________________________ + +\subsection{Real-World Considerations} + +The techniques used above for the simple object model are used for the object +model of PyPy's Python interpreter too. Since Python's object model is +considerably more complex, some additional work needs to be done. + +The first problem that needs to be solved is that Python supports (multiple) +inheritance. Therefore looking up a method in a class needs to consider the +whole method resolution order. This makes the versioning of classes more +complex. If a class is changed, its version changes. At the same time, the +versions of all the classes inheriting from it need to be changed as well, +recursively. This makes class changes expensive, but they should be rare. On the +other hand, a method lookup in a complex class hierarchy is as optimized in the +trace as in our object model here. + +A downside of the versioning of classes that we haven't yet fixed in PyPy is +that some classes \emph{do} change a lot. An example would be a class that keeps a +counter of how many instances have been created so far. This is very slow right +now, but we have ideas about how to fix it in the future. + +Another optimization is that in practice the shape of an instance is correlated +with its class. In our code above, we allow both to vary independently. +In PyPy's Python interpreter we act somewhat more cleverly. 
The class of +an instance is not stored on the instance itself, but on the map. This means +that we get one fewer promotion (and thus one fewer guard) in the trace, because the class doesn't need to +be promoted after the map has been. + + +%___________________________________________________________________________ + +\subsection{More General Patterns} + +The techniques we used above to make instance and class lookups faster are +applicable in more general cases than the one we developed them for. A more +abstract view of maps is that of splitting a data-structure into a part that +changes slowly, and a part that changes quickly. In the concrete example of maps +we split the original dictionary into the map (the slow-changing part) and the +storage array (the quick-changing part). All the computation on the +slow-changing part can be constant-folded during tracing so that only the +manipulation of the quick-changing part remains. + +Similarly, versions can be used to constant-fold arbitrary functions of large data +structures. The version needs to be updated carefully every time the result of +this function can change. Therefore this is useful only if the data structure is +expected to change slowly. + + +%___________________________________________________________________________ + +\subsection{Conclusion} + +In this post I showed how to use \texttt{purefunction} and \texttt{promote} to make a +small but still relevant dynamic object model no longer use any dictionary lookups +after tracing. Instead a number of guards are inserted into the +trace to check whether the assumptions about the objects are still true. This +makes operations on objects seriously faster. I plan to write another small post +that shows the speed benefits for PyPy's Python interpreter for exactly these +operations. 
+ +\section{Evaluation} +\label{sect:evaluation} + +\section{Related Work} + +\section{Conclusion and Next Steps} + +\section*{Acknowledgements} + +\bibliographystyle{abbrv} +\bibliography{paper} + +\end{document} diff --git a/talk/icooolps2011/figures/trace-levels.svg b/talk/icooolps2011/figures/trace-levels.svg new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/figures/trace-levels.svg @@ -0,0 +1,849 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + image/svg+xml + + + + + + + + + CPU + + + + Python Interpreter in RPython + + User Program in Python + + + f1 + + + + + + + f2 + + + + f3 + + + + f4 + + + + + g + + + + + + main_loop + + + + + + + string_concat + + + + + + + BUILD_TUPLE + + + + popvalues + + + + + import_helper1 + + + + + Trace for f1 + + + + + + ... + ... + + ... + + ops from main_loop...more ops frommain_loop...ops fromBUILD_TUPLEops frompopvaluesresidual call ...even more opsfrommain_loop...jump to start ... + + diff --git a/talk/icooolps2011/Makefile b/talk/icooolps2011/Makefile new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/Makefile @@ -0,0 +1,13 @@ + +jit-hints.pdf: paper.tex paper.bib + pdflatex paper + bibtex paper + pdflatex paper + pdflatex paper + mv paper.pdf jit-hints.pdf + +view: jit-hints.pdf + evince jit-hints.pdf & + +xpdf: jit-hints.pdf + xpdf jit-hints.pdf & diff --git a/talk/icooolps2011/sig-alternate.cls b/talk/icooolps2011/sig-alternate.cls new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/sig-alternate.cls @@ -0,0 +1,1603 @@ +% SIG-ALTERNATE.CLS - VERSION 2.3 +% "COMPATIBLE" WITH THE "ACM_PROC_ARTICLE-SP.CLS" V3.1SP +% Gerald Murray June 7th. 2007 +% +% ---- Start of 'updates' ---- +% +% To produce Type 1 fonts in the document plus allow for 'normal LaTeX accenting' in the critical areas; +% title, author block, section-heads, confname, etc. etc. +% i.e. 
the whole purpose of this version update is to NOT resort to 'inelegant accent patches'. +% After much research, three extra .sty packages were added to the the tail (ae, aecompl, aeguill) to solve, +% in particular, the accenting problem(s). We _could_ ask authors (via instructions/sample file) to 'include' these in +% the source .tex file - in the preamble - but if everything is already provided ('behind the scenes' - embedded IN the .cls) +% then this is less work for authors and also makes everything appear 'vanilla'. +% NOTE: all 'patchwork accenting" has been commented out (here) and is no longer 'used' in the sample .tex file (either). +% Gerry June 2007 +% +% Patch for accenting in conference name/location. Gerry May 3rd. 2007 +% Rule widths changed to .5, author count (>6) fixed, roll-back for Type 3 problem. Gerry March 20th. 2007 +% Changes made to 'modernize' the fontnames but esp. for MikTeX users V2.4/2.5 - Nov. 30th. 2006 +% Updated the \email definition to allow for its use inside of 'shared affiliations' - Nov. 30th. 2006 +% Fixed the 'section number depth value' - Nov. 30th. 2006 +% +% Footnotes inside table cells using \minipage (Oct. 2002) +% Georgia fixed bug in sub-sub-section numbering in paragraphs (July 29th. 2002) +% JS/GM fix to vertical spacing before Proofs (July 30th. 2002) +% +% Made the Permission Statement / Conference Info / Copyright Info +% 'user definable' in the source .tex file OR automatic if +% not specified. +% +% Allowance made to switch default fonts between those systems using +% normal/modern font names and those using 'Type 1' or 'Truetype' fonts. +% See LINE NUMBER 255 for details. +% Also provided for enumerated/annotated Corollaries 'surrounded' by +% enumerated Theorems (line 848). +% Gerry November 11th. 1999 +% +% ---- End of 'updates' ---- +% +\def\fileversion{v2.3} % for ACM's tracking purposes +\def\filedate{June 7, 2007} % Gerry Murray's tracking data +\def\docdate {Thursday 7th. 
June 2007} % Gerry Murray (with deltas to doc} +\usepackage{epsfig} +\usepackage{amssymb} +\usepackage{amsmath} +\usepackage{amsfonts} +% Need this for accents in Arial/Helvetica +%\usepackage[T1]{fontenc} % Gerry March 12, 2007 - causes Type 3 problems (body text) +%\usepackage{textcomp} +% +% SIG-ALTERNATE DOCUMENT STYLE +% G.K.M. Tobin August-October 1999 +% adapted from ARTICLE document style by Ken Traub, Olin Shivers +% also using elements of esub2acm.cls +% HEAVILY MODIFIED, SUBSEQUENTLY, BY GERRY MURRAY 2000 +% ARTICLE DOCUMENT STYLE -- Released 16 March 1988 +% for LaTeX version 2.09 +% Copyright (C) 1988 by Leslie Lamport +% +% +%%% sig-alternate.cls is an 'ALTERNATE' document style for producing +%%% two-column camera-ready pages for ACM conferences. +%%% THIS FILE DOES NOT STRICTLY ADHERE TO THE SIGS (BOARD-ENDORSED) +%%% PROCEEDINGS STYLE. It has been designed to produce a 'tighter' +%%% paper in response to concerns over page budgets. +%%% The main features of this style are: +%%% +%%% 1) Two columns. +%%% 2) Side and top margins of 4.5pc, bottom margin of 6pc, column gutter of +%%% 2pc, hence columns are 20pc wide and 55.5pc tall. (6pc =3D 1in, approx) +%%% 3) First page has title information, and an extra 6pc of space at the +%%% bottom of the first column for the ACM copyright notice. +%%% 4) Text is 9pt on 10pt baselines; titles (except main) are 9pt bold. +%%% +%%% +%%% There are a few restrictions you must observe: +%%% +%%% 1) You cannot change the font size; ACM wants you to use 9pt. +%%% 3) You must start your paper with the \maketitle command. Prior to the +%%% \maketitle you must have \title and \author commands. If you have a +%%% \date command it will be ignored; no date appears on the paper, since +%%% the proceedings will have a date on the front cover. +%%% 4) Marginal paragraphs, tables of contents, lists of figures and tables, +%%% and page headings are all forbidden. 
+%%% 5) The `figure' environment will produce a figure one column wide; if you +%%% want one that is two columns wide, use `figure*'. +%%% +% +%%% Copyright Space: +%%% This style automatically reserves 1" blank space at the bottom of page 1/ +%%% column 1. This space can optionally be filled with some text using the +%%% \toappear{...} command. If used, this command must be BEFORE the \maketitle +%%% command. If this command is defined AND [preprint] is on, then the +%%% space is filled with the {...} text (at the bottom); otherwise, it is +%%% blank. If you use \toappearbox{...} instead of \toappear{...} then a +%%% box will be drawn around the text (if [preprint] is on). +%%% +%%% A typical usage looks like this: +%%% \toappear{To appear in the Ninth AES Conference on Medievil Lithuanian +%%% Embalming Technique, June 1991, Alfaretta, Georgia.} +%%% This will be included in the preprint, and left out of the conference +%%% version. +%%% +%%% WARNING: +%%% Some dvi-ps converters heuristically allow chars to drift from their +%%% true positions a few pixels. This may be noticeable with the 9pt sans-serif +%%% bold font used for section headers. +%%% You may turn this hackery off via the -e option: +%%% dvips -e 0 foo.dvi >foo.ps +%%% +\typeout{Document Class 'sig-alternate' <7th. June '07>. Modified by G.K.M. Tobin/Gerry Murray} +\typeout{Based in part upon document Style `acmconf' <22 May 89>. Hacked 4/91 by} +\typeout{shivers at cs.cmu.edu, 4/93 by theobald at cs.mcgill.ca} +\typeout{Excerpts were taken from (Journal Style) 'esub2acm.cls'.} +\typeout{****** Bugs/comments/suggestions/technicalities to Gerry Murray -- murray at hq.acm.org ******} +\typeout{Questions on the style, SIGS policies, etc. 
to Adrienne Griscti griscti at acm.org} +\oddsidemargin 4.5pc +\evensidemargin 4.5pc +\advance\oddsidemargin by -1in % Correct for LaTeX gratuitousness +\advance\evensidemargin by -1in % Correct for LaTeX gratuitousness +\marginparwidth 0pt % Margin pars are not allowed. +\marginparsep 11pt % Horizontal space between outer margin and + % marginal note + + % Top of page: +\topmargin 4.5pc % Nominal distance from top of page to top of + % box containing running head. +\advance\topmargin by -1in % Correct for LaTeX gratuitousness +\headheight 0pt % Height of box containing running head. +\headsep 0pt % Space between running head and text. + % Bottom of page: +\footskip 30pt % Distance from baseline of box containing foot + % to baseline of last line of text. +\@ifundefined{footheight}{\newdimen\footheight}{}% this is for LaTeX2e +\footheight 12pt % Height of box containing running foot. + +%% Must redefine the top margin so there's room for headers and +%% page numbers if you are using the preprint option. Footers +%% are OK as is. Olin. +\advance\topmargin by -37pt % Leave 37pt above text for headers +\headheight 12pt % Height of box containing running head. +\headsep 25pt % Space between running head and text. + +\textheight 666pt % 9 1/4 column height +\textwidth 42pc % Width of text line. + % For two-column mode: +\columnsep 2pc % Space between columns +\columnseprule 0pt % Width of rule between columns. +\hfuzz 1pt % Allow some variation in column width, otherwise it's + % too hard to typeset in narrow columns. + +\footnotesep 5.6pt % Height of strut placed at the beginning of every + % footnote =3D height of normal \footnotesize strut, + % so no extra space between footnotes. + +\skip\footins 8.1pt plus 4pt minus 2pt % Space between last line of text and + % top of first footnote. +\floatsep 11pt plus 2pt minus 2pt % Space between adjacent floats moved + % to top or bottom of text page. 
+\textfloatsep 18pt plus 2pt minus 4pt % Space between main text and floats + % at top or bottom of page. +\intextsep 11pt plus 2pt minus 2pt % Space between in-text figures and + % text. +\@ifundefined{@maxsep}{\newdimen\@maxsep}{}% this is for LaTeX2e +\@maxsep 18pt % The maximum of \floatsep, + % \textfloatsep and \intextsep (minus + % the stretch and shrink). +\dblfloatsep 11pt plus 2pt minus 2pt % Same as \floatsep for double-column + % figures in two-column mode. +\dbltextfloatsep 18pt plus 2pt minus 4pt% \textfloatsep for double-column + % floats. +\@ifundefined{@dblmaxsep}{\newdimen\@dblmaxsep}{}% this is for LaTeX2e +\@dblmaxsep 18pt % The maximum of \dblfloatsep and + % \dbltexfloatsep. +\@fptop 0pt plus 1fil % Stretch at top of float page/column. (Must be + % 0pt plus ...) +\@fpsep 8pt plus 2fil % Space between floats on float page/column. +\@fpbot 0pt plus 1fil % Stretch at bottom of float page/column. (Must be + % 0pt plus ... ) +\@dblfptop 0pt plus 1fil % Stretch at top of float page. (Must be 0pt plus ...) +\@dblfpsep 8pt plus 2fil % Space between floats on float page. +\@dblfpbot 0pt plus 1fil % Stretch at bottom of float page. (Must be + % 0pt plus ... ) +\marginparpush 5pt % Minimum vertical separation between two marginal + % notes. + +\parskip 0pt plus 1pt % Extra vertical space between paragraphs. +\parindent 9pt % GM July 2000 / was 0pt - width of paragraph indentation. +\partopsep 2pt plus 1pt minus 1pt% Extra vertical space, in addition to + % \parskip and \topsep, added when user + % leaves blank line before environment. + +\@lowpenalty 51 % Produced by \nopagebreak[1] or \nolinebreak[1] +\@medpenalty 151 % Produced by \nopagebreak[2] or \nolinebreak[2] +\@highpenalty 301 % Produced by \nopagebreak[3] or \nolinebreak[3] + +\@beginparpenalty -\@lowpenalty % Before a list or paragraph environment. +\@endparpenalty -\@lowpenalty % After a list or paragraph environment. +\@itempenalty -\@lowpenalty % Between list items. 
+ +\@namedef{ds at 10pt}{\@latexerr{The `10pt' option is not allowed in the `acmconf' + document style.}\@eha} +\@namedef{ds at 11pt}{\@latexerr{The `11pt' option is not allowed in the `acmconf' + document style.}\@eha} +\@namedef{ds at 12pt}{\@latexerr{The `12pt' option is not allowed in the `acmconf' + document style.}\@eha} + +\@options + +\lineskip 2pt % \lineskip is 1pt for all font sizes. +\normallineskip 2pt +\def\baselinestretch{1} + +\abovedisplayskip 9pt plus2pt minus4.5pt% +\belowdisplayskip \abovedisplayskip +\abovedisplayshortskip \z@ plus3pt% +\belowdisplayshortskip 5.4pt plus3pt minus3pt% +\let\@listi\@listI % Setting of \@listi added 9 Jun 87 + +\def\small{\@setsize\small{9pt}\viiipt\@viiipt +\abovedisplayskip 7.6pt plus 3pt minus 4pt% +\belowdisplayskip \abovedisplayskip +\abovedisplayshortskip \z@ plus2pt% +\belowdisplayshortskip 3.6pt plus2pt minus 2pt +\def\@listi{\leftmargin\leftmargini %% Added 22 Dec 87 +\topsep 4pt plus 2pt minus 2pt\parsep 2pt plus 1pt minus 1pt +\itemsep \parsep}} + +\def\footnotesize{\@setsize\footnotesize{9pt}\ixpt\@ixpt +\abovedisplayskip 6.4pt plus 2pt minus 4pt% +\belowdisplayskip \abovedisplayskip +\abovedisplayshortskip \z@ plus 1pt% +\belowdisplayshortskip 2.7pt plus 1pt minus 2pt +\def\@listi{\leftmargin\leftmargini %% Added 22 Dec 87 +\topsep 3pt plus 1pt minus 1pt\parsep 2pt plus 1pt minus 1pt +\itemsep \parsep}} + +\newcount\aucount +\newcount\originalaucount +\newdimen\auwidth +\auwidth=\textwidth +\newdimen\auskip +\newcount\auskipcount +\newdimen\auskip +\global\auskip=1pc +\newdimen\allauboxes +\allauboxes=\auwidth +\newtoks\addauthors +\newcount\addauflag +\global\addauflag=0 %Haven't shown additional authors yet + +\newtoks\subtitletext +\gdef\subtitle#1{\subtitletext={#1}} + +\gdef\additionalauthors#1{\addauthors={#1}} + +\gdef\numberofauthors#1{\global\aucount=#1 +\ifnum\aucount>3\global\originalaucount=\aucount \global\aucount=3\fi %g} % 3 OK - Gerry March 2007 
+\global\auskipcount=\aucount\global\advance\auskipcount by 1 +\global\multiply\auskipcount by 2 +\global\multiply\auskip by \auskipcount +\global\advance\auwidth by -\auskip +\global\divide\auwidth by \aucount} + +% \and was modified to count the number of authors. GKMT 12 Aug 1999 +\def\alignauthor{% % \begin{tabular} +\end{tabular}% + \begin{tabular}[t]{p{\auwidth}}\centering}% + +% *** NOTE *** NOTE *** NOTE *** NOTE *** +% If you have 'font problems' then you may need +% to change these, e.g. 'arialb' instead of "arialbd". +% Gerry Murray 11/11/1999 +% *** OR ** comment out block A and activate block B or vice versa. +% ********************************************** +% +% -- Start of block A -- (Type 1 or Truetype fonts) +%\newfont{\secfnt}{timesbd at 12pt} % was timenrb originally - now is timesbd +%\newfont{\secit}{timesbi at 12pt} %13 Jan 00 gkmt +%\newfont{\subsecfnt}{timesi at 11pt} % was timenrri originally - now is timesi +%\newfont{\subsecit}{timesbi at 11pt} % 13 Jan 00 gkmt -- was times changed to timesbi gm 2/4/2000 +% % because "normal" is italic, "italic" is Roman +%\newfont{\ttlfnt}{arialbd at 18pt} % was arialb originally - now is arialbd +%\newfont{\ttlit}{arialbi at 18pt} % 13 Jan 00 gkmt +%\newfont{\subttlfnt}{arial at 14pt} % was arialr originally - now is arial +%\newfont{\subttlit}{ariali at 14pt} % 13 Jan 00 gkmt +%\newfont{\subttlbf}{arialbd at 14pt} % 13 Jan 00 gkmt +%\newfont{\aufnt}{arial at 12pt} % was arialr originally - now is arial +%\newfont{\auit}{ariali at 12pt} % 13 Jan 00 gkmt +%\newfont{\affaddr}{arial at 10pt} % was arialr originally - now is arial +%\newfont{\affaddrit}{ariali at 10pt} %13 Jan 00 gkmt +%\newfont{\eaddfnt}{arial at 12pt} % was arialr originally - now is arial +%\newfont{\ixpt}{times at 9pt} % was timenrr originally - now is times +%\newfont{\confname}{timesi at 8pt} % was timenrri - now is timesi +%\newfont{\crnotice}{times at 8pt} % was timenrr originally - now is times +%\newfont{\ninept}{times at 9pt} % 
was timenrr originally - now is times + +% ********************************************* +% -- End of block A -- +% +% +% -- Start of block B -- UPDATED FONT NAMES +% ********************************************* +% Gerry Murray 11/30/2006 +% ********************************************* +\newfont{\secfnt}{ptmb8t at 12pt} +\newfont{\secit}{ptmbi8t at 12pt} %13 Jan 00 gkmt +\newfont{\subsecfnt}{ptmri8t at 11pt} +\newfont{\subsecit}{ptmbi8t at 11pt} % +\newfont{\ttlfnt}{phvb8t at 18pt} +\newfont{\ttlit}{phvbo8t at 18pt} % GM 2/4/2000 +\newfont{\subttlfnt}{phvr8t at 14pt} +\newfont{\subttlit}{phvro8t at 14pt} % GM 2/4/2000 +\newfont{\subttlbf}{phvb8t at 14pt} % 13 Jan 00 gkmt +\newfont{\aufnt}{phvr8t at 12pt} +\newfont{\auit}{phvro8t at 12pt} % GM 2/4/2000 +\newfont{\affaddr}{phvr8t at 10pt} +\newfont{\affaddrit}{phvro8t at 10pt} % GM 2/4/2000 +\newfont{\eaddfnt}{phvr8t at 12pt} +\newfont{\ixpt}{ptmr8t at 9pt} +\newfont{\confname}{ptmri8t at 8pt} +\newfont{\crnotice}{ptmr8t at 8pt} +\newfont{\ninept}{ptmr8t at 9pt} +% +++++++++++++++++++++++++++++++++++++++++++++ +% -- End of block B -- + +%\def\email#1{{{\eaddfnt{\vskip 4pt#1}}}} +% If we have an email, inside a "shared affiliation" then we need the following instead +\def\email#1{{{\eaddfnt{\par #1}}}} % revised - GM - 11/30/2006 + +\def\addauthorsection{\ifnum\originalaucount>6 % was 3 - Gerry March 2007 + \section{Additional Authors}\the\addauthors + \fi} + +\newcount\savesection +\newcount\sectioncntr +\global\sectioncntr=1 + +\setcounter{secnumdepth}{3} + +\def\appendix{\par +\section*{APPENDIX} +\setcounter{section}{0} + \setcounter{subsection}{0} + \def\thesection{\Alph{section}} } + +\leftmargini 22.5pt +\leftmarginii 19.8pt % > \labelsep + width of '(m)' +\leftmarginiii 16.8pt % > \labelsep + width of 'vii.' +\leftmarginiv 15.3pt % > \labelsep + width of 'M.' 
+\leftmarginv 9pt +\leftmarginvi 9pt + +\leftmargin\leftmargini +\labelsep 4.5pt +\labelwidth\leftmargini\advance\labelwidth-\labelsep + +\def\@listI{\leftmargin\leftmargini \parsep 3.6pt plus 2pt minus 1pt% +\topsep 7.2pt plus 2pt minus 4pt% +\itemsep 3.6pt plus 2pt minus 1pt} + +\let\@listi\@listI +\@listi + +\def\@listii{\leftmargin\leftmarginii + \labelwidth\leftmarginii\advance\labelwidth-\labelsep + \topsep 3.6pt plus 2pt minus 1pt + \parsep 1.8pt plus 0.9pt minus 0.9pt + \itemsep \parsep} + +\def\@listiii{\leftmargin\leftmarginiii + \labelwidth\leftmarginiii\advance\labelwidth-\labelsep + \topsep 1.8pt plus 0.9pt minus 0.9pt + \parsep \z@ \partopsep 1pt plus 0pt minus 1pt + \itemsep \topsep} + +\def\@listiv{\leftmargin\leftmarginiv + \labelwidth\leftmarginiv\advance\labelwidth-\labelsep} + +\def\@listv{\leftmargin\leftmarginv + \labelwidth\leftmarginv\advance\labelwidth-\labelsep} + +\def\@listvi{\leftmargin\leftmarginvi + \labelwidth\leftmarginvi\advance\labelwidth-\labelsep} + +\def\labelenumi{\theenumi.} +\def\theenumi{\arabic{enumi}} + +\def\labelenumii{(\theenumii)} +\def\theenumii{\alph{enumii}} +\def\p at enumii{\theenumi} + +\def\labelenumiii{\theenumiii.} +\def\theenumiii{\roman{enumiii}} +\def\p at enumiii{\theenumi(\theenumii)} + +\def\labelenumiv{\theenumiv.} +\def\theenumiv{\Alph{enumiv}} +\def\p at enumiv{\p at enumiii\theenumiii} + +\def\labelitemi{$\bullet$} +\def\labelitemii{\bf --} +\def\labelitemiii{$\ast$} +\def\labelitemiv{$\cdot$} + +\def\verse{\let\\=\@centercr + \list{}{\itemsep\z@ \itemindent -1.5em\listparindent \itemindent + \rightmargin\leftmargin\advance\leftmargin 1.5em}\item[]} +\let\endverse\endlist + +\def\quotation{\list{}{\listparindent 1.5em + \itemindent\listparindent + \rightmargin\leftmargin \parsep 0pt plus 1pt}\item[]} +\let\endquotation=\endlist + +\def\quote{\list{}{\rightmargin\leftmargin}\item[]} +\let\endquote=\endlist + +\def\descriptionlabel#1{\hspace\labelsep \bf #1} +\def\description{\list{}{\labelwidth\z@ 
\itemindent-\leftmargin + \let\makelabel\descriptionlabel}} + +\let\enddescription\endlist + +\def\theequation{\arabic{equation}} + +\arraycolsep 4.5pt % Half the space between columns in an array environment. +\tabcolsep 5.4pt % Half the space between columns in a tabular environment. +\arrayrulewidth .5pt % Width of rules in array and tabular environment. % (was .4) updated Gerry March 20 2007 +\doublerulesep 1.8pt % Space between adjacent rules in array or tabular env. + +\tabbingsep \labelsep % Space used by the \' command. (See LaTeX manual.) + +\skip\@mpfootins =\skip\footins + +\fboxsep =2.7pt % Space left between box and text by \fbox and \framebox. +\fboxrule =.5pt % Width of rules in box made by \fbox and \framebox. % (was .4) updated Gerry March 20 2007 + +\def\thepart{\Roman{part}} % Roman numeral part numbers. +\def\thesection {\arabic{section}} +\def\thesubsection {\thesection.\arabic{subsection}} +%\def\thesubsubsection {\thesubsection.\arabic{subsubsection}} % GM 7/30/2002 +%\def\theparagraph {\thesubsubsection.\arabic{paragraph}} % GM 7/30/2002 +\def\thesubparagraph {\theparagraph.\arabic{subparagraph}} + +\def\@pnumwidth{1.55em} +\def\@tocrmarg {2.55em} +\def\@dotsep{4.5} +\setcounter{tocdepth}{3} + +\def\tableofcontents{\@latexerr{\tableofcontents: Tables of contents are not + allowed in the `acmconf' document style.}\@eha} + +\def\l at part#1#2{\addpenalty{\@secpenalty} + \addvspace{2.25em plus 1pt} % space above part line + \begingroup + \@tempdima 3em % width of box holding part number, used by + \parindent \z@ \rightskip \@pnumwidth %% \numberline + \parfillskip -\@pnumwidth + {\large \bf % set line in \large boldface + \leavevmode % TeX command to enter horizontal mode. 
+ #1\hfil \hbox to\@pnumwidth{\hss #2}}\par + \nobreak % Never break after part entry + \endgroup} + +\def\l at section#1#2{\addpenalty{\@secpenalty} % good place for page break + \addvspace{1.0em plus 1pt} % space above toc entry + \@tempdima 1.5em % width of box holding section number + \begingroup + \parindent \z@ \rightskip \@pnumwidth + \parfillskip -\@pnumwidth + \bf % Boldface. + \leavevmode % TeX command to enter horizontal mode. + \advance\leftskip\@tempdima %% added 5 Feb 88 to conform to + \hskip -\leftskip %% 25 Jan 88 change to \numberline + #1\nobreak\hfil \nobreak\hbox to\@pnumwidth{\hss #2}\par + \endgroup} + + +\def\l at subsection{\@dottedtocline{2}{1.5em}{2.3em}} +\def\l at subsubsection{\@dottedtocline{3}{3.8em}{3.2em}} +\def\l at paragraph{\@dottedtocline{4}{7.0em}{4.1em}} +\def\l at subparagraph{\@dottedtocline{5}{10em}{5em}} + +\def\listoffigures{\@latexerr{\listoffigures: Lists of figures are not + allowed in the `acmconf' document style.}\@eha} + +\def\l at figure{\@dottedtocline{1}{1.5em}{2.3em}} + +\def\listoftables{\@latexerr{\listoftables: Lists of tables are not + allowed in the `acmconf' document style.}\@eha} +\let\l at table\l at figure + +\def\footnoterule{\kern-3\p@ + \hrule width .5\columnwidth % (was .4) updated Gerry March 20 2007 + \kern 2.6\p@} % The \hrule has default height of .4pt % (was .4) updated Gerry March 20 2007 +% ------ +\long\def\@makefntext#1{\noindent +%\hbox to .5em{\hss$^{\@thefnmark}$}#1} % original +\hbox to .5em{\hss\textsuperscript{\@thefnmark}}#1} % C. Clifton / GM Oct. 2nd. 
2002 +% ------- + +\long\def\@maketntext#1{\noindent +#1} + +\long\def\@maketitlenotetext#1#2{\noindent + \hbox to 1.8em{\hss$^{#1}$}#2} + +\setcounter{topnumber}{2} +\def\topfraction{.7} +\setcounter{bottomnumber}{1} +\def\bottomfraction{.3} +\setcounter{totalnumber}{3} +\def\textfraction{.2} +\def\floatpagefraction{.5} +\setcounter{dbltopnumber}{2} +\def\dbltopfraction{.7} +\def\dblfloatpagefraction{.5} + +% +\long\def\@makecaption#1#2{ + \vskip \baselineskip + \setbox\@tempboxa\hbox{\textbf{#1: #2}} + \ifdim \wd\@tempboxa >\hsize % IF longer than one line: + \textbf{#1: #2}\par % THEN set as ordinary paragraph. + \else % ELSE center. + \hbox to\hsize{\hfil\box\@tempboxa\hfil}\par + \fi} + +% + +\long\def\@makecaption#1#2{ + \vskip 10pt + \setbox\@tempboxa\hbox{\textbf{#1: #2}} + \ifdim \wd\@tempboxa >\hsize % IF longer than one line: + \textbf{#1: #2}\par % THEN set as ordinary paragraph. + \else % ELSE center. + \hbox to\hsize{\hfil\box\@tempboxa\hfil} + \fi} + +\@ifundefined{figure}{\newcounter {figure}} % this is for LaTeX2e + +\def\fps at figure{tbp} +\def\ftype at figure{1} +\def\ext at figure{lof} +\def\fnum at figure{Figure \thefigure} +\def\figure{\@float{figure}} +\let\endfigure\end at float +\@namedef{figure*}{\@dblfloat{figure}} +\@namedef{endfigure*}{\end at dblfloat} + +\@ifundefined{table}{\newcounter {table}} % this is for LaTeX2e + +\def\fps at table{tbp} +\def\ftype at table{2} +\def\ext at table{lot} +\def\fnum at table{Table \thetable} +\def\table{\@float{table}} +\let\endtable\end at float +\@namedef{table*}{\@dblfloat{table}} +\@namedef{endtable*}{\end at dblfloat} + +\newtoks\titleboxnotes +\newcount\titleboxnoteflag + +\def\maketitle{\par + \begingroup + \def\thefootnote{\fnsymbol{footnote}} + \def\@makefnmark{\hbox + to 0pt{$^{\@thefnmark}$\hss}} + \twocolumn[\@maketitle] +\@thanks + \endgroup + \setcounter{footnote}{0} + \let\maketitle\relax + \let\@maketitle\relax + 
\gdef\@thanks{}\gdef\@author{}\gdef\@title{}\gdef\@subtitle{}\let\thanks\relax + \@copyrightspace} + +%% CHANGES ON NEXT LINES +\newif\if at ll % to record which version of LaTeX is in use + +\expandafter\ifx\csname LaTeXe\endcsname\relax % LaTeX2.09 is used +\else% LaTeX2e is used, so set ll to true +\global\@lltrue +\fi + +\if at ll + \NeedsTeXFormat{LaTeX2e} + \ProvidesClass{sig-alternate} [2007/06/07 - V2.3 - based on acmproc.cls V1.3 ] + \RequirePackage{latexsym}% QUERY: are these two really needed? + \let\dooptions\ProcessOptions +\else + \let\dooptions\@options +\fi +%% END CHANGES + +\def\@height{height} +\def\@width{width} +\def\@minus{minus} +\def\@plus{plus} +\def\hb at xt@{\hbox to} +\newif\if at faircopy +\@faircopyfalse +\def\ds at faircopy{\@faircopytrue} + +\def\ds at preprint{\@faircopyfalse} + +\@twosidetrue +\@mparswitchtrue +\def\ds at draft{\overfullrule 5\p@} +%% CHANGE ON NEXT LINE +\dooptions + +\lineskip \p@ +\normallineskip \p@ +\def\baselinestretch{1} +\def\@ptsize{0} %needed for amssymbols.sty + +%% CHANGES ON NEXT LINES +\if at ll% allow use of old-style font change commands in LaTeX2e +\@maxdepth\maxdepth +% +\DeclareOldFontCommand{\rm}{\ninept\rmfamily}{\mathrm} +\DeclareOldFontCommand{\sf}{\normalfont\sffamily}{\mathsf} +\DeclareOldFontCommand{\tt}{\normalfont\ttfamily}{\mathtt} +\DeclareOldFontCommand{\bf}{\normalfont\bfseries}{\mathbf} +\DeclareOldFontCommand{\it}{\normalfont\itshape}{\mathit} +\DeclareOldFontCommand{\sl}{\normalfont\slshape}{\@nomath\sl} +\DeclareOldFontCommand{\sc}{\normalfont\scshape}{\@nomath\sc} +\DeclareRobustCommand*{\cal}{\@fontswitch{\relax}{\mathcal}} +\DeclareRobustCommand*{\mit}{\@fontswitch{\relax}{\mathnormal}} +\fi +% +\if at ll + \renewcommand{\rmdefault}{cmr} % was 'ttm' +% Note! I have also found 'mvr' to work ESPECIALLY well. 
+% Gerry - October 1999 +% You may need to change your LV1times.fd file so that sc is +% mapped to cmcsc - -for smallcaps -- that is if you decide +% to change {cmr} to {times} above. (Not recommended) + \renewcommand{\@ptsize}{} + \renewcommand{\normalsize}{% + \@setfontsize\normalsize\@ixpt{10.5\p@}%\ninept% + \abovedisplayskip 6\p@ \@plus2\p@ \@minus\p@ + \belowdisplayskip \abovedisplayskip + \abovedisplayshortskip 6\p@ \@minus 3\p@ + \belowdisplayshortskip 6\p@ \@minus 3\p@ + \let\@listi\@listI + } +\else + \def\@normalsize{%changed next to 9 from 10 + \@setsize\normalsize{9\p@}\ixpt\@ixpt + \abovedisplayskip 6\p@ \@plus2\p@ \@minus\p@ + \belowdisplayskip \abovedisplayskip + \abovedisplayshortskip 6\p@ \@minus 3\p@ + \belowdisplayshortskip 6\p@ \@minus 3\p@ + \let\@listi\@listI + }% +\fi +\if at ll + \newcommand\scriptsize{\@setfontsize\scriptsize\@viipt{8\p@}} + \newcommand\tiny{\@setfontsize\tiny\@vpt{6\p@}} + \newcommand\large{\@setfontsize\large\@xiipt{14\p@}} + \newcommand\Large{\@setfontsize\Large\@xivpt{18\p@}} + \newcommand\LARGE{\@setfontsize\LARGE\@xviipt{20\p@}} + \newcommand\huge{\@setfontsize\huge\@xxpt{25\p@}} + \newcommand\Huge{\@setfontsize\Huge\@xxvpt{30\p@}} +\else + \def\scriptsize{\@setsize\scriptsize{8\p@}\viipt\@viipt} + \def\tiny{\@setsize\tiny{6\p@}\vpt\@vpt} + \def\large{\@setsize\large{14\p@}\xiipt\@xiipt} + \def\Large{\@setsize\Large{18\p@}\xivpt\@xivpt} + \def\LARGE{\@setsize\LARGE{20\p@}\xviipt\@xviipt} + \def\huge{\@setsize\huge{25\p@}\xxpt\@xxpt} + \def\Huge{\@setsize\Huge{30\p@}\xxvpt\@xxvpt} +\fi +\normalsize + +% make aubox hsize/number of authors up to 3, less gutter +% then showbox gutter showbox gutter showbox -- GKMT Aug 99 +\newbox\@acmtitlebox +\def\@maketitle{\newpage + \null + \setbox\@acmtitlebox\vbox{% +\baselineskip 20pt +\vskip 2em % Vertical space above title. + \begin{center} + {\ttlfnt \@title\par} % Title set in 18pt Helvetica (Arial) bold size. + \vskip 1.5em % Vertical space after title. 
+%This should be the subtitle. +{\subttlfnt \the\subtitletext\par}\vskip 1.25em%\fi + {\baselineskip 16pt\aufnt % each author set in \12 pt Arial, in a + \lineskip .5em % tabular environment + \begin{tabular}[t]{c}\@author + \end{tabular}\par} + \vskip 1.5em % Vertical space after author. + \end{center}} + \dimen0=\ht\@acmtitlebox + \advance\dimen0 by -12.75pc\relax % Increased space for title box -- KBT + \unvbox\@acmtitlebox + \ifdim\dimen0<0.0pt\relax\vskip-\dimen0\fi} + + +\newcount\titlenotecount +\global\titlenotecount=0 +\newtoks\tntoks +\newtoks\tntokstwo +\newtoks\tntoksthree +\newtoks\tntoksfour +\newtoks\tntoksfive + +\def\abstract{ +\ifnum\titlenotecount>0 % was =1 + \insert\footins{% + \reset at font\footnotesize + \interlinepenalty\interfootnotelinepenalty + \splittopskip\footnotesep + \splitmaxdepth \dp\strutbox \floatingpenalty \@MM + \hsize\columnwidth \@parboxrestore + \protected at edef\@currentlabel{% + }% + \color at begingroup +\ifnum\titlenotecount=1 + \@maketntext{% + \raisebox{4pt}{$\ast$}\rule\z@\footnotesep\ignorespaces\the\tntoks\@finalstrut\strutbox}% +\fi +\ifnum\titlenotecount=2 + \@maketntext{% + \raisebox{4pt}{$\ast$}\rule\z@\footnotesep\ignorespaces\the\tntoks\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\dagger$}\rule\z@\footnotesep\ignorespaces\the\tntokstwo\@finalstrut\strutbox}% +\fi +\ifnum\titlenotecount=3 + \@maketntext{% + \raisebox{4pt}{$\ast$}\rule\z@\footnotesep\ignorespaces\the\tntoks\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\dagger$}\rule\z@\footnotesep\ignorespaces\the\tntokstwo\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\ddagger$}\rule\z@\footnotesep\ignorespaces\the\tntoksthree\@finalstrut\strutbox}% +\fi +\ifnum\titlenotecount=4 + \@maketntext{% + \raisebox{4pt}{$\ast$}\rule\z@\footnotesep\ignorespaces\the\tntoks\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\dagger$}\rule\z@\footnotesep\ignorespaces\the\tntokstwo\par\@finalstrut\strutbox}% 
+\@maketntext{% + \raisebox{4pt}{$\ddagger$}\rule\z@\footnotesep\ignorespaces\the\tntoksthree\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\S$}\rule\z@\footnotesep\ignorespaces\the\tntoksfour\@finalstrut\strutbox}% +\fi +\ifnum\titlenotecount=5 + \@maketntext{% + \raisebox{4pt}{$\ast$}\rule\z@\footnotesep\ignorespaces\the\tntoks\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\dagger$}\rule\z@\footnotesep\ignorespaces\the\tntokstwo\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\ddagger$}\rule\z@\footnotesep\ignorespaces\the\tntoksthree\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\S$}\rule\z@\footnotesep\ignorespaces\the\tntoksfour\par\@finalstrut\strutbox}% +\@maketntext{% + \raisebox{4pt}{$\P$}\rule\z@\footnotesep\ignorespaces\the\tntoksfive\@finalstrut\strutbox}% +\fi + \color at endgroup} %g} +\fi +\setcounter{footnote}{0} +\section*{ABSTRACT}\normalsize%\ninept +} + +\def\endabstract{\if at twocolumn\else\endquotation\fi} + +\def\keywords{\if at twocolumn +\section*{Keywords} +\else \small +\quotation +\fi} + +\def\terms{\if at twocolumn +\section*{General Terms} +\else \small +\quotation +\fi} + +% -- Classification needs to be a bit smart due to optionals - Gerry/Georgia November 2nd. 
1999 +\newcount\catcount +\global\catcount=1 + +\def\category#1#2#3{% +\ifnum\catcount=1 +\section*{Categories and Subject Descriptors} +\advance\catcount by 1\else{\unskip; }\fi + \@ifnextchar [{\@category{#1}{#2}{#3}}{\@category{#1}{#2}{#3}[]}% +} + +\def\@category#1#2#3[#4]{% + \begingroup + \let\and\relax + #1 [\textbf{#2}]% + \if!#4!% + \if!#3!\else : #3\fi + \else + :\space + \if!#3!\else #3\kern\z at ---\hskip\z@\fi + \textit{#4}% + \fi + \endgroup +} +% + +%%% This section (written by KBT) handles the 1" box in the lower left +%%% corner of the left column of the first page by creating a picture, +%%% and inserting the predefined string at the bottom (with a negative +%%% displacement to offset the space allocated for a non-existent +%%% caption). +%%% +\newtoks\copyrightnotice +\def\ftype at copyrightbox{8} +\def\@copyrightspace{ +\@float{copyrightbox}[b] +\begin{center} +\setlength{\unitlength}{1pc} +\begin{picture}(20,6) %Space for copyright notice +\put(0,-0.95){\crnotice{\@toappear}} +\end{picture} +\end{center} +\end at float} + +\def\@toappear{} % Default setting blank - commands below change this. 
+\long\def\toappear#1{\def\@toappear{\parbox[b]{20pc}{\baselineskip 9pt#1}}} +\def\toappearbox#1{\def\@toappear{\raisebox{5pt}{\framebox[20pc]{\parbox[b]{19pc}{#1}}}}} + +\newtoks\conf +\newtoks\confinfo +\def\conferenceinfo#1#2{\global\conf={#1}\global\confinfo{#2}} + + +\def\marginpar{\@latexerr{The \marginpar command is not allowed in the + `acmconf' document style.}\@eha} + +\mark{{}{}} % Initializes TeX's marks + +\def\today{\ifcase\month\or + January\or February\or March\or April\or May\or June\or + July\or August\or September\or October\or November\or December\fi + \space\number\day, \number\year} + +\def\@begintheorem#1#2{% + \parskip 0pt % GM July 2000 (for tighter spacing) + \trivlist + \item[% + \hskip 10\p@ + \hskip \labelsep + {{\sc #1}\hskip 5\p@\relax#2.}% + ] + \it +} +\def\@opargbegintheorem#1#2#3{% + \parskip 0pt % GM July 2000 (for tighter spacing) + \trivlist + \item[% + \hskip 10\p@ + \hskip \labelsep + {\sc #1\ #2\ % This mod by Gerry to enumerate corollaries + \setbox\@tempboxa\hbox{(#3)} % and bracket the 'corollary title' + \ifdim \wd\@tempboxa>\z@ % and retain the correct numbering of e.g. theorems + \hskip 5\p@\relax % if they occur 'around' said corollaries. + \box\@tempboxa % Gerry - Nov. 1999. + \fi.}% + ] + \it +} +\newif\if at qeded +\global\@qededfalse + +% -- original +%\def\proof{% +% \vspace{-\parskip} % GM July 2000 (for tighter spacing) +% \global\@qededfalse +% \@ifnextchar[{\@xproof}{\@proof}% +%} +% -- end of original + +% (JSS) Fix for vertical spacing bug - Gerry Murray July 30th. 
2002 +\def\proof{% +\vspace{-\lastskip}\vspace{-\parsep}\penalty-51% +\global\@qededfalse +\@ifnextchar[{\@xproof}{\@proof}% +} + +\def\endproof{% + \if at qeded\else\qed\fi + \endtrivlist +} +\def\@proof{% + \trivlist + \item[% + \hskip 10\p@ + \hskip \labelsep + {\sc Proof.}% + ] + \ignorespaces +} +\def\@xproof[#1]{% + \trivlist + \item[\hskip 10\p@\hskip \labelsep{\sc Proof #1.}]% + \ignorespaces +} +\def\qed{% + \unskip + \kern 10\p@ + \begingroup + \unitlength\p@ + \linethickness{.4\p@}% + \framebox(6,6){}% + \endgroup + \global\@qededtrue +} + +\def\newdef#1#2{% + \expandafter\@ifdefinable\csname #1\endcsname + {\@definecounter{#1}% + \expandafter\xdef\csname the#1\endcsname{\@thmcounter{#1}}% + \global\@namedef{#1}{\@defthm{#1}{#2}}% + \global\@namedef{end#1}{\@endtheorem}% + }% +} +\def\@defthm#1#2{% + \refstepcounter{#1}% + \@ifnextchar[{\@ydefthm{#1}{#2}}{\@xdefthm{#1}{#2}}% +} +\def\@xdefthm#1#2{% + \@begindef{#2}{\csname the#1\endcsname}% + \ignorespaces +} +\def\@ydefthm#1#2[#3]{% + \trivlist + \item[% + \hskip 10\p@ + \hskip \labelsep + {\it #2% + \savebox\@tempboxa{#3}% + \ifdim \wd\@tempboxa>\z@ + \ \box\@tempboxa + \fi.% + }]% + \ignorespaces +} +\def\@begindef#1#2{% + \trivlist + \item[% + \hskip 10\p@ + \hskip \labelsep + {\it #1\ \rm #2.}% + ]% +} +\def\theequation{\arabic{equation}} + +\newcounter{part} +\newcounter{section} +\newcounter{subsection}[section] +\newcounter{subsubsection}[subsection] +\newcounter{paragraph}[subsubsection] +\def\thepart{\Roman{part}} +\def\thesection{\arabic{section}} +\def\thesubsection{\thesection.\arabic{subsection}} +\def\thesubsubsection{\thesubsection.\arabic{subsubsection}} %removed \subsecfnt 29 July 2002 gkmt +\def\theparagraph{\thesubsubsection.\arabic{paragraph}} %removed \subsecfnt 29 July 2002 gkmt +\newif\if at uchead +\@ucheadfalse + +%% CHANGES: NEW NOTE +%% NOTE: OK to use old-style font commands below, since they were +%% suitably redefined for LaTeX2e +%% END CHANGES +\setcounter{secnumdepth}{3} 
+\def\part{% + \@startsection{part}{9}{\z@}{-10\p@ \@plus -4\p@ \@minus -2\p@} + {4\p@}{\normalsize\@ucheadtrue}% +} +\def\section{% + \@startsection{section}{1}{\z@}{-10\p@ \@plus -4\p@ \@minus -2\p@}% GM + {4\p@}{\baselineskip 14pt\secfnt\@ucheadtrue}% +} + +\def\subsection{% + \@startsection{subsection}{2}{\z@}{-8\p@ \@plus -2\p@ \@minus -\p@} + {4\p@}{\secfnt}% +} +\def\subsubsection{% + \@startsection{subsubsection}{3}{\z@}{-8\p@ \@plus -2\p@ \@minus -\p@}% + {4\p@}{\subsecfnt}% +} +%\def\paragraph{% +% \vskip 12pt\@startsection{paragraph}{3}{\z@}{6\p@ \@plus \p@}% original +% {-5\p@}{\subsecfnt}% +%} +% If one wants sections, subsections and subsubsections numbered, +% but not paragraphs, one usually sets secnumepth to 3. +% For that, the "depth" of paragraphs must be given correctly +% in the definition (``4'' instead of ``3'' as second argument +% of @startsection): +\def\paragraph{% + \vskip 12pt\@startsection{paragraph}{4}{\z@}{6\p@ \@plus \p@}% % GM and Wolfgang May - 11/30/06 + {-5\p@}{\subsecfnt}% +} +\let\@period=. 
+\def\@startsection#1#2#3#4#5#6{% + \if at noskipsec %gkmt, 11 aug 99 + \global\let\@period\@empty + \leavevmode + \global\let\@period.% + \fi + \par % + \@tempskipa #4\relax + \@afterindenttrue + \ifdim \@tempskipa <\z@ + \@tempskipa -\@tempskipa + \@afterindentfalse + \fi + \if at nobreak + \everypar{}% + \else + \addpenalty\@secpenalty + \addvspace\@tempskipa + \fi +\parskip=0pt % GM July 2000 (non numbered) section heads + \@ifstar + {\@ssect{#3}{#4}{#5}{#6}} + {\@dblarg{\@sect{#1}{#2}{#3}{#4}{#5}{#6}}}% +} +\def\@sect#1#2#3#4#5#6[#7]#8{% + \ifnum #2>\c at secnumdepth + \let\@svsec\@empty + \else + \refstepcounter{#1}% + \edef\@svsec{% + \begingroup + %\ifnum#2>2 \noexpand\rm \fi % changed to next 29 July 2002 gkmt + \ifnum#2>2 \noexpand#6 \fi + \csname the#1\endcsname + \endgroup + \ifnum #2=1\relax .\fi + \hskip 1em + }% + \fi + \@tempskipa #5\relax + \ifdim \@tempskipa>\z@ + \begingroup + #6\relax + \@hangfrom{\hskip #3\relax\@svsec}% + \begingroup + \interlinepenalty \@M + \if at uchead + \uppercase{#8}% + \else + #8% + \fi + \par + \endgroup + \endgroup + \csname #1mark\endcsname{#7}% + \vskip -12pt %gkmt, 11 aug 99 and GM July 2000 (was -14) - numbered section head spacing +\addcontentsline{toc}{#1}{% + \ifnum #2>\c at secnumdepth \else + \protect\numberline{\csname the#1\endcsname}% + \fi + #7% + }% + \else + \def\@svsechd{% + #6% + \hskip #3\relax + \@svsec + \if at uchead + \uppercase{#8}% + \else + #8% + \fi + \csname #1mark\endcsname{#7}% + \addcontentsline{toc}{#1}{% + \ifnum #2>\c at secnumdepth \else + \protect\numberline{\csname the#1\endcsname}% + \fi + #7% + }% + }% + \fi + \@xsect{#5}\hskip 1pt + \par +} +\def\@xsect#1{% + \@tempskipa #1\relax + \ifdim \@tempskipa>\z@ + \par + \nobreak + \vskip \@tempskipa + \@afterheading + \else + \global\@nobreakfalse + \global\@noskipsectrue + \everypar{% + \if at noskipsec + \global\@noskipsecfalse + \clubpenalty\@M + \hskip -\parindent + \begingroup + \@svsechd + \@period + \endgroup + \unskip + 
\@tempskipa #1\relax + \hskip -\@tempskipa + \else + \clubpenalty \@clubpenalty + \everypar{}% + \fi + }% + \fi + \ignorespaces +} +\def\@trivlist{% + \@topsepadd\topsep + \if at noskipsec + \global\let\@period\@empty + \leavevmode + \global\let\@period.% + \fi + \ifvmode + \advance\@topsepadd\partopsep + \else + \unskip + \par + \fi + \if at inlabel + \@noparitemtrue + \@noparlisttrue + \else + \@noparlistfalse + \@topsep\@topsepadd + \fi + \advance\@topsep \parskip + \leftskip\z at skip + \rightskip\@rightskip + \parfillskip\@flushglue + \@setpar{\if at newlist\else{\@@par}\fi} + \global\@newlisttrue + \@outerparskip\parskip +} + +%%% Actually, 'abbrev' works just fine as the default +%%% Bibliography style. + +\typeout{Using 'Abbrev' bibliography style} +\newcommand\bibyear[2]{% + \unskip\quad\ignorespaces#1\unskip + \if#2..\quad \else \quad#2 \fi +} +\newcommand{\bibemph}[1]{{\em#1}} +\newcommand{\bibemphic}[1]{{\em#1\/}} +\newcommand{\bibsc}[1]{{\sc#1}} +\def\@normalcite{% + \def\@cite##1##2{[##1\if at tempswa , ##2\fi]}% +} +\def\@citeNB{% + \def\@cite##1##2{##1\if at tempswa , ##2\fi}% +} +\def\@citeRB{% + \def\@cite##1##2{##1\if at tempswa , ##2\fi]}% +} +\def\start at cite#1#2{% + \edef\citeauthoryear##1##2##3{% + ###1% + \ifnum#2=\z@ \else\ ###2\fi + }% + \ifnum#1=\thr@@ + \let\@@cite\@citeyear + \else + \let\@@cite\@citenormal + \fi + \@ifstar{\@citeNB\@@cite}{\@normalcite\@@cite}% +} +\def\cite{\start at cite23} +\def\citeNP{\cite*} +\def\citeA{\start at cite10} +\def\citeANP{\citeA*} +\def\shortcite{\start at cite23} +\def\shortciteNP{\shortcite*} +\def\shortciteA{\start at cite20} +\def\shortciteANP{\shortciteA*} +\def\citeyear{\start at cite30} +\def\citeyearNP{\citeyear*} +\def\citeN{% + \@citeRB + \def\citeauthoryear##1##2##3{##1\ [##3% + \def\reserved at a{##1}% + \def\citeauthoryear####1####2####3{% + \def\reserved at b{####1}% + \ifx\reserved at a\reserved at b + ####3% + \else + \errmessage{Package acmart Error: author mismatch + in 
\string\citeN^^J^^J% + See the acmart package documentation for explanation}% + \fi + }% + }% + \@ifstar\@citeyear\@citeyear +} +\def\shortciteN{% + \@citeRB + \def\citeauthoryear##1##2##3{##2\ [##3% + \def\reserved at a{##2}% + \def\citeauthoryear####1####2####3{% + \def\reserved at b{####2}% + \ifx\reserved at a\reserved at b + ####3% + \else + \errmessage{Package acmart Error: author mismatch + in \string\shortciteN^^J^^J% + See the acmart package documentation for explanation}% + \fi + }% + }% + \@ifstar\@citeyear\@citeyear % GM July 2000 +} +\def\@citenormal{% + \@ifnextchar [{\@tempswatrue\@citex;} + {\@tempswafalse\@citex,[]}% % GM July 2000 +} +\def\@citeyear{% + \@ifnextchar [{\@tempswatrue\@citex,}% + {\@tempswafalse\@citex,[]}% +} +\def\@citex#1[#2]#3{% + \let\@citea\@empty + \@cite{% + \@for\@citeb:=#3\do{% + \@citea + \def\@citea{#1 }% + \edef\@citeb{\expandafter\@iden\@citeb}% + \if at filesw + \immediate\write\@auxout{\string\citation{\@citeb}}% + \fi + \@ifundefined{b@\@citeb}{% + {\bf ?}% + \@warning{% + Citation `\@citeb' on page \thepage\space undefined% + }% + }% + {\csname b@\@citeb\endcsname}% + }% + }{#2}% +} +\let\@biblabel\@gobble +\newdimen\bibindent +\setcounter{enumi}{1} +\bibindent=0em +\def\thebibliography#1{% +\ifnum\addauflag=0\addauthorsection\global\addauflag=1\fi + \section[References]{% <=== OPTIONAL ARGUMENT ADDED HERE + {References} % was uppercased but this affects pdf bookmarks (SP/GM October 2004) + {\vskip -9pt plus 1pt} % GM Nov. 
2006 / GM July 2000 (for somewhat tighter spacing) + \@mkboth{{\refname}}{{\refname}}% + }% + \list{[\arabic{enumi}]}{% + \settowidth\labelwidth{[#1]}% + \leftmargin\labelwidth + \advance\leftmargin\labelsep + \advance\leftmargin\bibindent + \parsep=0pt\itemsep=1pt % GM July 2000 + \itemindent -\bibindent + \listparindent \itemindent + \usecounter{enumi} + }% + \let\newblock\@empty + \raggedright % GM July 2000 + \sloppy + \sfcode`\.=1000\relax +} + + +\gdef\balancecolumns +{\vfill\eject +\global\@colht=\textheight +\global\ht\@cclv=\textheight +} + +\newcount\colcntr +\global\colcntr=0 +\newbox\savebox + +\gdef \@makecol {% +\global\advance\colcntr by 1 +\ifnum\colcntr>2 \global\colcntr=1\fi + \ifvoid\footins + \setbox\@outputbox \box\@cclv + \else + \setbox\@outputbox \vbox{% +\boxmaxdepth \@maxdepth + \@tempdima\dp\@cclv + \unvbox \@cclv + \vskip-\@tempdima + \vskip \skip\footins + \color at begingroup + \normalcolor + \footnoterule + \unvbox \footins + \color at endgroup + }% + \fi + \xdef\@freelist{\@freelist\@midlist}% + \global \let \@midlist \@empty + \@combinefloats + \ifvbox\@kludgeins + \@makespecialcolbox + \else + \setbox\@outputbox \vbox to\@colht {% +\@texttop + \dimen@ \dp\@outputbox + \unvbox \@outputbox + \vskip -\dimen@ + \@textbottom + }% + \fi + \global \maxdepth \@maxdepth +} +\def\titlenote{\@ifnextchar[\@xtitlenote{\stepcounter\@mpfn +\global\advance\titlenotecount by 1 +\ifnum\titlenotecount=1 + \raisebox{9pt}{$\ast$} +\fi +\ifnum\titlenotecount=2 + \raisebox{9pt}{$\dagger$} +\fi +\ifnum\titlenotecount=3 + \raisebox{9pt}{$\ddagger$} +\fi +\ifnum\titlenotecount=4 +\raisebox{9pt}{$\S$} +\fi +\ifnum\titlenotecount=5 +\raisebox{9pt}{$\P$} +\fi + \@titlenotetext +}} + +\long\def\@titlenotetext#1{\insert\footins{% +\ifnum\titlenotecount=1\global\tntoks={#1}\fi +\ifnum\titlenotecount=2\global\tntokstwo={#1}\fi +\ifnum\titlenotecount=3\global\tntoksthree={#1}\fi +\ifnum\titlenotecount=4\global\tntoksfour={#1}\fi 
+\ifnum\titlenotecount=5\global\tntoksfive={#1}\fi + \reset at font\footnotesize + \interlinepenalty\interfootnotelinepenalty + \splittopskip\footnotesep + \splitmaxdepth \dp\strutbox \floatingpenalty \@MM + \hsize\columnwidth \@parboxrestore + \protected at edef\@currentlabel{% + }% + \color at begingroup + \color at endgroup}} + +%%%%%%%%%%%%%%%%%%%%%%%%% +\ps at plain +\baselineskip=11pt +\let\thepage\relax % For NO page numbers - GM Nov. 30th. 1999 and July 2000 +\def\setpagenumber#1{\global\setcounter{page}{#1}} +%\pagenumbering{arabic} % Arabic page numbers GM July 2000 +\twocolumn % Double column. +\flushbottom % Even bottom -- alas, does not balance columns at end of document +\pagestyle{plain} + +% Need Copyright Year and Copyright Data to be user definable (in .tex file). +% Gerry Nov. 30th. 1999 +\newtoks\copyrtyr +\newtoks\acmcopyr +\newtoks\boilerplate +\global\acmcopyr={X-XXXXX-XX-X/XX/XX} % Default - 5/11/2001 *** Gerry +\global\copyrtyr={200X} % Default - 3/3/2003 *** Gerry +\def\CopyrightYear#1{\global\copyrtyr{#1}} +\def\crdata#1{\global\acmcopyr{#1}} +\def\permission#1{\global\boilerplate{#1}} +% +\global\boilerplate={Permission to make digital or hard copies of all or part of this work for personal or classroom use is granted without fee provided that copies are not made or distributed for profit or commercial advantage and that copies bear this notice and the full citation on the first page. To copy otherwise, to republish, to post on servers or to redistribute to lists, requires prior specific permission and/or a fee.} +\newtoks\copyrightetc +\global\copyrightetc{Copyright \the\copyrtyr\ ACM \the\acmcopyr\ ...\$5.00} +\toappear{\the\boilerplate\par +{\confname{\the\conf}} \the\confinfo\par \the\copyrightetc.} +%\DeclareFixedFont{\altcrnotice}{OT1}{tmr}{m}{n}{8} % << patch needed for accenting e.g. 
Montreal - Gerry, May 2007 +%\DeclareFixedFont{\altconfname}{OT1}{tmr}{m}{it}{8} % << patch needed for accenting in italicized confname - Gerry, May 2007 +% +%{\altconfname{{\the\conf}}} {\altcrnotice\the\confinfo\par} \the\copyrightetc.} % << Gerry, May 2007 +% +% The following section (i.e. 3 .sty inclusions) was added in May 2007 so as to fix the problems that many +% authors were having with accents. Sometimes accents would occur, but the letter-character would be of a different +% font. Conversely the letter-character font would be correct but, e.g. a 'bar' would appear superimposed on the +% character instead of, say, an unlaut/diaresis. Sometimes the letter-character would NOT appear at all. +% Using [T1]{fontenc} outright was not an option as this caused 99% of the authors to 'produce' a Type-3 (bitmapped) +% PDF file - useless for production. +% +% For proper (font) accenting we NEED these packages to be part of the .cls file i.e. 'ae', 'aecompl' and 'aeguil' +% ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +%% This is file `ae.sty' +\def\fileversion{1.3} +\def\filedate{2001/02/12} +\NeedsTeXFormat{LaTeX2e} +%\ProvidesPackage{ae}[\filedate\space\fileversion\space % GM +% Almost European Computer Modern] % GM - keeping the log file clean(er) +\newif\if at ae@slides \@ae at slidesfalse +\DeclareOption{slides}{\@ae at slidestrue} +\ProcessOptions +\fontfamily{aer} +\RequirePackage[T1]{fontenc} +\if at ae@slides + \renewcommand{\sfdefault}{laess} + \renewcommand{\rmdefault}{laess} % no roman + \renewcommand{\ttdefault}{laett} +\else + \renewcommand{\sfdefault}{aess} + \renewcommand{\rmdefault}{aer} + \renewcommand{\ttdefault}{aett} +\fi +\endinput +%% +%% End of file `ae.sty'. +% +% +\def\fileversion{0.9} +\def\filedate{1998/07/23} +\NeedsTeXFormat{LaTeX2e} +%\ProvidesPackage{aecompl}[\filedate\space\fileversion\space % GM +%T1 Complements for AE fonts (D. 
Roegel)] % GM -- keeping the log file clean(er) + +\def\@ae at compl#1{{\fontencoding{T1}\fontfamily{cmr}\selectfont\symbol{#1}}} +\def\guillemotleft{\@ae at compl{19}} +\def\guillemotright{\@ae at compl{20}} +\def\guilsinglleft{\@ae at compl{14}} +\def\guilsinglright{\@ae at compl{15}} +\def\TH{\@ae at compl{222}} +\def\NG{\@ae at compl{141}} +\def\ng{\@ae at compl{173}} +\def\th{\@ae at compl{254}} +\def\DJ{\@ae at compl{208}} +\def\dj{\@ae at compl{158}} +\def\DH{\@ae at compl{208}} +\def\dh{\@ae at compl{240}} +\def\@perthousandzero{\@ae at compl{24}} +\def\textperthousand{\%\@perthousandzero} +\def\textpertenthousand{\%\@perthousandzero\@perthousandzero} +\endinput +% +% +%% This is file `aeguill.sty' +% This file gives french guillemets (and not guillemots!) +% built with the Polish CMR fonts (default), WNCYR fonts, the LASY fonts +% or with the EC fonts. +% This is useful in conjunction with the ae package +% (this package loads the ae package in case it has not been loaded) +% and with or without the french(le) package. +% +% In order to get the guillemets, it is necessary to either type +% \guillemotleft and \guillemotright, or to use an 8 bit encoding +% (such as ISO-Latin1) which selects these two commands, +% or, if you use the french package (but not the frenchle package), +% to type << or >>. +% +% By default, you get the Polish CMR guillemets; if this package is loaded +% with the `cm' option, you get the LASY guillemets; with `ec,' you +% get the EC guillemets, and with `cyr,' you get the cyrillic guillemets. +% +% In verbatim mode, you always get the EC/TT guillemets. +% +% The default option is interesting in conjunction with PDF, +% because there is a Type 1 version of the Polish CMR fonts +% and these guillemets are very close in shape to the EC guillemets. +% There are no free Type 1 versions of the EC fonts. +% +% Support for Polish CMR guillemets was kindly provided by +% Rolf Niepraschk in version 0.99 (2000/05/22). 
+% Bernd Raichle provided extensive simplifications to the code +% for version 1.00. +% +% This package is released under the LPPL. +% +% Changes: +% Date version +% 2001/04/12 1.01 the frenchle and french package are now distinguished. +% +\def\fileversion{1.01} +\def\filedate{2001/04/12} +\NeedsTeXFormat{LaTeX2e} +%\ProvidesPackage{aeguill}[2001/04/12 1.01 % % GM +%AE fonts with french guillemets (D. Roegel)] % GM - keeping the log file clean(er) +%\RequirePackage{ae} % GM May 2007 - already embedded here + +\newcommand{\@ae at switch}[4]{#4} +\DeclareOption{ec}{\renewcommand\@ae at switch[4]{#1}} +\DeclareOption{cm}{\renewcommand\@ae at switch[4]{#2}} +\DeclareOption{cyr}{\renewcommand\@ae at switch[4]{#3}} +\DeclareOption{pl}{\renewcommand\@ae at switch[4]{#4}} +\ExecuteOptions{pl} +\ProcessOptions + +% +% Load necessary packages +% +\@ae at switch{% ec + % do nothing +}{% cm + \RequirePackage{latexsym}% GM - May 2007 - already 'mentioned as required' up above +}{% cyr + \RequirePackage[OT2,T1]{fontenc}% +}{% pl + \RequirePackage[OT4,T1]{fontenc}% +} + +% The following command will be compared to \frenchname, +% as defined in french.sty and frenchle.sty. +\def\aeguillfrenchdefault{french}% + +\let\guill at verbatim@font\verbatim at font +\def\verbatim at font{\guill at verbatim@font\ecguills{cmtt}% + \let\guillemotleft\@oguills\let\guillemotright\@fguills} + +\begingroup \catcode`\<=13 \catcode`\>=13 +\def\x{\endgroup + \def\ae at lfguill{<<}% + \def\ae at rfguill{>>}% +}\x + +\newcommand{\ecguills}[1]{% + \def\selectguillfont{\fontencoding{T1}\fontfamily{#1}\selectfont}% + \def\@oguills{{\selectguillfont\symbol{19}}}% + \def\@fguills{{\selectguillfont\symbol{20}}}% + } + +\newcommand{\aeguills}{% + \ae at guills + % We redefine \guillemotleft and \guillemotright + % in order to catch them when they are used + % with \DeclareInputText (in latin1.def for instance) + % We use \auxWARNINGi as a safe indicator that french.sty is used. 
+ \gdef\guillemotleft{\ifx\auxWARNINGi\undefined + \@oguills % neither french.sty nor frenchle.sty + \else + \ifx\aeguillfrenchdefault\frenchname + \ae at lfguill % french.sty + \else + \@oguills % frenchle.sty + \fi + \fi}% + \gdef\guillemotright{\ifx\auxWARNINGi\undefined + \@fguills % neither french.sty nor frenchle.sty + \else + \ifx\aeguillfrenchdefault\frenchname + \ae at rfguill % french.sty + \else + \@fguills % frenchle.sty + \fi + \fi}% + } + +% +% Depending on the class option +% define the internal command \ae at guills +\@ae at switch{% ec + \newcommand{\ae at guills}{% + \ecguills{cmr}}% +}{% cm + \newcommand{\ae at guills}{% + \def\selectguillfont{\fontencoding{U}\fontfamily{lasy}% + \fontseries{m}\fontshape{n}\selectfont}% + \def\@oguills{\leavevmode\nobreak + \hbox{\selectguillfont (\kern-.20em(\kern.20em}\nobreak}% + \def\@fguills{\leavevmode\nobreak + \hbox{\selectguillfont \kern.20em)\kern-.2em)}% + \ifdim\fontdimen\@ne\font>\z@\/\fi}}% +}{% cyr + \newcommand{\ae at guills}{% + \def\selectguillfont{\fontencoding{OT2}\fontfamily{wncyr}\selectfont}% + \def\@oguills{{\selectguillfont\symbol{60}}}% + \def\@fguills{{\selectguillfont\symbol{62}}}} +}{% pl + \newcommand{\ae at guills}{% + \def\selectguillfont{\fontencoding{OT4}\fontfamily{cmr}\selectfont}% + \def\@oguills{{\selectguillfont\symbol{174}}}% + \def\@fguills{{\selectguillfont\symbol{175}}}} +} + + +\AtBeginDocument{% + \ifx\GOfrench\undefined + \aeguills + \else + \let\aeguill at GOfrench\GOfrench + \gdef\GOfrench{\aeguill at GOfrench \aeguills}% + \fi + } + +\endinput +% +%~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + diff --git a/talk/icooolps2011/figures/trace-levels.pdf b/talk/icooolps2011/figures/trace-levels.pdf new file mode 100644 index 0000000000000000000000000000000000000000..ac54d57f68c799ef0594e46997952e78d0221a8a GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 23 15:08:20 2011 From: 
commits-noreply at bitbucket.org (cfbolz) Date: Wed, 23 Mar 2011 15:08:20 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: merge Message-ID: <20110323140820.839F72A202D@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3392:673798beefc0 Date: 2011-03-23 15:08 +0100 http://bitbucket.org/pypy/extradoc/changeset/673798beefc0/ Log: merge From commits-noreply at bitbucket.org Wed Mar 23 19:05:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:05:57 +0100 (CET) Subject: [pypy-svn] pypy default: Tweaks to the distutils build_ext command on win32 Message-ID: <20110323180557.1EACB2A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42869:5002f6e9f7d5 Date: 2011-03-23 11:29 +0100 http://bitbucket.org/pypy/pypy/changeset/5002f6e9f7d5/ Log: Tweaks to the distutils build_ext command on win32 to reflect pypy directories layout diff --git a/lib-python/modified-2.7.0/distutils/command/build_ext.py b/lib-python/modified-2.7.0/distutils/command/build_ext.py --- a/lib-python/modified-2.7.0/distutils/command/build_ext.py +++ b/lib-python/modified-2.7.0/distutils/command/build_ext.py @@ -184,7 +184,7 @@ # the 'libs' directory is for binary installs - we assume that # must be the *native* platform. But we don't really support # cross-compiling via a binary install anyway, so we let it go. 
- self.library_dirs.append(os.path.join(sys.exec_prefix, 'libs')) + self.library_dirs.append(os.path.join(sys.exec_prefix, 'include')) if self.debug: self.build_temp = os.path.join(self.build_temp, "Debug") else: @@ -192,8 +192,13 @@ # Append the source distribution include and library directories, # this allows distutils on windows to work in the source tree - self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC')) - if MSVC_VERSION == 9: + if 0: + # pypy has no PC directory + self.include_dirs.append(os.path.join(sys.exec_prefix, 'PC')) + if 1: + # pypy has no PCBuild directory + pass + elif MSVC_VERSION == 9: # Use the .lib files for the correct architecture if self.plat_name == 'win32': suffix = '' @@ -695,24 +700,14 @@ shared extension. On most platforms, this is just 'ext.libraries'; on Windows and OS/2, we add the Python library (eg. python20.dll). """ - # The python library is always needed on Windows. For MSVC, this - # is redundant, since the library is mentioned in a pragma in - # pyconfig.h that MSVC groks. The other Windows compilers all seem - # to need it mentioned explicitly, though, so that's what we do. - # Append '_d' to the python import library on debug builds. + # The python library is always needed on Windows. 
if sys.platform == "win32": - from distutils.msvccompiler import MSVCCompiler - if not isinstance(self.compiler, MSVCCompiler): - template = "python%d%d" - if self.debug: - template = template + '_d' - pythonlib = (template % - (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) - # don't extend ext.libraries, it may be shared with other - # extensions, it is a reference to the original list - return ext.libraries + [pythonlib] - else: - return ext.libraries + template = "python%d%d" + pythonlib = (template % + (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff)) + # don't extend ext.libraries, it may be shared with other + # extensions, it is a reference to the original list + return ext.libraries + [pythonlib] elif sys.platform == "os2emx": # EMX/GCC requires the python library explicitly, and I # believe VACPP does as well (though not confirmed) - AIM Apr01 From commits-noreply at bitbucket.org Wed Mar 23 19:05:57 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:05:57 +0100 (CET) Subject: [pypy-svn] pypy default: VS10 requires the /MANIFEST option Message-ID: <20110323180557.AB8302A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42870:ec11c6271773 Date: 2011-03-23 11:30 +0100 http://bitbucket.org/pypy/pypy/changeset/ec11c6271773/ Log: VS10 requires the /MANIFEST option diff --git a/lib-python/modified-2.7.0/distutils/msvc9compiler.py b/lib-python/modified-2.7.0/distutils/msvc9compiler.py --- a/lib-python/modified-2.7.0/distutils/msvc9compiler.py +++ b/lib-python/modified-2.7.0/distutils/msvc9compiler.py @@ -644,6 +644,7 @@ temp_manifest = os.path.join( build_temp, os.path.basename(output_filename) + ".manifest") + ld_args.append('/MANIFEST') ld_args.append('/MANIFESTFILE:' + temp_manifest) if extra_preargs: From commits-noreply at bitbucket.org Wed Mar 23 19:05:58 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:05:58 +0100 (CET) Subject: [pypy-svn] pypy default: Add empty 
header files to please some extension modules Message-ID: <20110323180558.DFBA02A2030@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42871:abb211dd76dc Date: 2011-03-23 11:30 +0100 http://bitbucket.org/pypy/pypy/changeset/abb211dd76dc/ Log: Add empty header files to please some extension modules diff --git a/pypy/module/cpyext/include/import.h b/pypy/module/cpyext/include/import.h new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/include/import.h @@ -0,0 +1,1 @@ +/* empty */ diff --git a/pypy/module/cpyext/include/abstract.h b/pypy/module/cpyext/include/abstract.h new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/include/abstract.h @@ -0,0 +1,1 @@ +/* empty */ diff --git a/pypy/module/cpyext/include/longintrepr.h b/pypy/module/cpyext/include/longintrepr.h new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/include/longintrepr.h @@ -0,0 +1,1 @@ +/* empty */ From commits-noreply at bitbucket.org Wed Mar 23 19:06:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:01 +0100 (CET) Subject: [pypy-svn] pypy default: Move PyFile_* function to a new pyfile.py Message-ID: <20110323180601.4B8222A2030@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42872:df872fa8f4d0 Date: 2011-03-23 11:57 +0100 http://bitbucket.org/pypy/pypy/changeset/df872fa8f4d0/ Log: Move PyFile_* function to a new pyfile.py diff --git a/pypy/module/cpyext/test/test_object.py b/pypy/module/cpyext/test/test_object.py --- a/pypy/module/cpyext/test/test_object.py +++ b/pypy/module/cpyext/test/test_object.py @@ -3,9 +3,8 @@ from pypy.module.cpyext.test.test_api import BaseApiTest from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase from pypy.rpython.lltypesystem import rffi, lltype -from pypy.module.cpyext.api import Py_LT, Py_LE, Py_NE, Py_EQ,\ - Py_GE, Py_GT, fopen, fclose, fwrite -from pypy.tool.udir import udir +from pypy.module.cpyext.api import ( + Py_LT, Py_LE, Py_NE, Py_EQ, Py_GE, 
Py_GT) class TestObject(BaseApiTest): def test_IsTrue(self, space, api): @@ -181,52 +180,6 @@ assert api.PyObject_Unicode(space.wrap("\xe9")) is None api.PyErr_Clear() - def test_file_fromstring(self, space, api): - filename = rffi.str2charp(str(udir / "_test_file")) - mode = rffi.str2charp("wb") - w_file = api.PyFile_FromString(filename, mode) - rffi.free_charp(filename) - rffi.free_charp(mode) - - assert api.PyFile_Check(w_file) - assert api.PyFile_CheckExact(w_file) - assert not api.PyFile_Check(space.wrap("text")) - - space.call_method(w_file, "write", space.wrap("text")) - space.call_method(w_file, "close") - assert (udir / "_test_file").read() == "text" - - def test_file_getline(self, space, api): - filename = rffi.str2charp(str(udir / "_test_file")) - - mode = rffi.str2charp("w") - w_file = api.PyFile_FromString(filename, mode) - space.call_method(w_file, "write", - space.wrap("line1\nline2\nline3\nline4")) - space.call_method(w_file, "close") - - rffi.free_charp(mode) - mode = rffi.str2charp("r") - w_file = api.PyFile_FromString(filename, mode) - rffi.free_charp(filename) - rffi.free_charp(mode) - - w_line = api.PyFile_GetLine(w_file, 0) - assert space.str_w(w_line) == "line1\n" - - w_line = api.PyFile_GetLine(w_file, 4) - assert space.str_w(w_line) == "line" - - w_line = api.PyFile_GetLine(w_file, 0) - assert space.str_w(w_line) == "2\n" - - # XXX We ought to raise an EOFError here, but don't - w_line = api.PyFile_GetLine(w_file, -1) - # assert api.PyErr_Occurred() is space.w_EOFError - assert space.str_w(w_line) == "line3\n" - - space.call_method(w_file, "close") - class AppTestObject(AppTestCpythonExtensionBase): def setup_class(cls): AppTestCpythonExtensionBase.setup_class.im_func(cls) diff --git a/pypy/module/cpyext/test/test_pyfile.py b/pypy/module/cpyext/test/test_pyfile.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/test/test_pyfile.py @@ -0,0 +1,53 @@ +from pypy.module.cpyext.api import fopen, fclose, fwrite +from 
pypy.module.cpyext.test.test_api import BaseApiTest +from pypy.rpython.lltypesystem import rffi, lltype +from pypy.tool.udir import udir +import py + +class TestFile(BaseApiTest): + + def test_file_fromstring(self, space, api): + filename = rffi.str2charp(str(udir / "_test_file")) + mode = rffi.str2charp("wb") + w_file = api.PyFile_FromString(filename, mode) + rffi.free_charp(filename) + rffi.free_charp(mode) + + assert api.PyFile_Check(w_file) + assert api.PyFile_CheckExact(w_file) + assert not api.PyFile_Check(space.wrap("text")) + + space.call_method(w_file, "write", space.wrap("text")) + space.call_method(w_file, "close") + assert (udir / "_test_file").read() == "text" + + def test_file_getline(self, space, api): + filename = rffi.str2charp(str(udir / "_test_file")) + + mode = rffi.str2charp("w") + w_file = api.PyFile_FromString(filename, mode) + space.call_method(w_file, "write", + space.wrap("line1\nline2\nline3\nline4")) + space.call_method(w_file, "close") + + rffi.free_charp(mode) + mode = rffi.str2charp("r") + w_file = api.PyFile_FromString(filename, mode) + rffi.free_charp(filename) + rffi.free_charp(mode) + + w_line = api.PyFile_GetLine(w_file, 0) + assert space.str_w(w_line) == "line1\n" + + w_line = api.PyFile_GetLine(w_file, 4) + assert space.str_w(w_line) == "line" + + w_line = api.PyFile_GetLine(w_file, 0) + assert space.str_w(w_line) == "2\n" + + # XXX We ought to raise an EOFError here, but don't + w_line = api.PyFile_GetLine(w_file, -1) + # assert api.PyErr_Occurred() is space.w_EOFError + assert space.str_w(w_line) == "line3\n" + + space.call_method(w_file, "close") diff --git a/pypy/module/cpyext/__init__.py b/pypy/module/cpyext/__init__.py --- a/pypy/module/cpyext/__init__.py +++ b/pypy/module/cpyext/__init__.py @@ -50,6 +50,7 @@ import pypy.module.cpyext.pypyintf import pypy.module.cpyext.memoryobject import pypy.module.cpyext.codecs +import pypy.module.cpyext.pyfile # now that all rffi_platform.Struct types are registered, configure them 
api.configure_types() diff --git a/pypy/module/cpyext/object.py b/pypy/module/cpyext/object.py --- a/pypy/module/cpyext/object.py +++ b/pypy/module/cpyext/object.py @@ -2,13 +2,12 @@ from pypy.module.cpyext.api import ( cpython_api, generic_cpy_call, CANNOT_FAIL, Py_ssize_t, Py_ssize_tP, PyVarObject, Py_TPFLAGS_HEAPTYPE, Py_LT, Py_LE, Py_EQ, Py_NE, Py_GT, - Py_GE, CONST_STRING, FILEP, fwrite, build_type_checkers) + Py_GE, CONST_STRING, FILEP, fwrite) from pypy.module.cpyext.pyobject import ( PyObject, PyObjectP, create_ref, from_ref, Py_IncRef, Py_DecRef, track_reference, get_typedescr, RefcountState) from pypy.module.cpyext.typeobject import PyTypeObjectPtr from pypy.module.cpyext.pyerrors import PyErr_NoMemory, PyErr_BadInternalCall -from pypy.module._file.interp_file import W_File from pypy.objspace.std.objectobject import W_ObjectObject from pypy.objspace.std.typeobject import W_TypeObject from pypy.interpreter.error import OperationError @@ -429,40 +428,3 @@ rffi.free_nonmovingbuffer(data, buf) return 0 -PyFile_Check, PyFile_CheckExact = build_type_checkers("File", W_File) - - at cpython_api([PyObject, rffi.INT_real], PyObject) -def PyFile_GetLine(space, w_obj, n): - """ - Equivalent to p.readline([n]), this function reads one line from the - object p. p may be a file object or any object with a readline() - method. If n is 0, exactly one line is read, regardless of the length of - the line. If n is greater than 0, no more than n bytes will be read - from the file; a partial line can be returned. In both cases, an empty string - is returned if the end of the file is reached immediately. 
If n is less than - 0, however, one line is read regardless of length, but EOFError is - raised if the end of the file is reached immediately.""" - try: - w_readline = space.getattr(w_obj, space.wrap('readline')) - except OperationError: - raise OperationError( - space.w_TypeError, space.wrap( - "argument must be a file, or have a readline() method.")) - - n = rffi.cast(lltype.Signed, n) - if space.is_true(space.gt(space.wrap(n), space.wrap(0))): - return space.call_function(w_readline, space.wrap(n)) - elif space.is_true(space.lt(space.wrap(n), space.wrap(0))): - return space.call_function(w_readline) - else: - # XXX Raise EOFError as specified - return space.call_function(w_readline) - at cpython_api([CONST_STRING, CONST_STRING], PyObject) -def PyFile_FromString(space, filename, mode): - """ - On success, return a new file object that is opened on the file given by - filename, with a file mode given by mode, where mode has the same - semantics as the standard C routine fopen(). On failure, return NULL.""" - w_filename = space.wrap(rffi.charp2str(filename)) - w_mode = space.wrap(rffi.charp2str(mode)) - return space.call_method(space.builtin, 'file', w_filename, w_mode) diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/pyfile.py @@ -0,0 +1,46 @@ +from pypy.rpython.lltypesystem import rffi, lltype +from pypy.module.cpyext.api import ( + cpython_api, CONST_STRING, build_type_checkers) +from pypy.module.cpyext.pyobject import ( + PyObject) +from pypy.interpreter.error import OperationError +from pypy.module._file.interp_file import W_File + +PyFile_Check, PyFile_CheckExact = build_type_checkers("File", W_File) + + at cpython_api([PyObject, rffi.INT_real], PyObject) +def PyFile_GetLine(space, w_obj, n): + """ + Equivalent to p.readline([n]), this function reads one line from the + object p. p may be a file object or any object with a readline() + method. 
If n is 0, exactly one line is read, regardless of the length of + the line. If n is greater than 0, no more than n bytes will be read + from the file; a partial line can be returned. In both cases, an empty string + is returned if the end of the file is reached immediately. If n is less than + 0, however, one line is read regardless of length, but EOFError is + raised if the end of the file is reached immediately.""" + try: + w_readline = space.getattr(w_obj, space.wrap('readline')) + except OperationError: + raise OperationError( + space.w_TypeError, space.wrap( + "argument must be a file, or have a readline() method.")) + + n = rffi.cast(lltype.Signed, n) + if space.is_true(space.gt(space.wrap(n), space.wrap(0))): + return space.call_function(w_readline, space.wrap(n)) + elif space.is_true(space.lt(space.wrap(n), space.wrap(0))): + return space.call_function(w_readline) + else: + # XXX Raise EOFError as specified + return space.call_function(w_readline) + + at cpython_api([CONST_STRING, CONST_STRING], PyObject) +def PyFile_FromString(space, filename, mode): + """ + On success, return a new file object that is opened on the file given by + filename, with a file mode given by mode, where mode has the same + semantics as the standard C routine fopen(). 
On failure, return NULL.""" + w_filename = space.wrap(rffi.charp2str(filename)) + w_mode = space.wrap(rffi.charp2str(mode)) + return space.call_method(space.builtin, 'file', w_filename, w_mode) From commits-noreply at bitbucket.org Wed Mar 23 19:06:02 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:02 +0100 (CET) Subject: [pypy-svn] pypy default: Add stubs for PyFile_Fromfile and PyFile_SetBufSize Message-ID: <20110323180602.4538C2A2030@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42873:21bd5083cac8 Date: 2011-03-23 13:00 +0100 http://bitbucket.org/pypy/pypy/changeset/21bd5083cac8/ Log: Add stubs for PyFile_Fromfile and PyFile_SetBufSize diff --git a/pypy/module/cpyext/test/test_pyfile.py b/pypy/module/cpyext/test/test_pyfile.py --- a/pypy/module/cpyext/test/test_pyfile.py +++ b/pypy/module/cpyext/test/test_pyfile.py @@ -2,7 +2,7 @@ from pypy.module.cpyext.test.test_api import BaseApiTest from pypy.rpython.lltypesystem import rffi, lltype from pypy.tool.udir import udir -import py +import pytest class TestFile(BaseApiTest): @@ -51,3 +51,12 @@ assert space.str_w(w_line) == "line3\n" space.call_method(w_file, "close") + + @pytest.mark.xfail + def test_file_fromfile(self, space, api): + api.PyFile_Fromfile() + + @pytest.mark.xfail + def test_file_setbufsize(self, space, api): + api.PyFile_SetBufSize() + diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -1,6 +1,6 @@ from pypy.rpython.lltypesystem import rffi, lltype from pypy.module.cpyext.api import ( - cpython_api, CONST_STRING, build_type_checkers) + cpython_api, CONST_STRING, FILEP, build_type_checkers) from pypy.module.cpyext.pyobject import ( PyObject) from pypy.interpreter.error import OperationError @@ -44,3 +44,16 @@ w_filename = space.wrap(rffi.charp2str(filename)) w_mode = space.wrap(rffi.charp2str(mode)) return space.call_method(space.builtin, 'file', 
w_filename, w_mode) + + at cpython_api([FILEP, CONST_STRING, CONST_STRING, rffi.VOIDP], PyObject) +def PyFile_FromFile(space, fp, name, mode, close): + """Create a new PyFileObject from the already-open standard C file + pointer, fp. The function close will be called when the file should be + closed. Return NULL on failure.""" + raise NotImplementedError + + at cpython_api([PyObject, rffi.INT_real], lltype.Void) +def PyFile_SetBufSize(space, w_file, n): + """Available on systems with setvbuf() only. This should only be called + immediately after file object creation.""" + raise NotImplementedError From commits-noreply at bitbucket.org Wed Mar 23 19:06:03 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:03 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: implement _PyLong_FromByteArray Message-ID: <20110323180603.A06242A2039@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42874:6b73431fa19f Date: 2011-03-23 14:26 +0100 http://bitbucket.org/pypy/pypy/changeset/6b73431fa19f/ Log: cpyext: implement _PyLong_FromByteArray diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py --- a/pypy/module/cpyext/longobject.py +++ b/pypy/module/cpyext/longobject.py @@ -4,6 +4,7 @@ from pypy.objspace.std.longobject import W_LongObject from pypy.interpreter.error import OperationError from pypy.module.cpyext.intobject import PyInt_AsUnsignedLongMask +from pypy.rlib.rbigint import rbigint PyLong_Check, PyLong_CheckExact = build_type_checkers("Long") @@ -177,4 +178,28 @@ assert isinstance(w_long, W_LongObject) return w_long.num.sign + at cpython_api([CONST_STRING, rffi.SIZE_T, rffi.INT_real, rffi.INT_real], PyObject) +def _PyLong_FromByteArray(space, bytes, n, little_endian, signed): + s = rffi.charpsize2str(bytes, n) + result = rbigint() + negative = False + for i in range(0, n): + if little_endian: + c = ord(s[i]) + else: + c = ord(s[n - i - 1]) + if i == 0 and signed and c & 0x80: + negative = True + if 
negative: + c = c ^ 0xFF + digit = rbigint.fromint(c) + + result = result.lshift(8) + result = result.add(digit) + + if negative: + result = result.neg() + + return space.newlong_from_rbigint(result) + diff --git a/pypy/module/cpyext/test/test_longobject.py b/pypy/module/cpyext/test/test_longobject.py --- a/pypy/module/cpyext/test/test_longobject.py +++ b/pypy/module/cpyext/test/test_longobject.py @@ -144,3 +144,20 @@ """), ]) assert module.from_string() == 0x1234 + + def test_frombytearray(self): + module = self.import_extension('foo', [ + ("from_bytearray", "METH_VARARGS", + """ + int little_endian, is_signed; + if (!PyArg_ParseTuple(args, "ii", &little_endian, &is_signed)) + return NULL; + return _PyLong_FromByteArray("\x9A\xBC", 2, + little_endian, is_signed); + """), + ]) + assert module.from_bytearray(True, False) == 0x9ABC + assert module.from_bytearray(True, True) == -0x6543 + assert module.from_bytearray(False, False) == 0xBC9A + assert module.from_bytearray(False, True) == -0x4365 + From commits-noreply at bitbucket.org Wed Mar 23 19:06:09 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:09 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: implement PySys_WriteStdout, PySys_WriteStderr Message-ID: <20110323180609.7E2B52A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42875:2cb198326c07 Date: 2011-03-23 14:52 +0100 http://bitbucket.org/pypy/pypy/changeset/2cb198326c07/ Log: cpyext: implement PySys_WriteStdout, PySys_WriteStderr diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -75,6 +75,9 @@ error indicator temporarily; use PyErr_Fetch() to save the current exception state.""" state = space.fromcache(State) + if w_type is None: + state.clear_exception() + return state.set_exception(OperationError(w_type, w_value)) Py_DecRef(space, w_type) Py_DecRef(space, w_value) diff --git 
a/pypy/module/cpyext/api.py b/pypy/module/cpyext/api.py --- a/pypy/module/cpyext/api.py +++ b/pypy/module/cpyext/api.py @@ -314,6 +314,7 @@ 'Py_BuildValue', 'Py_VaBuildValue', 'PyTuple_Pack', 'PyErr_Format', 'PyErr_NewException', 'PyErr_NewExceptionWithDoc', + 'PySys_WriteStdout', 'PySys_WriteStderr', 'PyEval_CallFunction', 'PyEval_CallMethod', 'PyObject_CallFunction', 'PyObject_CallMethod', 'PyObject_CallFunctionObjArgs', 'PyObject_CallMethodObjArgs', @@ -883,6 +884,7 @@ source_dir / "stringobject.c", source_dir / "mysnprintf.c", source_dir / "pythonrun.c", + source_dir / "sysmodule.c", source_dir / "bufferobject.c", source_dir / "object.c", source_dir / "cobject.c", diff --git a/pypy/module/cpyext/src/sysmodule.c b/pypy/module/cpyext/src/sysmodule.c new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/src/sysmodule.c @@ -0,0 +1,103 @@ +#include + +/* Reimplementation of PyFile_WriteString() no calling indirectly + PyErr_CheckSignals(): avoid the call to PyObject_Str(). */ + +static int +sys_pyfile_write_unicode(PyObject *unicode, PyObject *file) +{ + PyObject *writer = NULL, *args = NULL, *result = NULL; + int err; + + if (file == NULL) + return -1; + + writer = PyObject_GetAttrString(file, "write"); + if (writer == NULL) + goto error; + + args = PyTuple_Pack(1, unicode); + if (args == NULL) + goto error; + + result = PyEval_CallObject(writer, args); + if (result == NULL) { + goto error; + } else { + err = 0; + goto finally; + } + +error: + err = -1; +finally: + Py_XDECREF(writer); + Py_XDECREF(args); + Py_XDECREF(result); + return err; +} + +static int +sys_pyfile_write(const char *text, PyObject *file) +{ + PyObject *unicode = NULL; + int err; + + if (file == NULL) + return -1; + + unicode = PyUnicode_FromString(text); + if (unicode == NULL) + return -1; + + err = sys_pyfile_write_unicode(unicode, file); + Py_DECREF(unicode); + return err; +} + +/* APIs to write to sys.stdout or sys.stderr using a printf-like interface. 
+ */ + +static void +sys_write(char *name, FILE *fp, const char *format, va_list va) +{ + PyObject *file; + PyObject *error_type, *error_value, *error_traceback; + char buffer[1001]; + int written; + + PyErr_Fetch(&error_type, &error_value, &error_traceback); + file = PySys_GetObject(name); + written = vsnprintf(buffer, sizeof(buffer), format, va); + if (sys_pyfile_write(buffer, file) != 0) { + PyErr_Clear(); + fputs(buffer, fp); + } + if (written < 0 || (size_t)written >= sizeof(buffer)) { + const char *truncated = "... truncated"; + if (sys_pyfile_write(truncated, file) != 0) + fputs(truncated, fp); + } + PyErr_Restore(error_type, error_value, error_traceback); +} + +void +PySys_WriteStdout(const char *format, ...) +{ + va_list va; + + va_start(va, format); + sys_write("stdout", stdout, format, va); + va_end(va); +} + +void +PySys_WriteStderr(const char *format, ...) +{ + va_list va; + + va_start(va, format); + sys_write("stderr", stderr, format, va); + va_end(va); +} + diff --git a/pypy/module/cpyext/include/Python.h b/pypy/module/cpyext/include/Python.h --- a/pypy/module/cpyext/include/Python.h +++ b/pypy/module/cpyext/include/Python.h @@ -102,6 +102,7 @@ #include "modsupport.h" #include "pythonrun.h" #include "pyerrors.h" +#include "sysmodule.h" #include "stringobject.h" #include "descrobject.h" #include "tupleobject.h" diff --git a/pypy/module/cpyext/test/test_sysmodule.py b/pypy/module/cpyext/test/test_sysmodule.py --- a/pypy/module/cpyext/test/test_sysmodule.py +++ b/pypy/module/cpyext/test/test_sysmodule.py @@ -14,6 +14,21 @@ assert module.get("excepthook") assert not module.get("spam_spam_spam") + def test_writestdout(self): + module = self.import_extension('foo', [ + ("writestdout", "METH_NOARGS", + """ + PySys_WriteStdout("format: %d\\n", 42); + Py_RETURN_NONE; + """)]) + import sys, StringIO + sys.stdout = StringIO.StringIO() + try: + module.writestdout() + assert sys.stdout.getvalue() == "format: 42\n" + finally: + sys.stdout = sys.__stdout__ + class 
TestSysModule(BaseApiTest): def test_sysmodule(self, space, api): buf = rffi.str2charp("last_tb") diff --git a/pypy/module/cpyext/include/sysmodule.h b/pypy/module/cpyext/include/sysmodule.h new file mode 100644 --- /dev/null +++ b/pypy/module/cpyext/include/sysmodule.h @@ -0,0 +1,13 @@ +#ifndef Py_SYSMODULE_H +#define Py_SYSMODULE_H +#ifdef __cplusplus +extern "C" { +#endif + +PyAPI_FUNC(void) PySys_WriteStdout(const char *format, ...); +PyAPI_FUNC(void) PySys_WriteStderr(const char *format, ...); + +#ifdef __cplusplus +} +#endif +#endif /* !Py_SYSMODULE_H */ From commits-noreply at bitbucket.org Wed Mar 23 19:06:10 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:10 +0100 (CET) Subject: [pypy-svn] pypy default: On Windows, Python.h defines snprintf and vsnprintf... Message-ID: <20110323180610.6F4082A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42876:0ab786e2a9ee Date: 2011-03-23 14:59 +0100 http://bitbucket.org/pypy/pypy/changeset/0ab786e2a9ee/ Log: On Windows, Python.h defines snprintf and vsnprintf... diff --git a/pypy/module/cpyext/include/pyerrors.h b/pypy/module/cpyext/include/pyerrors.h --- a/pypy/module/cpyext/include/pyerrors.h +++ b/pypy/module/cpyext/include/pyerrors.h @@ -15,6 +15,20 @@ PyObject *PyErr_NewExceptionWithDoc(char *name, char *doc, PyObject *base, PyObject *dict); PyObject *PyErr_Format(PyObject *exception, const char *format, ...); +/* These APIs aren't really part of the error implementation, but + often needed to format error messages; the native C lib APIs are + not available on all platforms, which is why we provide emulations + for those platforms in Python/mysnprintf.c, + WARNING: The return value of snprintf varies across platforms; do + not rely on any particular behavior; eventually the C99 defn may + be reliable. 
+*/ +#if defined(MS_WIN32) && !defined(HAVE_SNPRINTF) +# define HAVE_SNPRINTF +# define snprintf _snprintf +# define vsnprintf _vsnprintf +#endif + #ifdef __cplusplus } #endif From commits-noreply at bitbucket.org Wed Mar 23 19:06:12 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:12 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: implement PyFile_WriteString Message-ID: <20110323180612.940342A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42877:00a449667a1c Date: 2011-03-23 15:09 +0100 http://bitbucket.org/pypy/pypy/changeset/00a449667a1c/ Log: cpyext: implement PyFile_WriteString diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -850,13 +850,6 @@ successful invocation of Py_EnterRecursiveCall().""" raise NotImplementedError - at cpython_api([FILE, rffi.CCHARP, rffi.CCHARP, rffi.INT_real], PyObject) -def PyFile_FromFile(space, fp, name, mode, close): - """Create a new PyFileObject from the already-open standard C file - pointer, fp. The function close will be called when the file should be - closed. Return NULL on failure.""" - raise NotImplementedError - @cpython_api([PyFileObject], lltype.Void) def PyFile_IncUseCount(space, p): """Increments the PyFileObject's internal use count to indicate @@ -899,12 +892,6 @@ borrow_from() raise NotImplementedError - at cpython_api([PyFileObject, rffi.INT_real], lltype.Void) -def PyFile_SetBufSize(space, p, n): - """Available on systems with setvbuf() only. This should only be called - immediately after file object creation.""" - raise NotImplementedError - @cpython_api([PyFileObject, rffi.CCHARP], rffi.INT_real, error=0) def PyFile_SetEncoding(space, p, enc): """Set the file's encoding for Unicode output to enc. 
Return 1 on success and 0 @@ -941,12 +928,6 @@ appropriate exception will be set.""" raise NotImplementedError - at cpython_api([rffi.CCHARP, PyObject], rffi.INT_real, error=-1) -def PyFile_WriteString(space, s, p): - """Write string s to file object p. Return 0 on success or -1 on - failure; the appropriate exception will be set.""" - raise NotImplementedError - @cpython_api([], PyObject) def PyFloat_GetInfo(space): """Return a structseq instance which contains information about the @@ -2336,28 +2317,6 @@ (: on Unix, ; on Windows).""" raise NotImplementedError - at cpython_api([rffi.CCHARP, ], lltype.Void) -def PySys_WriteStdout(space, format): - """Write the output string described by format to sys.stdout. No - exceptions are raised, even if truncation occurs (see below). - - format should limit the total size of the formatted output string to - 1000 bytes or less -- after 1000 bytes, the output string is truncated. - In particular, this means that no unrestricted "%s" formats should occur; - these should be limited using "%.s" where is a decimal number - calculated so that plus the maximum size of other formatted text does not - exceed 1000 bytes. Also watch out for "%f", which can print hundreds of - digits for very large numbers. - - If a problem occurs, or sys.stdout is unset, the formatted message - is written to the real (C level) stdout.""" - raise NotImplementedError - - at cpython_api([rffi.CCHARP, ], lltype.Void) -def PySys_WriteStderr(space, format): - """As above, but write to sys.stderr or stderr instead.""" - raise NotImplementedError - @cpython_api([rffi.INT_real], lltype.Void) def Py_Exit(space, status): """Exit the current process. 
This calls Py_Finalize() and then calls the diff --git a/pypy/module/cpyext/test/test_pyfile.py b/pypy/module/cpyext/test/test_pyfile.py --- a/pypy/module/cpyext/test/test_pyfile.py +++ b/pypy/module/cpyext/test/test_pyfile.py @@ -60,3 +60,13 @@ def test_file_setbufsize(self, space, api): api.PyFile_SetBufSize() + def test_file_writestring(self, space, api, capfd): + s = rffi.str2charp("test\n") + try: + api.PyFile_WriteString(s, space.sys.get("stdout")) + finally: + rffi.free_charp(s) + out, err = capfd.readouterr() + out = out.replace('\r\n', '\n') + assert out == "test\n" + diff --git a/pypy/module/cpyext/pyfile.py b/pypy/module/cpyext/pyfile.py --- a/pypy/module/cpyext/pyfile.py +++ b/pypy/module/cpyext/pyfile.py @@ -57,3 +57,12 @@ """Available on systems with setvbuf() only. This should only be called immediately after file object creation.""" raise NotImplementedError + + at cpython_api([CONST_STRING, PyObject], rffi.INT_real, error=-1) +def PyFile_WriteString(space, s, w_p): + """Write string s to file object p. 
Return 0 on success or -1 on + failure; the appropriate exception will be set.""" + w_s = space.wrap(rffi.charp2str(s)) + space.call_method(w_p, "write", w_s) + return 0 + From commits-noreply at bitbucket.org Wed Mar 23 19:06:13 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:13 +0100 (CET) Subject: [pypy-svn] pypy default: Expose functions already implemented in src/modsupport.c Message-ID: <20110323180613.7419C2A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42878:eddcd9024ed0 Date: 2011-03-23 15:16 +0100 http://bitbucket.org/pypy/pypy/changeset/eddcd9024ed0/ Log: Expose functions already implemented in src/modsupport.c diff --git a/pypy/module/cpyext/include/modsupport.h b/pypy/module/cpyext/include/modsupport.h --- a/pypy/module/cpyext/include/modsupport.h +++ b/pypy/module/cpyext/include/modsupport.h @@ -38,7 +38,9 @@ PyObject * Py_BuildValue(const char *, ...); +PyObject * Py_VaBuildValue(const char *, va_list); PyObject * _Py_BuildValue_SizeT(const char *, ...); +PyObject * _Py_VaBuildValue_SizeT(const char *, va_list); int _PyArg_NoKeywords(const char *funcname, PyObject *kw); int PyArg_UnpackTuple(PyObject *args, const char *name, Py_ssize_t min, Py_ssize_t max, ...); From commits-noreply at bitbucket.org Wed Mar 23 19:06:14 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:14 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: Expose Py_FrozenFlag with a dummy value. Message-ID: <20110323180614.0646D2A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42879:e9a834700a41 Date: 2011-03-23 15:34 +0100 http://bitbucket.org/pypy/pypy/changeset/e9a834700a41/ Log: cpyext: Expose Py_FrozenFlag with a dummy value. 
diff --git a/pypy/module/cpyext/include/pythonrun.h b/pypy/module/cpyext/include/pythonrun.h --- a/pypy/module/cpyext/include/pythonrun.h +++ b/pypy/module/cpyext/include/pythonrun.h @@ -11,6 +11,8 @@ /* the -3 option will probably not be implemented */ #define Py_Py3kWarningFlag 0 +#define Py_FrozenFlag 0 + #ifdef __cplusplus } #endif From commits-noreply at bitbucket.org Wed Mar 23 19:06:14 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:14 +0100 (CET) Subject: [pypy-svn] pypy default: Translation fix Message-ID: <20110323180614.8D7F12A202E@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42880:c2b4e2be8d51 Date: 2011-03-23 17:17 +0100 http://bitbucket.org/pypy/pypy/changeset/c2b4e2be8d51/ Log: Translation fix diff --git a/pypy/module/cpyext/longobject.py b/pypy/module/cpyext/longobject.py --- a/pypy/module/cpyext/longobject.py +++ b/pypy/module/cpyext/longobject.py @@ -181,6 +181,9 @@ @cpython_api([CONST_STRING, rffi.SIZE_T, rffi.INT_real, rffi.INT_real], PyObject) def _PyLong_FromByteArray(space, bytes, n, little_endian, signed): s = rffi.charpsize2str(bytes, n) + little_endian = rffi.cast(lltype.Signed, little_endian) + signed = rffi.cast(lltype.Signed, signed) + result = rbigint() negative = False From commits-noreply at bitbucket.org Wed Mar 23 19:06:17 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:17 +0100 (CET) Subject: [pypy-svn] pypy default: Let extension modules call PyThreadState_Swap() and PyEval_AcquireThread(), Message-ID: <20110323180617.7146E2A203A@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42881:35e06cd3a334 Date: 2011-03-23 17:27 +0100 http://bitbucket.org/pypy/pypy/changeset/35e06cd3a334/ Log: Let extension modules call PyThreadState_Swap() and PyEval_AcquireThread(), in cpyext they are mostly empty functions. 
diff --git a/pypy/module/cpyext/stubsactive.py b/pypy/module/cpyext/stubsactive.py --- a/pypy/module/cpyext/stubsactive.py +++ b/pypy/module/cpyext/stubsactive.py @@ -34,43 +34,7 @@ PyThreadState_Clear().""" raise NotImplementedError - at cpython_api([PyThreadState], PyThreadState, error=CANNOT_FAIL) -def PyThreadState_Swap(space, tstate): - """Swap the current thread state with the thread state given by the argument - tstate, which may be NULL. The global interpreter lock must be held.""" - raise NotImplementedError - - at cpython_api([PyThreadState], lltype.Void) -def PyEval_AcquireThread(space, tstate): - """Acquire the global interpreter lock and set the current thread state to - tstate, which should not be NULL. The lock must have been created earlier. - If this thread already has the lock, deadlock ensues. This function is not - available when thread support is disabled at compile time.""" - raise NotImplementedError - - at cpython_api([PyThreadState], lltype.Void) -def PyEval_ReleaseThread(space, tstate): - """Reset the current thread state to NULL and release the global interpreter - lock. The lock must have been created earlier and must be held by the current - thread. The tstate argument, which must not be NULL, is only used to check - that it represents the current thread state --- if it isn't, a fatal error is - reported. 
This function is not available when thread support is disabled at - compile time.""" - raise NotImplementedError - @cpython_api([], rffi.INT_real, error=CANNOT_FAIL) def Py_MakePendingCalls(space): return 0 -PyGILState_STATE = rffi.COpaquePtr('PyGILState_STATE', - typedef='PyGILState_STATE', - compilation_info=CConfig._compilation_info_) - - at cpython_api([], PyGILState_STATE, error=CANNOT_FAIL) -def PyGILState_Ensure(space): - return 0 - - at cpython_api([PyGILState_STATE], lltype.Void) -def PyGILState_Release(space, state): - return - diff --git a/pypy/module/cpyext/pystate.py b/pypy/module/cpyext/pystate.py --- a/pypy/module/cpyext/pystate.py +++ b/pypy/module/cpyext/pystate.py @@ -1,5 +1,5 @@ -from pypy.module.cpyext.api import cpython_api, generic_cpy_call, CANNOT_FAIL,\ - cpython_struct +from pypy.module.cpyext.api import ( + cpython_api, generic_cpy_call, CANNOT_FAIL, CConfig, cpython_struct) from pypy.rpython.lltypesystem import rffi, lltype PyInterpreterState = lltype.Ptr(cpython_struct("PyInterpreterState", ())) @@ -77,6 +77,52 @@ state = space.fromcache(InterpreterState) return state.get_thread_state(space) + at cpython_api([PyThreadState], PyThreadState, error=CANNOT_FAIL) +def PyThreadState_Swap(space, tstate): + """Swap the current thread state with the thread state given by the argument + tstate, which may be NULL. The global interpreter lock must be held.""" + # All cpyext calls release and acquire the GIL, so this function has no + # side-effects + if tstate: + return lltype.nullptr(PyThreadState.TO) + else: + state = space.fromcache(InterpreterState) + return state.get_thread_state(space) + + at cpython_api([PyThreadState], lltype.Void) +def PyEval_AcquireThread(space, tstate): + """Acquire the global interpreter lock and set the current thread state to + tstate, which should not be NULL. The lock must have been created earlier. + If this thread already has the lock, deadlock ensues. 
This function is not + available when thread support is disabled at compile time.""" + # All cpyext calls release and acquire the GIL, so this is not necessary. + pass + + at cpython_api([PyThreadState], lltype.Void) +def PyEval_ReleaseThread(space, tstate): + """Reset the current thread state to NULL and release the global interpreter + lock. The lock must have been created earlier and must be held by the current + thread. The tstate argument, which must not be NULL, is only used to check + that it represents the current thread state --- if it isn't, a fatal error is + reported. This function is not available when thread support is disabled at + compile time.""" + # All cpyext calls release and acquire the GIL, so this is not necessary. + pass + +PyGILState_STATE = rffi.COpaquePtr('PyGILState_STATE', + typedef='PyGILState_STATE', + compilation_info=CConfig._compilation_info_) + + at cpython_api([], PyGILState_STATE, error=CANNOT_FAIL) +def PyGILState_Ensure(space): + # All cpyext calls release and acquire the GIL, so this is not necessary. + return 0 + + at cpython_api([PyGILState_STATE], lltype.Void) +def PyGILState_Release(space, state): + # All cpyext calls release and acquire the GIL, so this is not necessary. + return + @cpython_api([], PyInterpreterState, error=CANNOT_FAIL) def PyInterpreterState_Head(space): """Return the interpreter state object at the head of the list of all such objects. diff --git a/pypy/module/cpyext/test/test_pystate.py b/pypy/module/cpyext/test/test_pystate.py --- a/pypy/module/cpyext/test/test_pystate.py +++ b/pypy/module/cpyext/test/test_pystate.py @@ -43,3 +43,16 @@ ts = api.PyThreadState_Get() assert ts.c_interp == api.PyInterpreterState_Head() clear_threadstate(space) + + def test_basic_threadstate_dance(self, space, api): + # Let extension modules call these functions, + # Not sure of the semantics in pypy though. 
+ # (cpyext always acquires and releases the GIL around calls) + tstate = api.PyThreadState_Swap(None) + assert tstate is not None + assert not api.PyThreadState_Swap(tstate) + + api.PyEval_AcquireThread(tstate) + api.PyEval_ReleaseThread(tstate) + + clear_threadstate(space) From commits-noreply at bitbucket.org Wed Mar 23 19:06:21 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 23 Mar 2011 19:06:21 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: implement PyInt_GetMax() and restore support for longs in Py_BuildValue Message-ID: <20110323180621.936442A202D@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42882:5301744fdfc1 Date: 2011-03-23 18:18 +0100 http://bitbucket.org/pypy/pypy/changeset/5301744fdfc1/ Log: cpyext: implement PyInt_GetMax() and restore support for longs in Py_BuildValue diff --git a/pypy/module/cpyext/stubs.py b/pypy/module/cpyext/stubs.py --- a/pypy/module/cpyext/stubs.py +++ b/pypy/module/cpyext/stubs.py @@ -1751,12 +1751,6 @@ """ raise NotImplementedError - at cpython_api([], lltype.Signed, error=CANNOT_FAIL) -def PyInt_GetMax(space): - """Return the system's idea of the largest integer it can handle (LONG_MAX, - as defined in the system header files).""" - raise NotImplementedError - @cpython_api([], rffi.INT_real, error=CANNOT_FAIL) def PyInt_ClearFreeList(space): """Clear the integer free list. 
Return the number of items that could not diff --git a/pypy/module/cpyext/intobject.py b/pypy/module/cpyext/intobject.py --- a/pypy/module/cpyext/intobject.py +++ b/pypy/module/cpyext/intobject.py @@ -5,9 +5,16 @@ cpython_api, build_type_checkers, PyObject, CONST_STRING, CANNOT_FAIL, Py_ssize_t) from pypy.rlib.rarithmetic import r_uint +import sys PyInt_Check, PyInt_CheckExact = build_type_checkers("Int") + at cpython_api([], lltype.Signed, error=CANNOT_FAIL) +def PyInt_GetMax(space): + """Return the system's idea of the largest integer it can handle (LONG_MAX, + as defined in the system header files).""" + return sys.maxint + @cpython_api([lltype.Signed], PyObject) def PyInt_FromLong(space, ival): """Create a new integer object with a value of ival. diff --git a/pypy/module/cpyext/src/modsupport.c b/pypy/module/cpyext/src/modsupport.c --- a/pypy/module/cpyext/src/modsupport.c +++ b/pypy/module/cpyext/src/modsupport.c @@ -241,13 +241,12 @@ case 'I': { - Py_FatalError("I unsupported so far"); - //unsigned int n; - //n = va_arg(*p_va, unsigned int); - //if (n > (unsigned long)PyInt_GetMax()) - // return PyLong_FromUnsignedLong((unsigned long)n); - //else - // return PyInt_FromLong(n); + unsigned int n; + n = va_arg(*p_va, unsigned int); + if (n > (unsigned long)PyInt_GetMax()) + return PyLong_FromUnsignedLong((unsigned long)n); + else + return PyInt_FromLong(n); } case 'n': @@ -260,23 +259,20 @@ case 'k': { - Py_FatalError("Py_BuildValue k unsupported so far\n"); - /* unsigned long n; */ - /* n = va_arg(*p_va, unsigned long); */ - /* if (n > (unsigned long)PyInt_GetMax()) */ - /* return PyLong_FromUnsignedLong(n); */ - /* else */ - /* return PyInt_FromLong(n); */ + unsigned long n; + n = va_arg(*p_va, unsigned long); + if (n > (unsigned long)PyInt_GetMax()) + return PyLong_FromUnsignedLong(n); + else + return PyInt_FromLong(n); } #ifdef HAVE_LONG_LONG case 'L': - Py_FatalError("Py_BuildValue L unsupported for now\n"); - //return 
PyLong_FromLongLong((PY_LONG_LONG)va_arg(*p_va, PY_LONG_LONG)); + return PyLong_FromLongLong((PY_LONG_LONG)va_arg(*p_va, PY_LONG_LONG)); case 'K': - Py_FatalError("Py_BuildValue K unsupported for now\n"); - //return PyLong_FromUnsignedLongLong((PY_LONG_LONG)va_arg(*p_va, unsigned PY_LONG_LONG)); + return PyLong_FromUnsignedLongLong((PY_LONG_LONG)va_arg(*p_va, unsigned PY_LONG_LONG)); #endif #ifdef Py_USING_UNICODE case 'u': From commits-noreply at bitbucket.org Wed Mar 23 20:10:08 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 20:10:08 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: No operations before jit_merge_point. That does apply to getfield as well, Message-ID: <20110323191008.75FC136C203@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42883:e187b3742ccf Date: 2011-03-23 13:09 -0600 http://bitbucket.org/pypy/pypy/changeset/e187b3742ccf/ Log: No operations before jit_merge_point. That does apply to getfield as well, otherwise we get corrupt graphs diff --git a/pypy/module/pypyjit/interp_jit.py b/pypy/module/pypyjit/interp_jit.py --- a/pypy/module/pypyjit/interp_jit.py +++ b/pypy/module/pypyjit/interp_jit.py @@ -68,14 +68,16 @@ def dispatch(self, pycode, next_instr, ec): self = hint(self, access_directly=True) next_instr = r_uint(next_instr) + is_being_profiled = self.is_being_profiled try: while True: pypyjitdriver.jit_merge_point(ec=ec, frame=self, next_instr=next_instr, pycode=pycode, - is_being_profiled=self.is_being_profiled) + is_being_profiled=is_being_profiled) co_code = pycode.co_code self.valuestackdepth = hint(self.valuestackdepth, promote=True) next_instr = self.handle_bytecode(co_code, next_instr, ec) + is_being_profiled = self.is_being_profiled except ExitFrame: return self.popvalue() From commits-noreply at bitbucket.org Wed Mar 23 20:10:09 2011 From: commits-noreply at bitbucket.org (fijal) Date: Wed, 23 Mar 2011 20:10:09 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Make this 
run with recent merge default Message-ID: <20110323191009.1C8CB36C203@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42884:d2c0548f77ca Date: 2011-03-23 13:09 -0600 http://bitbucket.org/pypy/pypy/changeset/d2c0548f77ca/ Log: Make this run with recent merge default diff --git a/pypy/jit/tl/pypyjit_child.py b/pypy/jit/tl/pypyjit_child.py --- a/pypy/jit/tl/pypyjit_child.py +++ b/pypy/jit/tl/pypyjit_child.py @@ -2,7 +2,6 @@ from pypy.rpython.lltypesystem import lltype from pypy.jit.metainterp import warmspot from pypy.module.pypyjit.policy import PyPyJitPolicy -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_NO_UNROLL def run_child(glob, loc): @@ -34,6 +33,5 @@ option.view = True warmspot.jittify_and_run(interp, graph, [], policy=policy, listops=True, CPUClass=CPUClass, - backendopt=True, inline=True, - optimizer=OPTIMIZER_FULL) + backendopt=True, inline=True) From commits-noreply at bitbucket.org Thu Mar 24 01:48:08 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 24 Mar 2011 01:48:08 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Don't look into as_rdict. How this could have ever worked even? Message-ID: <20110324004808.43E4D282BA1@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42885:2656b457b868 Date: 2011-03-23 18:47 -0600 http://bitbucket.org/pypy/pypy/changeset/2656b457b868/ Log: Don't look into as_rdict. How this could have ever worked even? diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -636,6 +636,7 @@ def _clear_fields(self): self.w_obj = None + @jit.dont_look_inside def _as_rdict(self): self.initialize_as_rdict() space = self.space From commits-noreply at bitbucket.org Thu Mar 24 05:40:58 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 24 Mar 2011 05:40:58 +0100 (CET) Subject: [pypy-svn] pypy default: don't repeat strlen ops. 
Message-ID: <20110324044058.B9967282BD9@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42886:3fe1b82365a3 Date: 2011-03-23 23:35 -0400 http://bitbucket.org/pypy/pypy/changeset/3fe1b82365a3/ Log: don't repeat strlen ops. diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -27,7 +27,7 @@ def optimize_loop_1(metainterp_sd, loop, enable_opts, inline_short_preamble=True, retraced=False): - """Optimize loop.operations to remove internal overheadish operations. + """Optimize loop.operations to remove internal overheadish operations. """ optimizations = [] unroll = 'unroll' in enable_opts @@ -43,7 +43,7 @@ if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: optimizations.append(OptSimplify()) - + if inline_short_preamble: optimizations = [OptInlineShortPreamble(retraced)] + optimizations diff --git a/pypy/jit/metainterp/optimizeopt/string.py b/pypy/jit/metainterp/optimizeopt/string.py --- a/pypy/jit/metainterp/optimizeopt/string.py +++ b/pypy/jit/metainterp/optimizeopt/string.py @@ -47,7 +47,7 @@ class __extend__(optimizer.OptValue): """New methods added to the base class OptValue for this file.""" - def getstrlen(self, newoperations, mode): + def getstrlen(self, optimization, mode): if mode is mode_string: s = self.get_constant_string_spec(mode_string) if s is not None: @@ -56,12 +56,12 @@ s = self.get_constant_string_spec(mode_unicode) if s is not None: return ConstInt(len(s)) - if newoperations is None: + if optimization is None: return None self.ensure_nonnull() box = self.force_box() lengthbox = BoxInt() - newoperations.append(ResOperation(mode.STRLEN, [box], lengthbox)) + optimization.emit_operation(ResOperation(mode.STRLEN, [box], lengthbox)) return lengthbox @specialize.arg(1) @@ -72,13 +72,13 @@ else: return None - def string_copy_parts(self, newoperations, targetbox, offsetbox, mode): + 
def string_copy_parts(self, optimization, targetbox, offsetbox, mode): # Copies the pointer-to-string 'self' into the target string # given by 'targetbox', at the specified offset. Returns the offset # at the end of the copy. - lengthbox = self.getstrlen(newoperations, mode) + lengthbox = self.getstrlen(optimization, mode) srcbox = self.force_box() - return copy_str_content(newoperations, srcbox, targetbox, + return copy_str_content(optimization, srcbox, targetbox, CONST_0, offsetbox, lengthbox, mode) @@ -105,13 +105,12 @@ return assert self.source_op is not None self.box = box = self.source_op.result - newoperations = self.optimizer.newoperations - lengthbox = self.getstrlen(newoperations, self.mode) + lengthbox = self.getstrlen(self.optimizer, self.mode) op = ResOperation(self.mode.NEWSTR, [lengthbox], box) if not we_are_translated(): op.name = 'FORCE' - newoperations.append(op) - self.string_copy_parts(newoperations, box, CONST_0, self.mode) + self.optimizer.emit_operation(op) + self.string_copy_parts(self.optimizer, box, CONST_0, self.mode) class VStringPlainValue(VAbstractStringValue): @@ -145,14 +144,14 @@ return mode.emptystr.join([mode.chr(c.box.getint()) for c in self._chars]) - def string_copy_parts(self, newoperations, targetbox, offsetbox, mode): + def string_copy_parts(self, optimizer, targetbox, offsetbox, mode): for i in range(len(self._chars)): charbox = self._chars[i].force_box() - newoperations.append(ResOperation(mode.STRSETITEM, [targetbox, + optimizer.emit_operation(ResOperation(mode.STRSETITEM, [targetbox, offsetbox, charbox], None)) - offsetbox = _int_add(newoperations, offsetbox, CONST_1) + offsetbox = _int_add(optimizer, offsetbox, CONST_1) return offsetbox def get_args_for_fail(self, modifier): @@ -186,16 +185,16 @@ self.left = left self.right = right - def getstrlen(self, newoperations, mode): + def getstrlen(self, optimizer, mode): if self.lengthbox is None: - len1box = self.left.getstrlen(newoperations, mode) + len1box = 
self.left.getstrlen(optimizer, mode) if len1box is None: return None - len2box = self.right.getstrlen(newoperations, mode) + len2box = self.right.getstrlen(optimizer, mode) if len2box is None: return None - self.lengthbox = _int_add(newoperations, len1box, len2box) - # ^^^ may still be None, if newoperations is None + self.lengthbox = _int_add(optimizer, len1box, len2box) + # ^^^ may still be None, if optimizer is None return self.lengthbox @specialize.arg(1) @@ -208,10 +207,10 @@ return None return s1 + s2 - def string_copy_parts(self, newoperations, targetbox, offsetbox, mode): - offsetbox = self.left.string_copy_parts(newoperations, targetbox, + def string_copy_parts(self, optimizer, targetbox, offsetbox, mode): + offsetbox = self.left.string_copy_parts(optimizer, targetbox, offsetbox, mode) - offsetbox = self.right.string_copy_parts(newoperations, targetbox, + offsetbox = self.right.string_copy_parts(optimizer, targetbox, offsetbox, mode) return offsetbox @@ -266,9 +265,9 @@ return s1[start : start + length] return None - def string_copy_parts(self, newoperations, targetbox, offsetbox, mode): - lengthbox = self.getstrlen(newoperations, mode) - return copy_str_content(newoperations, + def string_copy_parts(self, optimizer, targetbox, offsetbox, mode): + lengthbox = self.getstrlen(optimizer, mode) + return copy_str_content(optimizer, self.vstr.force_box(), targetbox, self.vstart.force_box(), offsetbox, lengthbox, mode) @@ -299,7 +298,7 @@ return modifier.make_vstrslice(self.mode is mode_unicode) -def copy_str_content(newoperations, srcbox, targetbox, +def copy_str_content(optimizer, srcbox, targetbox, srcoffsetbox, offsetbox, lengthbox, mode): if isinstance(srcbox, ConstPtr) and isinstance(srcoffsetbox, Const): M = 5 @@ -309,23 +308,23 @@ # up to M characters are done "inline", i.e. with STRGETITEM/STRSETITEM # instead of just a COPYSTRCONTENT. 
for i in range(lengthbox.value): - charbox = _strgetitem(newoperations, srcbox, srcoffsetbox, mode) - srcoffsetbox = _int_add(newoperations, srcoffsetbox, CONST_1) - newoperations.append(ResOperation(mode.STRSETITEM, [targetbox, + charbox = _strgetitem(optimizer, srcbox, srcoffsetbox, mode) + srcoffsetbox = _int_add(optimizer, srcoffsetbox, CONST_1) + optimizer.emit_operation(ResOperation(mode.STRSETITEM, [targetbox, offsetbox, charbox], None)) - offsetbox = _int_add(newoperations, offsetbox, CONST_1) + offsetbox = _int_add(optimizer, offsetbox, CONST_1) else: - nextoffsetbox = _int_add(newoperations, offsetbox, lengthbox) + nextoffsetbox = _int_add(optimizer, offsetbox, lengthbox) op = ResOperation(mode.COPYSTRCONTENT, [srcbox, targetbox, srcoffsetbox, offsetbox, lengthbox], None) - newoperations.append(op) + optimizer.emit_operation(op) offsetbox = nextoffsetbox return offsetbox -def _int_add(newoperations, box1, box2): +def _int_add(optimizer, box1, box2): if isinstance(box1, ConstInt): if box1.value == 0: return box2 @@ -333,23 +332,23 @@ return ConstInt(box1.value + box2.value) elif isinstance(box2, ConstInt) and box2.value == 0: return box1 - if newoperations is None: + if optimizer is None: return None resbox = BoxInt() - newoperations.append(ResOperation(rop.INT_ADD, [box1, box2], resbox)) + optimizer.emit_operation(ResOperation(rop.INT_ADD, [box1, box2], resbox)) return resbox -def _int_sub(newoperations, box1, box2): +def _int_sub(optimizer, box1, box2): if isinstance(box2, ConstInt): if box2.value == 0: return box1 if isinstance(box1, ConstInt): return ConstInt(box1.value - box2.value) resbox = BoxInt() - newoperations.append(ResOperation(rop.INT_SUB, [box1, box2], resbox)) + optimizer.emit_operation(ResOperation(rop.INT_SUB, [box1, box2], resbox)) return resbox -def _strgetitem(newoperations, strbox, indexbox, mode): +def _strgetitem(optimizer, strbox, indexbox, mode): if isinstance(strbox, ConstPtr) and isinstance(indexbox, ConstInt): if mode is 
mode_string: s = strbox.getref(lltype.Ptr(rstr.STR)) @@ -358,7 +357,7 @@ s = strbox.getref(lltype.Ptr(rstr.UNICODE)) return ConstInt(ord(s.chars[indexbox.getint()])) resbox = BoxInt() - newoperations.append(ResOperation(mode.STRGETITEM, [strbox, indexbox], + optimizer.emit_operation(ResOperation(mode.STRGETITEM, [strbox, indexbox], resbox)) return resbox @@ -370,7 +369,7 @@ def reconstruct_for_next_iteration(self, optimizer, valuemap): self.enabled = True return self - + def make_vstring_plain(self, box, source_op, mode): vvalue = VStringPlainValue(self.optimizer, box, source_op, mode) self.make_equal_to(box, vvalue) @@ -431,7 +430,7 @@ value.ensure_nonnull() # if value.is_virtual() and isinstance(value, VStringSliceValue): - fullindexbox = _int_add(self.optimizer.newoperations, + fullindexbox = _int_add(self.optimizer, value.vstart.force_box(), vindex.force_box()) value = value.vstr @@ -441,7 +440,7 @@ if vindex.is_constant(): return value.getitem(vindex.box.getint()) # - resbox = _strgetitem(self.optimizer.newoperations, + resbox = _strgetitem(self.optimizer, value.force_box(),vindex.force_box(), mode) return self.getvalue(resbox) @@ -452,7 +451,7 @@ def _optimize_STRLEN(self, op, mode): value = self.getvalue(op.getarg(0)) - lengthbox = value.getstrlen(self.optimizer.newoperations, mode) + lengthbox = value.getstrlen(self, mode) self.make_equal_to(op.result, self.getvalue(lengthbox)) def optimize_CALL(self, op): @@ -498,13 +497,11 @@ vright = self.getvalue(op.getarg(2)) vleft.ensure_nonnull() vright.ensure_nonnull() - newoperations = self.optimizer.newoperations value = self.make_vstring_concat(op.result, op, mode) value.setup(vleft, vright) return True def opt_call_stroruni_STR_SLICE(self, op, mode): - newoperations = self.optimizer.newoperations vstr = self.getvalue(op.getarg(1)) vstart = self.getvalue(op.getarg(2)) vstop = self.getvalue(op.getarg(3)) @@ -518,14 +515,14 @@ return True # vstr.ensure_nonnull() - lengthbox = _int_sub(newoperations, 
vstop.force_box(), + lengthbox = _int_sub(self.optimizer, vstop.force_box(), vstart.force_box()) # if isinstance(vstr, VStringSliceValue): # double slicing s[i:j][k:l] vintermediate = vstr vstr = vintermediate.vstr - startbox = _int_add(newoperations, + startbox = _int_add(self.optimizer, vintermediate.vstart.force_box(), vstart.force_box()) vstart = self.getvalue(startbox) @@ -574,7 +571,7 @@ l2box = v2.getstrlen(None, mode) if isinstance(l2box, ConstInt): if l2box.value == 0: - lengthbox = v1.getstrlen(self.optimizer.newoperations, mode) + lengthbox = v1.getstrlen(self.optimizer, mode) seo = self.optimizer.send_extra_operation seo(ResOperation(rop.INT_EQ, [lengthbox, CONST_0], resultbox)) return True @@ -609,7 +606,7 @@ op = ResOperation(rop.PTR_EQ, [v1.force_box(), llhelper.CONST_NULL], resultbox) - self.optimizer.newoperations.append(op) + self.optimizer.emit_operation(op) return True # return False @@ -646,7 +643,7 @@ calldescr, func = cic.callinfo_for_oopspec(oopspecindex) op = ResOperation(rop.CALL, [ConstInt(func)] + args, result, descr=calldescr) - self.optimizer.newoperations.append(op) + self.optimizer.emit_operation(op) def propagate_forward(self, op): if not self.enabled: diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -3693,13 +3693,16 @@ guard_true(i1) [] jump(p0) """ - # The dead strlen will be eliminated be the backend. 
- expected = """ + preamble = """ [p0] i0 = strlen(p0) jump(p0) """ - self.optimize_strunicode_loop(ops, expected, expected) + expected = """ + [p0] + jump(p0) + """ + self.optimize_strunicode_loop(ops, expected, preamble) def test_addsub_const(self): ops = """ @@ -5150,7 +5153,21 @@ """ expected = """ [p0] + jump(p0) + """ + self.optimize_loop(ops, expected) + + def test_strlen_repeated(self): + ops = """ + [p0] i0 = strlen(p0) + i1 = strlen(p0) + i2 = int_eq(i0, i1) + guard_true(i2) [] + jump(p0) + """ + expected = """ + [p0] jump(p0) """ self.optimize_loop(ops, expected) diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -174,7 +174,7 @@ def __init__(self): pass # make rpython happy - + def propagate_forward(self, op): raise NotImplementedError @@ -183,7 +183,7 @@ def test_emittable(self, op): return self.is_emittable(op) - + def is_emittable(self, op): return self.next_optimization.test_emittable(op) @@ -239,7 +239,7 @@ def reconstruct_for_next_iteration(self, optimizer=None, valuemap=None): #return self.__class__() raise NotImplementedError - + class Optimizer(Optimization): @@ -275,20 +275,20 @@ else: optimizations = [] self.first_optimization = self - - self.optimizations = optimizations + + self.optimizations = optimizations def force_at_end_of_preamble(self): self.resumedata_memo = resume.ResumeDataLoopMemo(self.metainterp_sd) for o in self.optimizations: o.force_at_end_of_preamble() - + def reconstruct_for_next_iteration(self, optimizer=None, valuemap=None): assert optimizer is None assert valuemap is None valuemap = {} new = Optimizer(self.metainterp_sd, self.loop) - optimizations = [o.reconstruct_for_next_iteration(new, valuemap) for o in + optimizations = [o.reconstruct_for_next_iteration(new, valuemap) for o in self.optimizations] new.set_optimizations(optimizations) @@ -305,7 +305,7 @@ for key, 
value in self.loop_invariant_results.items(): new.loop_invariant_results[key] = \ value.get_reconstructed(new, valuemap) - + new.pure_operations = self.pure_operations new.producer = self.producer assert self.posponedop is None @@ -429,7 +429,7 @@ def test_emittable(self, op): return True - + def emit_operation(self, op): ###self.heap_op_optimizer.emitting_operation(op) self._emit_operation(op) @@ -507,7 +507,7 @@ canfold = nextop.getopnum() == rop.GUARD_NO_OVERFLOW else: nextop = None - + if canfold: for i in range(op.numargs()): if self.get_constant_box(op.getarg(i)) is None: From commits-noreply at bitbucket.org Thu Mar 24 09:07:25 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 09:07:25 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: merge default. Untested. Message-ID: <20110324080725.548412A202E@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42887:f70be93895ba Date: 2011-03-22 17:29 +0100 http://bitbucket.org/pypy/pypy/changeset/f70be93895ba/ Log: merge default. Untested. diff --git a/py/_test/parseopt.py b/py/_test/parseopt.py deleted file mode 100644 --- a/py/_test/parseopt.py +++ /dev/null @@ -1,112 +0,0 @@ -""" -thin wrapper around Python's optparse.py -adding some extra checks and ways to systematically -have Environment variables provide default values -for options. basic usage: - - >>> parser = Parser() - >>> parser.addoption("--hello", action="store_true", dest="hello") - >>> option, args = parser.parse(['--hello']) - >>> option.hello - True - >>> args - [] - -""" -import py -import optparse - -class Parser: - """ Parser for command line arguments. 
""" - - def __init__(self, usage=None, processopt=None): - self._anonymous = OptionGroup("custom options", parser=self) - self._groups = [] - self._processopt = processopt - self._usage = usage - self.hints = [] - - def processoption(self, option): - if self._processopt: - if option.dest: - self._processopt(option) - - def addnote(self, note): - self._notes.append(note) - - def getgroup(self, name, description="", after=None): - for group in self._groups: - if group.name == name: - return group - group = OptionGroup(name, description, parser=self) - i = 0 - for i, grp in enumerate(self._groups): - if grp.name == after: - break - self._groups.insert(i+1, group) - return group - - addgroup = getgroup - def addgroup(self, name, description=""): - py.log._apiwarn("1.1", "use getgroup() which gets-or-creates") - return self.getgroup(name, description) - - def addoption(self, *opts, **attrs): - """ add an optparse-style option. """ - self._anonymous.addoption(*opts, **attrs) - - def parse(self, args): - optparser = MyOptionParser(self) - groups = self._groups + [self._anonymous] - for group in groups: - if group.options: - desc = group.description or group.name - optgroup = optparse.OptionGroup(optparser, desc) - optgroup.add_options(group.options) - optparser.add_option_group(optgroup) - return optparser.parse_args([str(x) for x in args]) - - def parse_setoption(self, args, option): - parsedoption, args = self.parse(args) - for name, value in parsedoption.__dict__.items(): - setattr(option, name, value) - return args - - -class OptionGroup: - def __init__(self, name, description="", parser=None): - self.name = name - self.description = description - self.options = [] - self.parser = parser - - def addoption(self, *optnames, **attrs): - """ add an option to this group. 
""" - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=False) - - def _addoption(self, *optnames, **attrs): - option = optparse.Option(*optnames, **attrs) - self._addoption_instance(option, shortupper=True) - - def _addoption_instance(self, option, shortupper=False): - if not shortupper: - for opt in option._short_opts: - if opt[0] == '-' and opt[1].islower(): - raise ValueError("lowercase shortoptions reserved") - if self.parser: - self.parser.processoption(option) - self.options.append(option) - - -class MyOptionParser(optparse.OptionParser): - def __init__(self, parser): - self._parser = parser - optparse.OptionParser.__init__(self, usage=parser._usage) - def format_epilog(self, formatter): - hints = self._parser.hints - if hints: - s = "\n".join(["hint: " + x for x in hints]) + "\n" - s = "\n" + s + "\n" - return s - return "" diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. diff --git a/py/_test/pycollect.py b/py/_test/pycollect.py deleted file mode 100644 --- a/py/_test/pycollect.py +++ /dev/null @@ -1,399 +0,0 @@ -""" -Python related collection nodes. 
-""" -import py -import inspect -from py._test.collect import configproperty, warnoldcollect -from py._test import funcargs -from py._code.code import TerminalRepr - -class PyobjMixin(object): - def obj(): - def fget(self): - try: - return self._obj - except AttributeError: - self._obj = obj = self._getobj() - return obj - def fset(self, value): - self._obj = value - return property(fget, fset, None, "underlying python object") - obj = obj() - - def _getobj(self): - return getattr(self.parent.obj, self.name) - - def getmodpath(self, stopatmodule=True, includemodule=False): - """ return python path relative to the containing module. """ - chain = self.listchain() - chain.reverse() - parts = [] - for node in chain: - if isinstance(node, Instance): - continue - name = node.name - if isinstance(node, Module): - assert name.endswith(".py") - name = name[:-3] - if stopatmodule: - if includemodule: - parts.append(name) - break - parts.append(name) - parts.reverse() - s = ".".join(parts) - return s.replace(".[", "[") - - def _getfslineno(self): - try: - return self._fslineno - except AttributeError: - pass - obj = self.obj - # xxx let decorators etc specify a sane ordering - if hasattr(obj, 'place_as'): - obj = obj.place_as - - self._fslineno = py.code.getfslineno(obj) - return self._fslineno - - def reportinfo(self): - fspath, lineno = self._getfslineno() - modpath = self.getmodpath() - return fspath, lineno, modpath - -class PyCollectorMixin(PyobjMixin, py.test.collect.Collector): - Class = configproperty('Class') - Instance = configproperty('Instance') - Function = configproperty('Function') - Generator = configproperty('Generator') - - def funcnamefilter(self, name): - return name.startswith('test') - def classnamefilter(self, name): - return name.startswith('Test') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - # NB. 
we avoid random getattrs and peek in the __dict__ instead - dicts = [getattr(self.obj, '__dict__', {})] - for basecls in inspect.getmro(self.obj.__class__): - dicts.append(basecls.__dict__) - seen = {} - l = [] - for dic in dicts: - for name, obj in dic.items(): - if name in seen: - continue - seen[name] = True - if name[0] != "_": - res = self.makeitem(name, obj) - if res is None: - continue - if not isinstance(res, list): - res = [res] - l.extend(res) - l.sort(key=lambda item: item.reportinfo()[:2]) - return l - - def _deprecated_join(self, name): - if self.__class__.join != py.test.collect.Collector.join: - warnoldcollect() - return self.join(name) - - def makeitem(self, name, obj): - return self.ihook.pytest_pycollect_makeitem( - collector=self, name=name, obj=obj) - - def _istestclasscandidate(self, name, obj): - if self.classnamefilter(name) and \ - inspect.isclass(obj): - if hasinit(obj): - # XXX WARN - return False - return True - - def _genfunctions(self, name, funcobj): - module = self.getparent(Module).obj - clscol = self.getparent(Class) - cls = clscol and clscol.obj or None - metafunc = funcargs.Metafunc(funcobj, config=self.config, - cls=cls, module=module) - gentesthook = self.config.hook.pytest_generate_tests - plugins = funcargs.getplugins(self, withpy=True) - gentesthook.pcall(plugins, metafunc=metafunc) - if not metafunc._calls: - return self.Function(name, parent=self) - l = [] - for callspec in metafunc._calls: - subname = "%s[%s]" %(name, callspec.id) - function = self.Function(name=subname, parent=self, - callspec=callspec, callobj=funcobj) - l.append(function) - return l - -class Module(py.test.collect.File, PyCollectorMixin): - def _getobj(self): - return self._memoizedcall('_obj', self._importtestmodule) - - def _importtestmodule(self): - # we assume we are only called once per module - mod = self.fspath.pyimport() - #print "imported test module", mod - self.config.pluginmanager.consider_module(mod) - return mod - - def setup(self): - if 
getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - if hasattr(self.obj, 'setup_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.setup_module)[0]: - self.obj.setup_module(self.obj) - else: - self.obj.setup_module() - - def teardown(self): - if hasattr(self.obj, 'teardown_module'): - #XXX: nose compat hack, move to nose plugin - # if it takes a positional arg, its probably a py.test style one - # so we pass the current module object - if inspect.getargspec(self.obj.teardown_module)[0]: - self.obj.teardown_module(self.obj) - else: - self.obj.teardown_module() - -class Class(PyCollectorMixin, py.test.collect.Collector): - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - return [self.Instance(name="()", parent=self)] - - def setup(self): - if getattr(self.obj, 'disabled', 0): - py.log._apiwarn(">1.1.1", "%r uses 'disabled' which is deprecated, " - "use pytestmark=..., see pytest_skipping plugin" % (self.obj,)) - py.test.skip("%r is disabled" %(self.obj,)) - setup_class = getattr(self.obj, 'setup_class', None) - if setup_class is not None: - setup_class = getattr(setup_class, 'im_func', setup_class) - setup_class(self.obj) - - def teardown(self): - teardown_class = getattr(self.obj, 'teardown_class', None) - if teardown_class is not None: - teardown_class = getattr(teardown_class, 'im_func', teardown_class) - teardown_class(self.obj) - -class Instance(PyCollectorMixin, py.test.collect.Collector): - def _getobj(self): - return self.parent.obj() - def Function(self): - return getattr(self.obj, 'Function', - PyCollectorMixin.Function.__get__(self)) # XXX for python 2.2 - def _keywords(self): - return [] - Function = 
property(Function) - - #def __repr__(self): - # return "<%s of '%s'>" %(self.__class__.__name__, - # self.parent.obj.__name__) - - def newinstance(self): - self.obj = self._getobj() - return self.obj - -class FunctionMixin(PyobjMixin): - """ mixin for the code common to Function and Generator. - """ - - def setup(self): - """ perform setup for this test function. """ - if inspect.ismethod(self.obj): - name = 'setup_method' - else: - name = 'setup_function' - if isinstance(self.parent, Instance): - obj = self.parent.newinstance() - self.obj = self._getobj() - else: - obj = self.parent.obj - setup_func_or_method = getattr(obj, name, None) - if setup_func_or_method is not None: - setup_func_or_method(self.obj) - - def teardown(self): - """ perform teardown for this test function. """ - if inspect.ismethod(self.obj): - name = 'teardown_method' - else: - name = 'teardown_function' - obj = self.parent.obj - teardown_func_or_meth = getattr(obj, name, None) - if teardown_func_or_meth is not None: - teardown_func_or_meth(self.obj) - - def _prunetraceback(self, traceback): - if hasattr(self, '_obj') and not self.config.option.fulltrace: - code = py.code.Code(self.obj) - path, firstlineno = code.path, code.firstlineno - ntraceback = traceback.cut(path=path, firstlineno=firstlineno) - if ntraceback == traceback: - ntraceback = ntraceback.cut(path=path) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - - def _repr_failure_py(self, excinfo, style="long"): - if excinfo.errisinstance(funcargs.FuncargRequest.LookupError): - fspath, lineno, msg = self.reportinfo() - lines, _ = inspect.getsourcelines(self.obj) - for i, line in enumerate(lines): - if line.strip().startswith('def'): - return FuncargLookupErrorRepr(fspath, lineno, - lines[:i+1], str(excinfo.value)) - return super(FunctionMixin, self)._repr_failure_py(excinfo, - style=style) - - def repr_failure(self, excinfo, outerr=None): - assert 
outerr is None, "XXX outerr usage is deprecated" - return self._repr_failure_py(excinfo, - style=self.config.getvalue("tbstyle")) - - shortfailurerepr = "F" - -class FuncargLookupErrorRepr(TerminalRepr): - def __init__(self, filename, firstlineno, deflines, errorstring): - self.deflines = deflines - self.errorstring = errorstring - self.filename = filename - self.firstlineno = firstlineno - - def toterminal(self, tw): - tw.line() - for line in self.deflines: - tw.line(" " + line.strip()) - for line in self.errorstring.split("\n"): - tw.line(" " + line.strip(), red=True) - tw.line() - tw.line("%s:%d" % (self.filename, self.firstlineno+1)) - -class Generator(FunctionMixin, PyCollectorMixin, py.test.collect.Collector): - def collect(self): - # test generators are seen as collectors but they also - # invoke setup/teardown on popular request - # (induced by the common "test_*" naming shared with normal tests) - self.config._setupstate.prepare(self) - l = [] - seen = {} - for i, x in enumerate(self.obj()): - name, call, args = self.getcallargs(x) - if not py.builtin.callable(call): - raise TypeError("%r yielded non callable test %r" %(self.obj, call,)) - if name is None: - name = "[%d]" % i - else: - name = "['%s']" % name - if name in seen: - raise ValueError("%r generated tests with non-unique name %r" %(self, name)) - seen[name] = True - l.append(self.Function(name, self, args=args, callobj=call)) - return l - - def getcallargs(self, obj): - if not isinstance(obj, (tuple, list)): - obj = (obj,) - # explict naming - if isinstance(obj[0], py.builtin._basestring): - name = obj[0] - obj = obj[1:] - else: - name = None - call, args = obj[0], obj[1:] - return name, call, args - - -# -# Test Items -# -_dummy = object() -class Function(FunctionMixin, py.test.collect.Item): - """ a Function Item is responsible for setting up - and executing a Python callable test object. 
- """ - _genid = None - def __init__(self, name, parent=None, args=None, config=None, - callspec=None, callobj=_dummy): - super(Function, self).__init__(name, parent, config=config) - self._args = args - if self._isyieldedfunction(): - assert not callspec, "yielded functions (deprecated) cannot have funcargs" - else: - if callspec is not None: - self.funcargs = callspec.funcargs or {} - self._genid = callspec.id - if hasattr(callspec, "param"): - self._requestparam = callspec.param - else: - self.funcargs = {} - if callobj is not _dummy: - self._obj = callobj - self.function = getattr(self.obj, 'im_func', self.obj) - - def _getobj(self): - name = self.name - i = name.find("[") # parametrization - if i != -1: - name = name[:i] - return getattr(self.parent.obj, name) - - def _isyieldedfunction(self): - return self._args is not None - - def readkeywords(self): - d = super(Function, self).readkeywords() - d.update(py.builtin._getfuncdict(self.obj)) - return d - - def runtest(self): - """ execute the underlying test function. 
""" - self.ihook.pytest_pyfunc_call(pyfuncitem=self) - - def setup(self): - super(Function, self).setup() - if hasattr(self, 'funcargs'): - funcargs.fillfuncargs(self) - - def __eq__(self, other): - try: - return (self.name == other.name and - self._args == other._args and - self.parent == other.parent and - self.obj == other.obj and - getattr(self, '_genid', None) == - getattr(other, '_genid', None) - ) - except AttributeError: - pass - return False - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.parent, self.name)) - -def hasinit(obj): - init = getattr(obj, '__init__', None) - if init: - if init != object.__init__: - return True diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py --- a/pypy/rpython/llinterp.py +++ b/pypy/rpython/llinterp.py @@ -5,6 +5,7 @@ from pypy.rpython.lltypesystem import rclass from pypy.rpython.ootypesystem import ootype from pypy.rlib.objectmodel import ComputedIntSymbolic, CDefinedIntSymbolic +from pypy.rlib.objectmodel import Symbolic from pypy.rlib import rstackovf import sys, os @@ -1152,7 +1153,9 @@ # special case if type(x) is CDefinedIntSymbolic: x = x.default - assert isinstance(x, int) + # if type(x) is a subclass of Symbolic, bool(x) will usually raise + # a TypeError -- unless __nonzero__ has been explicitly overridden. 
+ assert isinstance(x, (int, Symbolic)) return bool(x) # read frame var support diff --git a/py/_cmdline/pycountloc.py b/py/_cmdline/pycountloc.py deleted file mode 100755 --- a/py/_cmdline/pycountloc.py +++ /dev/null @@ -1,94 +0,0 @@ -#!/usr/bin/env python - -# hands on script to compute the non-empty Lines of Code -# for tests and non-test code - -"""\ -py.countloc [PATHS] - -Count (non-empty) lines of python code and number of python files recursively -starting from a list of paths given on the command line (starting from the -current working directory). Distinguish between test files and normal ones and -report them separately. -""" -import py - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - (options, args) = parser.parse_args() - countloc(args) - -def nodot(p): - return p.check(dotfile=0) - -class FileCounter(object): - def __init__(self): - self.file2numlines = {} - self.numlines = 0 - self.numfiles = 0 - - def addrecursive(self, directory, fil="*.py", rec=nodot): - for x in directory.visit(fil, rec): - self.addfile(x) - - def addfile(self, fn, emptylines=False): - if emptylines: - s = len(p.readlines()) - else: - s = 0 - for i in fn.readlines(): - if i.strip(): - s += 1 - self.file2numlines[fn] = s - self.numfiles += 1 - self.numlines += s - - def getnumlines(self, fil): - numlines = 0 - for path, value in self.file2numlines.items(): - if fil(path): - numlines += value - return numlines - - def getnumfiles(self, fil): - numfiles = 0 - for path in self.file2numlines: - if fil(path): - numfiles += 1 - return numfiles - -def get_loccount(locations=None): - if locations is None: - localtions = [py.path.local()] - counter = FileCounter() - for loc in locations: - counter.addrecursive(loc, '*.py', rec=nodot) - - def istestfile(p): - return p.check(fnmatch='test_*.py') - isnottestfile = lambda x: not istestfile(x) - - numfiles = counter.getnumfiles(isnottestfile) - numlines = counter.getnumlines(isnottestfile) - numtestfiles = 
counter.getnumfiles(istestfile) - numtestlines = counter.getnumlines(istestfile) - - return counter, numfiles, numlines, numtestfiles, numtestlines - -def countloc(paths=None): - if not paths: - paths = ['.'] - locations = [py.path.local(x) for x in paths] - (counter, numfiles, numlines, numtestfiles, - numtestlines) = get_loccount(locations) - - items = counter.file2numlines.items() - items.sort(lambda x,y: cmp(x[1], y[1])) - for x, y in items: - print("%3d %30s" % (y,x)) - - print("%30s %3d" %("number of testfiles", numtestfiles)) - print("%30s %3d" %("number of non-empty testlines", numtestlines)) - print("%30s %3d" %("number of files", numfiles)) - print("%30s %3d" %("number of non-empty lines", numlines)) - diff --git a/py/_cmdline/pyconvert_unittest.py b/py/_cmdline/pyconvert_unittest.py deleted file mode 100644 --- a/py/_cmdline/pyconvert_unittest.py +++ /dev/null @@ -1,253 +0,0 @@ -import re -import sys - -try: - import parser -except ImportError: - parser = None - -d={} -# d is the dictionary of unittest changes, keyed to the old name -# used by unittest. -# d[old][0] is the new replacement function. -# d[old][1] is the operator you will substitute, or '' if there is none. -# d[old][2] is the possible number of arguments to the unittest -# function. 
- -# Old Unittest Name new name operator # of args -d['assertRaises'] = ('raises', '', ['Any']) -d['fail'] = ('raise AssertionError', '', [0,1]) -d['assert_'] = ('assert', '', [1,2]) -d['failIf'] = ('assert not', '', [1,2]) -d['assertEqual'] = ('assert', ' ==', [2,3]) -d['failIfEqual'] = ('assert not', ' ==', [2,3]) -d['assertIn'] = ('assert', ' in', [2,3]) -d['assertNotIn'] = ('assert', ' not in', [2,3]) -d['assertNotEqual'] = ('assert', ' !=', [2,3]) -d['failUnlessEqual'] = ('assert', ' ==', [2,3]) -d['assertAlmostEqual'] = ('assert round', ' ==', [2,3,4]) -d['failIfAlmostEqual'] = ('assert not round', ' ==', [2,3,4]) -d['assertNotAlmostEqual'] = ('assert round', ' !=', [2,3,4]) -d['failUnlessAlmostEquals'] = ('assert round', ' ==', [2,3,4]) - -# the list of synonyms -d['failUnlessRaises'] = d['assertRaises'] -d['failUnless'] = d['assert_'] -d['assertEquals'] = d['assertEqual'] -d['assertNotEquals'] = d['assertNotEqual'] -d['assertAlmostEquals'] = d['assertAlmostEqual'] -d['assertNotAlmostEquals'] = d['assertNotAlmostEqual'] - -# set up the regular expressions we will need -leading_spaces = re.compile(r'^(\s*)') # this never fails - -pat = '' -for k in d.keys(): # this complicated pattern to match all unittests - pat += '|' + r'^(\s*)' + 'self.' + k + r'\(' # \tself.whatever( - -old_names = re.compile(pat[1:]) -linesep='\n' # nobody will really try to convert files not read - # in text mode, will they? 
- - -def blocksplitter(fp): - '''split a file into blocks that are headed by functions to rename''' - - blocklist = [] - blockstring = '' - - for line in fp: - interesting = old_names.match(line) - if interesting : - if blockstring: - blocklist.append(blockstring) - blockstring = line # reset the block - else: - blockstring += line - - blocklist.append(blockstring) - return blocklist - -def rewrite_utest(block): - '''rewrite every block to use the new utest functions''' - - '''returns the rewritten unittest, unless it ran into problems, - in which case it just returns the block unchanged. - ''' - utest = old_names.match(block) - - if not utest: - return block - - old = utest.group(0).lstrip()[5:-1] # the name we want to replace - new = d[old][0] # the name of the replacement function - op = d[old][1] # the operator you will use , or '' if there is none. - possible_args = d[old][2] # a list of the number of arguments the - # unittest function could possibly take. - - if possible_args == ['Any']: # just rename assertRaises & friends - return re.sub('self.'+old, new, block) - - message_pos = possible_args[-1] - # the remaining unittests can have an optional message to print - # when they fail. It is always the last argument to the function. - - try: - indent, argl, trailer = decompose_unittest(old, block) - - except SyntaxError: # but we couldn't parse it! 
- return block - - argnum = len(argl) - if argnum not in possible_args: - # sanity check - this one isn't real either - return block - - elif argnum == message_pos: - message = argl[-1] - argl = argl[:-1] - else: - message = None - - if argnum is 0 or (argnum is 1 and argnum is message_pos): #unittest fail() - string = '' - if message: - message = ' ' + message - - elif message_pos is 4: # assertAlmostEqual & friends - try: - pos = argl[2].lstrip() - except IndexError: - pos = '7' # default if none is specified - string = '(%s -%s, %s)%s 0' % (argl[0], argl[1], pos, op ) - - else: # assert_, assertEquals and all the rest - string = ' ' + op.join(argl) - - if message: - string = string + ',' + message - - return indent + new + string + trailer - -def decompose_unittest(old, block): - '''decompose the block into its component parts''' - - ''' returns indent, arglist, trailer - indent -- the indentation - arglist -- the arguments to the unittest function - trailer -- any extra junk after the closing paren, such as #commment - ''' - - indent = re.match(r'(\s*)', block).group() - pat = re.search('self.' 
+ old + r'\(', block) - - args, trailer = get_expr(block[pat.end():], ')') - arglist = break_args(args, []) - - if arglist == ['']: # there weren't any - return indent, [], trailer - - for i in range(len(arglist)): - try: - parser.expr(arglist[i].lstrip('\t ')) - except SyntaxError: - if i == 0: - arglist[i] = '(' + arglist[i] + ')' - else: - arglist[i] = ' (' + arglist[i] + ')' - - return indent, arglist, trailer - -def break_args(args, arglist): - '''recursively break a string into a list of arguments''' - try: - first, rest = get_expr(args, ',') - if not rest: - return arglist + [first] - else: - return [first] + break_args(rest, arglist) - except SyntaxError: - return arglist + [args] - -def get_expr(s, char): - '''split a string into an expression, and the rest of the string''' - - pos=[] - for i in range(len(s)): - if s[i] == char: - pos.append(i) - if pos == []: - raise SyntaxError # we didn't find the expected char. Ick. - - for p in pos: - # make the python parser do the hard work of deciding which comma - # splits the string into two expressions - try: - parser.expr('(' + s[:p] + ')') - return s[:p], s[p+1:] - except SyntaxError: # It's not an expression yet - pass - raise SyntaxError # We never found anything that worked. - - -def main(): - import sys - import py - - usage = "usage: %prog [-s [filename ...] | [-i | -c filename ...]]" - optparser = py.std.optparse.OptionParser(usage) - - def select_output (option, opt, value, optparser, **kw): - if hasattr(optparser, 'output'): - optparser.error( - 'Cannot combine -s -i and -c options. 
Use one only.') - else: - optparser.output = kw['output'] - - optparser.add_option("-s", "--stdout", action="callback", - callback=select_output, - callback_kwargs={'output':'stdout'}, - help="send your output to stdout") - - optparser.add_option("-i", "--inplace", action="callback", - callback=select_output, - callback_kwargs={'output':'inplace'}, - help="overwrite files in place") - - optparser.add_option("-c", "--copy", action="callback", - callback=select_output, - callback_kwargs={'output':'copy'}, - help="copy files ... fn.py --> fn_cp.py") - - options, args = optparser.parse_args() - - output = getattr(optparser, 'output', 'stdout') - - if output in ['inplace', 'copy'] and not args: - optparser.error( - '-i and -c option require at least one filename') - - if not args: - s = '' - for block in blocksplitter(sys.stdin): - s += rewrite_utest(block) - sys.stdout.write(s) - - else: - for infilename in args: # no error checking to see if we can open, etc. - infile = file(infilename) - s = '' - for block in blocksplitter(infile): - s += rewrite_utest(block) - if output == 'inplace': - outfile = file(infilename, 'w+') - elif output == 'copy': # yes, just go clobber any existing .cp - outfile = file (infilename[:-3]+ '_cp.py', 'w+') - else: - outfile = sys.stdout - - outfile.write(s) - - -if __name__ == '__main__': - main() diff --git a/pypy/module/cpyext/test/conftest.py b/pypy/module/cpyext/test/conftest.py --- a/pypy/module/cpyext/test/conftest.py +++ b/pypy/module/cpyext/test/conftest.py @@ -1,5 +1,6 @@ import py -from pypy.conftest import option, gettestobjspace +import pytest +from pypy.conftest import gettestobjspace def pytest_ignore_collect(path, config): if config.option.runappdirect: diff --git a/py/_cmdline/__init__.py b/py/_cmdline/__init__.py deleted file mode 100644 --- a/py/_cmdline/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 
100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/py/_path/gateway/channeltest.py b/py/_path/gateway/channeltest.py deleted file mode 100644 --- a/py/_path/gateway/channeltest.py +++ /dev/null @@ -1,65 +0,0 @@ -import threading - - -class PathServer: - - def __init__(self, channel): - self.channel = channel - self.C2P = {} - self.next_id = 0 - threading.Thread(target=self.serve).start() - - def p2c(self, path): - id = self.next_id - self.next_id += 1 - self.C2P[id] = path - return id - - def command_LIST(self, id, *args): - path = self.C2P[id] - answer = [(self.p2c(p), p.basename) for p in path.listdir(*args)] - self.channel.send(answer) - - def command_DEL(self, id): - del self.C2P[id] - - def command_GET(self, id, spec): - path = self.C2P[id] - self.channel.send(path._getbyspec(spec)) - - def command_READ(self, id): - path = self.C2P[id] - self.channel.send(path.read()) - - def command_JOIN(self, id, resultid, *args): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.join(*args) - - def command_DIRPATH(self, id, resultid): - path = self.C2P[id] - assert resultid not in self.C2P - self.C2P[resultid] = path.dirpath() - - def serve(self): - try: - while 1: - msg = self.channel.receive() - meth = getattr(self, 'command_' + msg[0]) - meth(*msg[1:]) - except 
EOFError: - pass - -if __name__ == '__main__': - import py - gw = execnet.PopenGateway() - channel = gw._channelfactory.new() - srv = PathServer(channel) - c = gw.remote_exec(""" - import remotepath - p = remotepath.RemotePath(channel.receive(), channel.receive()) - channel.send(len(p.listdir())) - """) - c.send(channel) - c.send(srv.p2c(py.path.local('/tmp'))) - print(c.receive()) diff --git a/py/_plugin/standalonetemplate.py b/py/_plugin/standalonetemplate.py deleted file mode 100755 --- a/py/_plugin/standalonetemplate.py +++ /dev/null @@ -1,63 +0,0 @@ -#! /usr/bin/env python - -sources = """ - at SOURCES@""" - -import sys -import base64 -import zlib -import imp - -class DictImporter(object): - def __init__(self, sources): - self.sources = sources - - def find_module(self, fullname, path=None): - if fullname in self.sources: - return self - if fullname+'.__init__' in self.sources: - return self - return None - - def load_module(self, fullname): - # print "load_module:", fullname - from types import ModuleType - try: - s = self.sources[fullname] - is_pkg = False - except KeyError: - s = self.sources[fullname+'.__init__'] - is_pkg = True - - co = compile(s, fullname, 'exec') - module = sys.modules.setdefault(fullname, ModuleType(fullname)) - module.__file__ = "%s/%s" % (__file__, fullname) - module.__loader__ = self - if is_pkg: - module.__path__ = [fullname] - - do_exec(co, module.__dict__) - return sys.modules[fullname] - - def get_source(self, name): - res = self.sources.get(name) - if res is None: - res = self.sources.get(name+'.__init__') - return res - -if __name__ == "__main__": - if sys.version_info >= (3,0): - exec("def do_exec(co, loc): exec(co, loc)\n") - import pickle - sources = sources.encode("ascii") # ensure bytes - sources = pickle.loads(zlib.decompress(base64.decodebytes(sources))) - else: - import cPickle as pickle - exec("def do_exec(co, loc): exec co in loc\n") - sources = pickle.loads(zlib.decompress(base64.decodestring(sources))) - - importer 
= DictImporter(sources) - sys.meta_path.append(importer) - - import py - py.cmdline.pytest() diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/py/_test/collect.py b/py/_test/collect.py deleted file mode 100644 --- a/py/_test/collect.py +++ /dev/null @@ -1,418 +0,0 @@ -""" -test collection nodes, forming a tree, Items are leafs. -""" -import py - -def configproperty(name): - def fget(self): - #print "retrieving %r property from %s" %(name, self.fspath) - return self.config._getcollectclass(name, self.fspath) - return property(fget) - -class HookProxy: - def __init__(self, node): - self.node = node - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - hookmethod = getattr(self.node.config.hook, name) - def call_matching_hooks(**kwargs): - plugins = self.node.config._getmatchingplugins(self.node.fspath) - return hookmethod.pcall(plugins, **kwargs) - return call_matching_hooks - -class Node(object): - """ base class for all Nodes in the collection tree. - Collector subclasses have children, Items are terminal nodes. - """ - def __init__(self, name, parent=None, config=None): - self.name = name - self.parent = parent - self.config = config or parent.config - self.fspath = getattr(parent, 'fspath', None) - self.ihook = HookProxy(self) - - def _reraiseunpicklingproblem(self): - if hasattr(self, '_unpickle_exc'): - py.builtin._reraise(*self._unpickle_exc) - - # - # note to myself: Pickling is uh. 
- # - def __getstate__(self): - return (self.name, self.parent) - def __setstate__(self, nameparent): - name, parent = nameparent - try: - colitems = parent._memocollect() - for colitem in colitems: - if colitem.name == name: - # we are a copy that will not be returned - # by our parent - self.__dict__ = colitem.__dict__ - break - else: - raise ValueError("item %r not found in parent collection %r" %( - name, [x.name for x in colitems])) - except KeyboardInterrupt: - raise - except Exception: - # our parent can't collect us but we want unpickling to - # otherwise continue - self._reraiseunpicklingproblem() will - # reraise the problem - self._unpickle_exc = py.std.sys.exc_info() - self.name = name - self.parent = parent - self.config = parent.config - - def __repr__(self): - if getattr(self.config.option, 'debug', False): - return "<%s %r %0x>" %(self.__class__.__name__, - getattr(self, 'name', None), id(self)) - else: - return "<%s %r>" %(self.__class__.__name__, - getattr(self, 'name', None)) - - # methods for ordering nodes - - def __eq__(self, other): - if not isinstance(other, Node): - return False - return self.name == other.name and self.parent == other.parent - - def __ne__(self, other): - return not self == other - - def __hash__(self): - return hash((self.name, self.parent)) - - def setup(self): - pass - - def teardown(self): - pass - - def _memoizedcall(self, attrname, function): - exattrname = "_ex_" + attrname - failure = getattr(self, exattrname, None) - if failure is not None: - py.builtin._reraise(failure[0], failure[1], failure[2]) - if hasattr(self, attrname): - return getattr(self, attrname) - try: - res = function() - except (KeyboardInterrupt, SystemExit): - raise - except: - failure = py.std.sys.exc_info() - setattr(self, exattrname, failure) - raise - setattr(self, attrname, res) - return res - - def listchain(self): - """ return list of all parent collectors up to self, - starting from root of collection tree. 
""" - l = [self] - while 1: - x = l[0] - if x.parent is not None and x.parent.parent is not None: - l.insert(0, x.parent) - else: - return l - - def listnames(self): - return [x.name for x in self.listchain()] - - def getparent(self, cls): - current = self - while current and not isinstance(current, cls): - current = current.parent - return current - - def readkeywords(self): - return dict([(x, True) for x in self._keywords()]) - - def _keywords(self): - return [self.name] - - def _skipbykeyword(self, keywordexpr): - """ return True if they given keyword expression means to - skip this collector/item. - """ - if not keywordexpr: - return - chain = self.listchain() - for key in filter(None, keywordexpr.split()): - eor = key[:1] == '-' - if eor: - key = key[1:] - if not (eor ^ self._matchonekeyword(key, chain)): - return True - - def _matchonekeyword(self, key, chain): - elems = key.split(".") - # XXX O(n^2), anyone cares? - chain = [item.readkeywords() for item in chain if item._keywords()] - for start, _ in enumerate(chain): - if start + len(elems) > len(chain): - return False - for num, elem in enumerate(elems): - for keyword in chain[num + start]: - ok = False - if elem in keyword: - ok = True - break - if not ok: - break - if num == len(elems) - 1 and ok: - return True - return False - - def _prunetraceback(self, traceback): - return traceback - - def _repr_failure_py(self, excinfo, style=None): - excinfo.traceback = self._prunetraceback(excinfo.traceback) - # XXX should excinfo.getrepr record all data and toterminal() - # process it? - if style is None: - if self.config.option.tbstyle == "short": - style = "short" - else: - style = "long" - return excinfo.getrepr(funcargs=True, - showlocals=self.config.option.showlocals, - style=style) - - repr_failure = _repr_failure_py - shortfailurerepr = "F" - -class Collector(Node): - """ - Collector instances create children through collect() - and thus iteratively build a tree. 
attributes:: - - parent: attribute pointing to the parent collector - (or None if this is the root collector) - name: basename of this collector object - """ - Directory = configproperty('Directory') - Module = configproperty('Module') - - def collect(self): - """ returns a list of children (items and collectors) - for this collection node. - """ - raise NotImplementedError("abstract") - - def collect_by_name(self, name): - """ return a child matching the given name, else None. """ - for colitem in self._memocollect(): - if colitem.name == name: - return colitem - - def repr_failure(self, excinfo, outerr=None): - """ represent a failure. """ - assert outerr is None, "XXX deprecated" - return self._repr_failure_py(excinfo) - - def _memocollect(self): - """ internal helper method to cache results of calling collect(). """ - return self._memoizedcall('_collected', self.collect) - - # ********************************************************************** - # DEPRECATED METHODS - # ********************************************************************** - - def _deprecated_collect(self): - # avoid recursion: - # collect -> _deprecated_collect -> custom run() -> - # super().run() -> collect - attrname = '_depcollectentered' - if hasattr(self, attrname): - return - setattr(self, attrname, True) - method = getattr(self.__class__, 'run', None) - if method is not None and method != Collector.run: - warnoldcollect(function=method) - names = self.run() - return [x for x in [self.join(name) for name in names] if x] - - def run(self): - """ DEPRECATED: returns a list of names available from this collector. - You can return an empty list. Callers of this method - must take care to catch exceptions properly. - """ - return [colitem.name for colitem in self._memocollect()] - - def join(self, name): - """ DEPRECATED: return a child collector or item for the given name. - If the return value is None there is no such child. 
- """ - return self.collect_by_name(name) - - def _prunetraceback(self, traceback): - if hasattr(self, 'fspath'): - path = self.fspath - ntraceback = traceback.cut(path=self.fspath) - if ntraceback == traceback: - ntraceback = ntraceback.cut(excludepath=py._pydir) - traceback = ntraceback.filter() - return traceback - -class FSCollector(Collector): - def __init__(self, fspath, parent=None, config=None): - fspath = py.path.local(fspath) - super(FSCollector, self).__init__(fspath.basename, parent, config=config) - self.fspath = fspath - - def __getstate__(self): - # RootCollector.getbynames() inserts a directory which we need - # to throw out here for proper re-instantiation - if isinstance(self.parent.parent, RootCollector): - assert self.parent.fspath == self.parent.parent.fspath, self.parent - return (self.name, self.parent.parent) # shortcut - return super(Collector, self).__getstate__() - -class File(FSCollector): - """ base class for collecting tests from a file. """ - -class Directory(FSCollector): - def recfilter(self, path): - if path.check(dir=1, dotfile=0): - return path.basename not in ('CVS', '_darcs', '{arch}') - - def collect(self): - l = self._deprecated_collect() - if l is not None: - return l - l = [] - for path in self.fspath.listdir(sort=True): - res = self.consider(path) - if res is not None: - if isinstance(res, (list, tuple)): - l.extend(res) - else: - l.append(res) - return l - - def consider(self, path): - if self.ihook.pytest_ignore_collect(path=path, config=self.config): - return - if path.check(file=1): - res = self.consider_file(path) - elif path.check(dir=1): - res = self.consider_dir(path) - else: - res = None - if isinstance(res, list): - # throw out identical results - l = [] - for x in res: - if x not in l: - assert x.parent == self, (x.parent, self) - assert x.fspath == path, (x.fspath, path) - l.append(x) - res = l - return res - - def consider_file(self, path): - return self.ihook.pytest_collect_file(path=path, parent=self) - - 
def consider_dir(self, path, usefilters=None): - if usefilters is not None: - py.log._apiwarn("0.99", "usefilters argument not needed") - return self.ihook.pytest_collect_directory(path=path, parent=self) - -class Item(Node): - """ a basic test item. """ - def _deprecated_testexecution(self): - if self.__class__.run != Item.run: - warnoldtestrun(function=self.run) - elif self.__class__.execute != Item.execute: - warnoldtestrun(function=self.execute) - else: - return False - self.run() - return True - - def run(self): - """ deprecated, here because subclasses might call it. """ - return self.execute(self.obj) - - def execute(self, obj): - """ deprecated, here because subclasses might call it. """ - return obj() - - def reportinfo(self): - return self.fspath, None, "" - -def warnoldcollect(function=None): - py.log._apiwarn("1.0", - "implement collector.collect() instead of " - "collector.run() and collector.join()", - stacklevel=2, function=function) - -def warnoldtestrun(function=None): - py.log._apiwarn("1.0", - "implement item.runtest() instead of " - "item.run() and item.execute()", - stacklevel=2, function=function) - - - -class RootCollector(Directory): - def __init__(self, config): - Directory.__init__(self, config.topdir, parent=None, config=config) - self.name = None - - def __repr__(self): - return "" %(self.fspath,) - - def getbynames(self, names): - current = self.consider(self.config.topdir) - while names: - name = names.pop(0) - if name == ".": # special "identity" name - continue - l = [] - for x in current._memocollect(): - if x.name == name: - l.append(x) - elif x.fspath == current.fspath.join(name): - l.append(x) - elif x.name == "()": - names.insert(0, name) - l.append(x) - break - if not l: - raise ValueError("no node named %r below %r" %(name, current)) - current = l[0] - return current - - def totrail(self, node): - chain = node.listchain() - names = [self._getrelpath(chain[0].fspath)] - names += [x.name for x in chain[1:]] - return names - - 
def fromtrail(self, trail): - return self.config._rootcol.getbynames(trail) - - def _getrelpath(self, fspath): - topdir = self.config.topdir - relpath = fspath.relto(topdir) - if not relpath: - if fspath == topdir: - relpath = "." - else: - raise ValueError("%r not relative to topdir %s" - %(self.fspath, topdir)) - return relpath - - def __getstate__(self): - return self.config - - def __setstate__(self, config): - self.__init__(config) diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. 
The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/py/bin/py.cleanup b/py/bin/py.cleanup deleted file mode 100755 --- a/py/bin/py.cleanup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycleanup() \ No newline at end of file diff --git a/py/_path/gateway/channeltest2.py b/py/_path/gateway/channeltest2.py deleted file mode 100644 --- a/py/_path/gateway/channeltest2.py +++ /dev/null @@ -1,21 +0,0 @@ -import py -from remotepath import RemotePath - - -SRC = open('channeltest.py', 'r').read() - -SRC += ''' -import py -srv = PathServer(channel.receive()) -channel.send(srv.p2c(py.path.local("/tmp"))) -''' - - -#gw = execnet.SshGateway('codespeak.net') -gw = execnet.PopenGateway() -gw.remote_init_threads(5) -c = gw.remote_exec(SRC, stdout=py.std.sys.stdout, stderr=py.std.sys.stderr) -subchannel = gw._channelfactory.new() -c.send(subchannel) - -p = RemotePath(subchannel, c.receive()) diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -185,6 +185,15 @@ # XXX leaks if a unicode2wcharp() fails with MemoryError # and was not the first in this function freeme = arg + elif TARGET is VOIDP: + if arg is None: + arg = lltype.nullptr(VOIDP.TO) + elif isinstance(arg, str): + arg = str2charp(arg) + freeme = arg + elif isinstance(arg, unicode): + arg = unicode2wcharp(arg) + freeme = arg elif _isfunctype(TARGET) and not 
_isllptr(arg): # XXX pass additional arguments if invoke_around_handlers: @@ -550,9 +559,8 @@ r_singlefloat = rarithmetic.r_singlefloat # void * - for now, represented as char * -VOIDP = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True})) -VOIDP_real = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True})) -NULL = lltype.nullptr(VOIDP.TO) +VOIDP = lltype.Ptr(lltype.Array(lltype.Char, hints={'nolength': True, 'render_as_void': True})) +NULL = None # void ** VOIDPP = CArrayPtr(VOIDP) @@ -640,6 +648,7 @@ data_start = cast_ptr_to_adr(llstrtype(data)) + \ offsetof(STRTYPE, 'chars') + itemoffsetof(STRTYPE.chars, 0) return cast(TYPEP, data_start) + get_nonmovingbuffer._annenforceargs_ = [strtype] # (str, char*) -> None def free_nonmovingbuffer(data, buf): @@ -658,6 +667,7 @@ keepalive_until_here(data) if not followed_2nd_path: lltype.free(buf, flavor='raw') + free_nonmovingbuffer._annenforceargs_ = [strtype, None] # int -> (char*, str) def alloc_buffer(count): @@ -672,6 +682,7 @@ raw_buf = lltype.malloc(TYPEP.TO, count, flavor='raw') return raw_buf, lltype.nullptr(STRTYPE) alloc_buffer._always_inline_ = True # to get rid of the returned tuple + alloc_buffer._annenforceargs_ = [int] # (char*, str, int, int) -> None def str_from_buffer(raw_buf, gc_buf, allocated_size, needed_size): diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. diff --git a/py/_plugin/hookspec.py b/py/_plugin/hookspec.py deleted file mode 100644 --- a/py/_plugin/hookspec.py +++ /dev/null @@ -1,172 +0,0 @@ -""" -hook specifications for py.test plugins -""" - -# ------------------------------------------------------------------------- -# Command line and configuration -# ------------------------------------------------------------------------- - -def pytest_namespace(): - "return dict of name->object which will get stored at py.test. namespace" - -def pytest_addoption(parser): - "add optparse-style options via parser.addoption." - -def pytest_addhooks(pluginmanager): - "add hooks via pluginmanager.registerhooks(module)" - -def pytest_configure(config): - """ called after command line options have been parsed. - and all plugins and initial conftest files been loaded. 
- """ - -def pytest_unconfigure(config): - """ called before test process is exited. """ - -# ------------------------------------------------------------------------- -# collection hooks -# ------------------------------------------------------------------------- - -def pytest_ignore_collect(path, config): - """ return true value to prevent considering this path for collection. - This hook is consulted for all files and directories prior to considering - collection hooks. - """ -pytest_ignore_collect.firstresult = True - -def pytest_collect_directory(path, parent): - """ return Collection node or None for the given path. """ -pytest_collect_directory.firstresult = True - -def pytest_collect_file(path, parent): - """ return Collection node or None for the given path. """ - -def pytest_collectstart(collector): - """ collector starts collecting. """ - -def pytest_collectreport(report): - """ collector finished collecting. """ - -def pytest_deselected(items): - """ called for test items deselected by keyword. """ - -def pytest_make_collect_report(collector): - """ perform a collection and return a collection. """ -pytest_make_collect_report.firstresult = True - -# XXX rename to item_collected()? meaning in distribution context? -def pytest_itemstart(item, node=None): - """ test item gets collected. """ - -# ------------------------------------------------------------------------- -# Python test function related hooks -# ------------------------------------------------------------------------- - -def pytest_pycollect_makemodule(path, parent): - """ return a Module collector or None for the given path. - This hook will be called for each matching test module path. - The pytest_collect_file hook needs to be used if you want to - create test modules for files that do not match as a test module. 
- """ -pytest_pycollect_makemodule.firstresult = True - -def pytest_pycollect_makeitem(collector, name, obj): - """ return custom item/collector for a python object in a module, or None. """ -pytest_pycollect_makeitem.firstresult = True - -def pytest_pyfunc_call(pyfuncitem): - """ call underlying test function. """ -pytest_pyfunc_call.firstresult = True - -def pytest_generate_tests(metafunc): - """ generate (multiple) parametrized calls to a test function.""" - -# ------------------------------------------------------------------------- -# generic runtest related hooks -# ------------------------------------------------------------------------- - -def pytest_runtest_protocol(item): - """ implement fixture, run and report about the given test item. """ -pytest_runtest_protocol.firstresult = True - -def pytest_runtest_setup(item): - """ called before pytest_runtest_call(). """ - -def pytest_runtest_call(item): - """ execute test item. """ - -def pytest_runtest_teardown(item): - """ called after pytest_runtest_call(). """ - -def pytest_runtest_makereport(item, call): - """ make a test report for the given item and call outcome. """ -pytest_runtest_makereport.firstresult = True - -def pytest_runtest_logreport(report): - """ process item test report. """ - -# special handling for final teardown - somewhat internal for now -def pytest__teardown_final(session): - """ called before test session finishes. """ -pytest__teardown_final.firstresult = True - -def pytest__teardown_final_logerror(report): - """ called if runtest_teardown_final failed. """ - -# ------------------------------------------------------------------------- -# test session related hooks -# ------------------------------------------------------------------------- - -def pytest_sessionstart(session): - """ before session.main() is called. """ - -def pytest_sessionfinish(session, exitstatus): - """ whole test run finishes. 
""" - -# ------------------------------------------------------------------------- -# hooks for influencing reporting (invoked from pytest_terminal) -# ------------------------------------------------------------------------- - -def pytest_report_header(config): - """ return a string to be displayed as header info for terminal reporting.""" - -def pytest_report_teststatus(report): - """ return result-category, shortletter and verbose word for reporting.""" -pytest_report_teststatus.firstresult = True - -def pytest_terminal_summary(terminalreporter): - """ add additional section in terminal summary reporting. """ - -def pytest_report_iteminfo(item): - """ return (fspath, lineno, name) for the item. - the information is used for result display and to sort tests - """ -pytest_report_iteminfo.firstresult = True - -# ------------------------------------------------------------------------- -# doctest hooks -# ------------------------------------------------------------------------- - -def pytest_doctest_prepare_content(content): - """ return processed content for a given doctest""" -pytest_doctest_prepare_content.firstresult = True - - -# ------------------------------------------------------------------------- -# error handling and internal debugging hooks -# ------------------------------------------------------------------------- - -def pytest_plugin_registered(plugin, manager): - """ a new py lib plugin got registered. """ - -def pytest_plugin_unregistered(plugin): - """ a py lib plugin got unregistered. """ - -def pytest_internalerror(excrepr): - """ called for internal errors. """ - -def pytest_keyboard_interrupt(excinfo): - """ called for keyboard interrupt. """ - -def pytest_trace(category, msg): - """ called for debug info. """ diff --git a/py/_plugin/pytest_genscript.py b/py/_plugin/pytest_genscript.py deleted file mode 100755 --- a/py/_plugin/pytest_genscript.py +++ /dev/null @@ -1,69 +0,0 @@ -#! 
/usr/bin/env python -""" -generate standalone test script to be distributed along with an application. -""" - -import os -import sys -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group.addoption("--genscript", action="store", default=None, - dest="genscript", metavar="path", - help="create standalone py.test script at given target path.") - -def pytest_configure(config): - genscript = config.getvalue("genscript") - if genscript: - import py - mydir = py.path.local(__file__).dirpath() - infile = mydir.join("standalonetemplate.py") - pybasedir = py.path.local(py.__file__).dirpath().dirpath() - genscript = py.path.local(genscript) - main(pybasedir, outfile=genscript, infile=infile) - raise SystemExit(0) - -def main(pybasedir, outfile, infile): - import base64 - import zlib - try: - import pickle - except Importerror: - import cPickle as pickle - - outfile = str(outfile) - infile = str(infile) - assert os.path.isabs(outfile) - os.chdir(str(pybasedir)) - files = [] - for dirpath, dirnames, filenames in os.walk("py"): - for f in filenames: - if not f.endswith(".py"): - continue - - fn = os.path.join(dirpath, f) - files.append(fn) - - name2src = {} - for f in files: - k = f.replace(os.sep, ".")[:-3] - name2src[k] = open(f, "r").read() - - data = pickle.dumps(name2src, 2) - data = zlib.compress(data, 9) - data = base64.encodestring(data) - data = data.decode("ascii") - - exe = open(infile, "r").read() - exe = exe.replace("@SOURCES@", data) - - open(outfile, "w").write(exe) - os.chmod(outfile, 493) # 0755 - sys.stdout.write("generated standalone py.test at %r, have fun!\n" % outfile) - -if __name__=="__main__": - dn = os.path.dirname - here = os.path.abspath(dn(__file__)) # py/plugin/ - pybasedir = dn(dn(here)) - outfile = os.path.join(os.getcwd(), "py.test-standalone") - infile = os.path.join(here, 'standalonetemplate.py') - main(pybasedir, outfile, infile) diff --git a/py/bin/py.convert_unittest b/py/bin/py.convert_unittest deleted file mode 
100755 --- a/py/bin/py.convert_unittest +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pyconvert_unittest() \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/py/_test/__init__.py b/py/_test/__init__.py deleted file mode 100644 --- a/py/_test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -""" assertion and py.test helper API.""" diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/py/_code/oldmagic2.py b/py/_code/oldmagic2.py deleted file mode 100644 --- a/py/_code/oldmagic2.py +++ /dev/null @@ -1,6 +0,0 @@ - -import py - -py.log._apiwarn("1.1", "py.magic.AssertionError is deprecated, use py.code._AssertionError", stacklevel=2) - -from py.code import _AssertionError as AssertionError diff --git a/py/_plugin/pytest_default.py b/py/_plugin/pytest_default.py deleted file mode 100644 --- a/py/_plugin/pytest_default.py +++ /dev/null @@ -1,131 +0,0 @@ -""" default hooks and general py.test options. 
""" - -import sys -import py - -def pytest_pyfunc_call(__multicall__, pyfuncitem): - if not __multicall__.execute(): - testfunction = pyfuncitem.obj - if pyfuncitem._isyieldedfunction(): - testfunction(*pyfuncitem._args) - else: - funcargs = pyfuncitem.funcargs - testfunction(**funcargs) - -def pytest_collect_file(path, parent): - ext = path.ext - pb = path.purebasename - if pb.startswith("test_") or pb.endswith("_test") or \ - path in parent.config._argfspaths: - if ext == ".py": - return parent.ihook.pytest_pycollect_makemodule( - path=path, parent=parent) - -def pytest_pycollect_makemodule(path, parent): - return parent.Module(path, parent) - -def pytest_funcarg__pytestconfig(request): - """ the pytest config object with access to command line opts.""" - return request.config - -def pytest_ignore_collect(path, config): - ignore_paths = config.getconftest_pathlist("collect_ignore", path=path) - ignore_paths = ignore_paths or [] - excludeopt = config.getvalue("ignore") - if excludeopt: - ignore_paths.extend([py.path.local(x) for x in excludeopt]) - return path in ignore_paths - # XXX more refined would be: - if ignore_paths: - for p in ignore_paths: - if path == p or path.relto(p): - return True - - -def pytest_collect_directory(path, parent): - # XXX reconsider the following comment - # not use parent.Directory here as we generally - # want dir/conftest.py to be able to - # define Directory(dir) already - if not parent.recfilter(path): # by default special ".cvs", ... 
- # check if cmdline specified this dir or a subdir directly - for arg in parent.config._argfspaths: - if path == arg or arg.relto(path): - break - else: - return - Directory = parent.config._getcollectclass('Directory', path) - return Directory(path, parent=parent) - -def pytest_report_iteminfo(item): - return item.reportinfo() - -def pytest_addoption(parser): - group = parser.getgroup("general", "running and selection options") - group._addoption('-x', '--exitfirst', action="store_true", default=False, - dest="exitfirst", - help="exit instantly on first error or failed test."), - group._addoption('--maxfail', metavar="num", - action="store", type="int", dest="maxfail", default=0, - help="exit after first num failures or errors.") - group._addoption('-k', - action="store", dest="keyword", default='', - help="only run test items matching the given " - "space separated keywords. precede a keyword with '-' to negate. " - "Terminate the expression with ':' to treat a match as a signal " - "to run all subsequent tests. 
") - - group = parser.getgroup("collect", "collection") - group.addoption('--collectonly', - action="store_true", dest="collectonly", - help="only collect tests, don't execute them."), - group.addoption("--ignore", action="append", metavar="path", - help="ignore path during collection (multi-allowed).") - group.addoption('--confcutdir', dest="confcutdir", default=None, - metavar="dir", - help="only load conftest.py's relative to specified dir.") - - group = parser.getgroup("debugconfig", - "test process debugging and configuration") - group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", - help="base temporary directory for this test run.") - -def pytest_configure(config): - setsession(config) - # compat - if config.getvalue("exitfirst"): - config.option.maxfail = 1 - -def setsession(config): - val = config.getvalue - if val("collectonly"): - from py._test.session import Session - config.setsessionclass(Session) - -# pycollect related hooks and code, should move to pytest_pycollect.py - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - res = __multicall__.execute() - if res is not None: - return res - if collector._istestclasscandidate(name, obj): - res = collector._deprecated_join(name) - if res is not None: - return res - return collector.Class(name, parent=collector) - elif collector.funcnamefilter(name) and hasattr(obj, '__call__'): - res = collector._deprecated_join(name) - if res is not None: - return res - if is_generator(obj): - # XXX deprecation warning - return collector.Generator(name, parent=collector) - else: - return collector._genfunctions(name, obj) - -def is_generator(func): - try: - return py.code.getrawcode(func).co_flags & 32 # generator function - except AttributeError: # builtin functions have no bytecode - # assume them to not be generators - return False diff --git a/pypy/rpython/lltypesystem/opimpl.py b/pypy/rpython/lltypesystem/opimpl.py --- a/pypy/rpython/lltypesystem/opimpl.py +++ 
b/pypy/rpython/lltypesystem/opimpl.py @@ -227,6 +227,14 @@ assert isinstance(y, int) return x | y +def op_int_xor(x, y): + # used in computing hashes + if isinstance(x, AddressAsInt): x = llmemory.cast_adr_to_int(x.adr) + if isinstance(y, AddressAsInt): y = llmemory.cast_adr_to_int(y.adr) + assert isinstance(x, int) + assert isinstance(y, int) + return x ^ y + def op_int_mul(x, y): assert isinstance(x, (int, llmemory.AddressOffset)) assert isinstance(y, (int, llmemory.AddressOffset)) diff --git a/py/_path/gateway/remotepath.py b/py/_path/gateway/remotepath.py deleted file mode 100644 --- a/py/_path/gateway/remotepath.py +++ /dev/null @@ -1,47 +0,0 @@ -import py, itertools -from py._path import common - -COUNTER = itertools.count() - -class RemotePath(common.PathBase): - sep = '/' - - def __init__(self, channel, id, basename=None): - self._channel = channel - self._id = id - self._basename = basename - self._specs = {} - - def __del__(self): - self._channel.send(('DEL', self._id)) - - def __repr__(self): - return 'RemotePath(%s)' % self.basename - - def listdir(self, *args): - self._channel.send(('LIST', self._id) + args) - return [RemotePath(self._channel, id, basename) - for (id, basename) in self._channel.receive()] - - def dirpath(self): - id = ~COUNTER.next() - self._channel.send(('DIRPATH', self._id, id)) - return RemotePath(self._channel, id) - - def join(self, *args): - id = ~COUNTER.next() - self._channel.send(('JOIN', self._id, id) + args) - return RemotePath(self._channel, id) - - def _getbyspec(self, spec): - parts = spec.split(',') - ask = [x for x in parts if x not in self._specs] - if ask: - self._channel.send(('GET', self._id, ",".join(ask))) - for part, value in zip(ask, self._channel.receive()): - self._specs[part] = value - return [self._specs[x] for x in parts] - - def read(self): - self._channel.send(('READ', self._id)) - return self._channel.receive() diff --git a/pypy/doc/config/objspace.usemodules.parser.txt 
b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/py/bin/win32/py.test.cmd b/py/bin/win32/py.test.cmd deleted file mode 100644 --- a/py/bin/win32/py.test.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.test" %* \ No newline at end of file diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. 
By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. - - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... 
mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. - -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. 
There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. - -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. 
Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. - -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. 
- -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. 
- -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... - except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. 
Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. - -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. 
- -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. - -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. 
_`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/py/_plugin/pytest__pytest.py b/py/_plugin/pytest__pytest.py deleted file mode 100644 --- a/py/_plugin/pytest__pytest.py +++ /dev/null @@ -1,101 +0,0 @@ -import py - -from py._test.pluginmanager import HookRelay - -def pytest_funcarg___pytest(request): - return PytestArg(request) - -class PytestArg: - def __init__(self, request): - self.request = request - - def gethookrecorder(self, hook): - hookrecorder = HookRecorder(hook._registry) - hookrecorder.start_recording(hook._hookspecs) - self.request.addfinalizer(hookrecorder.finish_recording) - return hookrecorder - -class ParsedCall: - def __init__(self, name, locals): - assert '_name' not in locals - self.__dict__.update(locals) - self.__dict__.pop('self') - self._name = name - - def __repr__(self): - d = self.__dict__.copy() - del d['_name'] - return "" %(self._name, d) - -class HookRecorder: - def __init__(self, registry): - self._registry = registry - self.calls = [] - self._recorders = {} - - def start_recording(self, hookspecs): - if not isinstance(hookspecs, (list, tuple)): - hookspecs = [hookspecs] - for hookspec in hookspecs: - assert hookspec not in self._recorders - class RecordCalls: - _recorder = self - for name, method 
in vars(hookspec).items(): - if name[0] != "_": - setattr(RecordCalls, name, self._makecallparser(method)) - recorder = RecordCalls() - self._recorders[hookspec] = recorder - self._registry.register(recorder) - self.hook = HookRelay(hookspecs, registry=self._registry, - prefix="pytest_") - - def finish_recording(self): - for recorder in self._recorders.values(): - self._registry.unregister(recorder) - self._recorders.clear() - - def _makecallparser(self, method): - name = method.__name__ - args, varargs, varkw, default = py.std.inspect.getargspec(method) - if not args or args[0] != "self": - args.insert(0, 'self') - fspec = py.std.inspect.formatargspec(args, varargs, varkw, default) - # we use exec because we want to have early type - # errors on wrong input arguments, using - # *args/**kwargs delays this and gives errors - # elsewhere - exec (py.code.compile(""" - def %(name)s%(fspec)s: - self._recorder.calls.append( - ParsedCall(%(name)r, locals())) - """ % locals())) - return locals()[name] - - def getcalls(self, names): - if isinstance(names, str): - names = names.split() - for name in names: - for cls in self._recorders: - if name in vars(cls): - break - else: - raise ValueError("callname %r not found in %r" %( - name, self._recorders.keys())) - l = [] - for call in self.calls: - if call._name in names: - l.append(call) - return l - - def popcall(self, name): - for i, call in enumerate(self.calls): - if call._name == name: - del self.calls[i] - return call - raise ValueError("could not find call %r" %(name, )) - - def getcall(self, name): - l = self.getcalls(name) - assert len(l) == 1, (name, l) - return l[0] - diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. diff --git a/py/bin/win32/py.countloc.cmd b/py/bin/win32/py.countloc.cmd deleted file mode 100644 --- a/py/bin/win32/py.countloc.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.countloc" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. - -Moreover, all the usual Python features such as bound and unbound -methods are available as well. 
- -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. 
To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. 
diff --git a/pypy/translator/c/funcgen.py b/pypy/translator/c/funcgen.py --- a/pypy/translator/c/funcgen.py +++ b/pypy/translator/c/funcgen.py @@ -299,7 +299,6 @@ def gen_link(self, link): "Generate the code to jump across the given Link." - is_alive = {} assignments = [] for a1, a2 in zip(link.args, link.target.inputargs): a2type, a2typename = self.illtypes[a2] @@ -644,9 +643,17 @@ return '%s = %s;' % (self.expr(op.result), items) def OP_DIRECT_PTRADD(self, op): - return '%s = %s + %s;' % (self.expr(op.result), - self.expr(op.args[0]), - self.expr(op.args[1])) + ARRAY = self.lltypemap(op.args[0]).TO + if ARRAY._hints.get("render_as_void"): + return '%s = (char *)%s + %s;' % ( + self.expr(op.result), + self.expr(op.args[0]), + self.expr(op.args[1])) + else: + return '%s = %s + %s;' % ( + self.expr(op.result), + self.expr(op.args[0]), + self.expr(op.args[1])) def OP_CAST_POINTER(self, op): TYPE = self.lltypemap(op.result) @@ -819,7 +826,6 @@ from pypy.rpython.lltypesystem.rstr import STR msg = op.args[0] assert msg.concretetype == Ptr(STR) - argv = [] if isinstance(msg, Constant): msg = c_string_constant(''.join(msg.value.chars)) else: diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/py/bin/win32/py.which.cmd b/py/bin/win32/py.which.cmd deleted file mode 100644 --- a/py/bin/win32/py.which.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.which" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). 
diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/py/_plugin/pytest_tmpdir.py b/py/_plugin/pytest_tmpdir.py deleted file mode 100644 --- a/py/_plugin/pytest_tmpdir.py +++ /dev/null @@ -1,22 +0,0 @@ -"""provide temporary directories to test functions. - -usage example:: - - def test_plugin(tmpdir): - tmpdir.join("hello").write("hello") - -.. _`py.path.local`: ../../path.html - -""" -import py - -def pytest_funcarg__tmpdir(request): - """return a temporary directory path object - unique to each test function invocation, - created as a sub directory of the base temporary - directory. The returned object is a `py.path.local`_ - path object. - """ - name = request.function.__name__ - x = request.config.mktemp(name, numbered=True) - return x.realpath() diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/py/_test/session.py b/py/_test/session.py deleted file mode 100644 --- a/py/_test/session.py +++ /dev/null @@ -1,135 +0,0 @@ -""" basic test session implementation. 
- -* drives collection of tests -* triggers executions of tests -* produces events used by reporting -""" - -import py - -# exitcodes for the command line -EXIT_OK = 0 -EXIT_TESTSFAILED = 1 -EXIT_INTERRUPTED = 2 -EXIT_INTERNALERROR = 3 -EXIT_NOHOSTS = 4 - -# imports used for genitems() -Item = py.test.collect.Item -Collector = py.test.collect.Collector - -class Session(object): - nodeid = "" - class Interrupted(KeyboardInterrupt): - """ signals an interrupted test run. """ - __module__ = 'builtins' # for py3 - - def __init__(self, config): - self.config = config - self.pluginmanager = config.pluginmanager # shortcut - self.pluginmanager.register(self) - self._testsfailed = 0 - self._nomatch = False - self.shouldstop = False - - def genitems(self, colitems, keywordexpr=None): - """ yield Items from iterating over the given colitems. """ - if colitems: - colitems = list(colitems) - while colitems: - next = colitems.pop(0) - if isinstance(next, (tuple, list)): - colitems[:] = list(next) + colitems - continue - assert self.pluginmanager is next.config.pluginmanager - if isinstance(next, Item): - remaining = self.filteritems([next]) - if remaining: - self.config.hook.pytest_itemstart(item=next) - yield next - else: - assert isinstance(next, Collector) - self.config.hook.pytest_collectstart(collector=next) - rep = self.config.hook.pytest_make_collect_report(collector=next) - if rep.passed: - for x in self.genitems(rep.result, keywordexpr): - yield x - self.config.hook.pytest_collectreport(report=rep) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - - def filteritems(self, colitems): - """ return items to process (some may be deselected)""" - keywordexpr = self.config.option.keyword - if not keywordexpr or self._nomatch: - return colitems - if keywordexpr[-1] == ":": - keywordexpr = keywordexpr[:-1] - remaining = [] - deselected = [] - for colitem in colitems: - if isinstance(colitem, Item): - if colitem._skipbykeyword(keywordexpr): - 
deselected.append(colitem) - continue - remaining.append(colitem) - if deselected: - self.config.hook.pytest_deselected(items=deselected) - if self.config.option.keyword.endswith(":"): - self._nomatch = True - return remaining - - def collect(self, colitems): - keyword = self.config.option.keyword - for x in self.genitems(colitems, keyword): - yield x - - def sessionstarts(self): - """ setup any neccessary resources ahead of the test run. """ - self.config.hook.pytest_sessionstart(session=self) - - def pytest_runtest_logreport(self, report): - if report.failed: - self._testsfailed += 1 - maxfail = self.config.getvalue("maxfail") - if maxfail and self._testsfailed >= maxfail: - self.shouldstop = "stopping after %d failures" % ( - self._testsfailed) - pytest_collectreport = pytest_runtest_logreport - - def sessionfinishes(self, exitstatus): - """ teardown any resources after a test run. """ - self.config.hook.pytest_sessionfinish( - session=self, - exitstatus=exitstatus, - ) - - def main(self, colitems): - """ main loop for running tests. 
""" - self.shouldstop = False - self.sessionstarts() - exitstatus = EXIT_OK - try: - self._mainloop(colitems) - if self._testsfailed: - exitstatus = EXIT_TESTSFAILED - self.sessionfinishes(exitstatus=exitstatus) - except KeyboardInterrupt: - excinfo = py.code.ExceptionInfo() - self.config.hook.pytest_keyboard_interrupt(excinfo=excinfo) - exitstatus = EXIT_INTERRUPTED - except: - excinfo = py.code.ExceptionInfo() - self.config.pluginmanager.notify_exception(excinfo) - exitstatus = EXIT_INTERNALERROR - if exitstatus in (EXIT_INTERNALERROR, EXIT_INTERRUPTED): - self.sessionfinishes(exitstatus=exitstatus) - return exitstatus - - def _mainloop(self, colitems): - for item in self.collect(colitems): - if not self.config.option.collectonly: - item.config.hook.pytest_runtest_protocol(item=item) - if self.shouldstop: - raise self.Interrupted(self.shouldstop) - diff --git a/pypy/config/translationoption.py b/pypy/config/translationoption.py --- a/pypy/config/translationoption.py +++ b/pypy/config/translationoption.py @@ -117,7 +117,6 @@ ChoiceOption("jit_profiler", "integrate profiler support into the JIT", ["off", "oprofile"], default="off"), - BoolOption("jit_ffi", "optimize libffi calls", default=False), # misc BoolOption("verbose", "Print extra information", default=False), diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. 
diff --git a/py/bin/py.lookup b/py/bin/py.lookup deleted file mode 100755 --- a/py/bin/py.lookup +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pylookup() \ No newline at end of file diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/py/_plugin/pytest_junitxml.py b/py/_plugin/pytest_junitxml.py deleted file mode 100644 --- a/py/_plugin/pytest_junitxml.py +++ /dev/null @@ -1,171 +0,0 @@ -""" - logging of test results in JUnit-XML format, for use with Hudson - and build integration servers. Based on initial code from Ross Lawley. -""" - -import py -import time - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group.addoption('--junitxml', action="store", dest="xmlpath", - metavar="path", default=None, - help="create junit-xml style report file at given path.") - -def pytest_configure(config): - xmlpath = config.option.xmlpath - if xmlpath: - config._xml = LogXML(xmlpath) - config.pluginmanager.register(config._xml) - -def pytest_unconfigure(config): - xml = getattr(config, '_xml', None) - if xml: - del config._xml - config.pluginmanager.unregister(xml) - -class LogXML(object): - def __init__(self, logfile): - self.logfile = logfile - self.test_logs = [] - self.passed = self.skipped = 0 - self.failed = self.errors = 0 - self._durations = {} - - def _opentestcase(self, report): - node = report.item - d = {'time': self._durations.pop(report.item, "0")} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in 
sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def _closetestcase(self): - self.test_logs.append("") - - def appendlog(self, fmt, *args): - args = tuple([py.xml.escape(arg) for arg in args]) - self.test_logs.append(fmt % args) - - def append_pass(self, report): - self.passed += 1 - self._opentestcase(report) - self._closetestcase() - - def append_failure(self, report): - self._opentestcase(report) - #msg = str(report.longrepr.reprtraceback.extraline) - if "xfail" in report.keywords: - self.appendlog( - '') - self.skipped += 1 - else: - self.appendlog('%s', - report.longrepr) - self.failed += 1 - self._closetestcase() - - def _opentestcase_collectfailure(self, report): - node = report.collector - d = {'time': '???'} - names = [x.replace(".py", "") for x in node.listnames() if x != "()"] - d['classname'] = ".".join(names[:-1]) - d['name'] = names[-1] - attrs = ['%s="%s"' % item for item in sorted(d.items())] - self.test_logs.append("\n" % " ".join(attrs)) - - def append_collect_failure(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_collect_skipped(self, report): - self._opentestcase_collectfailure(report) - #msg = str(report.longrepr.reprtraceback.extraline) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.skipped += 1 - - def append_error(self, report): - self._opentestcase(report) - self.appendlog('%s', - report.longrepr) - self._closetestcase() - self.errors += 1 - - def append_skipped(self, report): - self._opentestcase(report) - if "xfail" in report.keywords: - self.appendlog( - '%s', - report.keywords['xfail']) - else: - self.appendlog("") - self._closetestcase() - self.skipped += 1 - - def pytest_runtest_logreport(self, report): - if report.passed: - self.append_pass(report) - elif report.failed: - if report.when != "call": - 
self.append_error(report) - else: - self.append_failure(report) - elif report.skipped: - self.append_skipped(report) - - def pytest_runtest_call(self, item, __multicall__): - start = time.time() - try: - return __multicall__.execute() - finally: - self._durations[item] = time.time() - start - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.append_collect_failure(report) - else: - self.append_collect_skipped(report) - - def pytest_internalerror(self, excrepr): - self.errors += 1 - data = py.xml.escape(excrepr) - self.test_logs.append( - '\n' - ' ' - '%s' % data) - - def pytest_sessionstart(self, session): - self.suite_start_time = time.time() - - def pytest_sessionfinish(self, session, exitstatus, __multicall__): - if py.std.sys.version_info[0] < 3: - logfile = py.std.codecs.open(self.logfile, 'w', encoding='utf-8') - else: - logfile = open(self.logfile, 'w', encoding='utf-8') - - suite_stop_time = time.time() - suite_time_delta = suite_stop_time - self.suite_start_time - numtests = self.passed + self.failed - logfile.write('') - logfile.write('') - logfile.writelines(self.test_logs) - logfile.write('') - logfile.close() - tw = session.config.pluginmanager.getplugin("terminalreporter")._tw - tw.line() - tw.sep("-", "generated xml file: %s" %(self.logfile)) diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/objspace/std/test/test_mapdict.py b/pypy/objspace/std/test/test_mapdict.py --- a/pypy/objspace/std/test/test_mapdict.py +++ b/pypy/objspace/std/test/test_mapdict.py @@ -52,6 +52,13 @@ assert aa.get_terminator() is aa.back.back +def test_huge_chain(): + current = Terminator(space, "cls") + for i in range(20000): + current = PlainAttribute((str(i), DICT), current) + assert current.index(("0", DICT)) == 0 + + def test_search(): aa = PlainAttribute(("b", DICT), PlainAttribute(("a", DICT), Terminator(None, None))) assert aa.search(DICT) is aa @@ -224,8 +231,8 @@ obj.setdictvalue(space, "a", 51) obj.setdictvalue(space, "b", 61) obj.setdictvalue(space, "c", 71) - assert obj.getdict() is obj.getdict() - assert obj.getdict().length() == 3 + assert obj.getdict(space) is obj.getdict(space) + assert obj.getdict(space).length() == 3 def test_materialize_r_dict(): @@ -283,7 +290,7 @@ def get_impl(self): cls = Class() w_obj = cls.instantiate(self.fakespace) - return w_obj.getdict() + return w_obj.getdict(self.fakespace) class TestMapDictImplementation(BaseTestRDictImplementation): ImplementionClass = MapDictImplementation get_impl = get_impl @@ -294,8 +301,8 @@ # ___________________________________________________________ # tests that check the obj interface after the dict has devolved -def devolve_dict(obj): - w_d = obj.getdict() +def devolve_dict(space, obj): + w_d = obj.getdict(space) w_d._as_rdict() def test_get_setdictvalue_after_devolve(): @@ -311,7 +318,7 @@ obj.setdictvalue(space, "b", 6) obj.setdictvalue(space, "c", 7) obj.setdictvalue(space, "weakref", 42) - devolve_dict(obj) + devolve_dict(space, obj) assert obj.getdictvalue(space, "a") == 5 assert 
obj.getdictvalue(space, "b") == 6 assert obj.getdictvalue(space, "c") == 7 @@ -349,10 +356,10 @@ obj.setdictvalue(space, "a", 5) obj.setdictvalue(space, "b", 6) obj.setdictvalue(space, "c", 7) - w_d = obj.getdict() + w_d = obj.getdict(space) obj2 = cls.instantiate() obj2.setdictvalue(space, "d", 8) - obj.setdict(space, obj2.getdict()) + obj.setdict(space, obj2.getdict(space)) assert obj.getdictvalue(space, "a") is None assert obj.getdictvalue(space, "b") is None assert obj.getdictvalue(space, "c") is None @@ -387,7 +394,7 @@ obj.user_setup(space, cls) obj.setdictvalue(space, "a", w1) if objectcls._nmin1 == 0 and not compressptr: - assert rerased.unerase(obj._value0, W_Root) is w1 + assert unerase_item(obj._value0) is w1 else: assert obj._value0 is w1 assert obj.getdictvalue(space, "a") is w1 @@ -395,7 +402,7 @@ assert obj.getdictvalue(space, "c") is None obj.setdictvalue(space, "a", w2) if objectcls._nmin1 == 0 and not compressptr: - assert rerased.unerase(obj._value0, W_Root) is w2 + assert unerase_item(obj._value0) is w2 else: assert obj._value0 is w2 assert obj.getdictvalue(space, "a") == w2 @@ -416,7 +423,7 @@ res = obj.deldictvalue(space, "a") assert res if objectcls._nmin1 == 0 and not compressptr: - assert rerased.unerase(obj._value0, W_Root) is w4 + assert unerase_item(obj._value0) is w4 else: assert obj._value0 is w4 assert obj.getdictvalue(space, "a") is None @@ -885,6 +892,38 @@ res = self.check(f, 'm') assert res == (0, 2, 1) + def test_dont_keep_class_alive(self): + import weakref + import gc + def f(): + class C(object): + def m(self): + pass + r = weakref.ref(C) + # Trigger cache. 
+ C().m() + del C + gc.collect(); gc.collect(); gc.collect() + assert r() is None + return 42 + f() + + def test_instance_keeps_class_alive(self): + import weakref + import gc + def f(): + class C(object): + def m(self): + return 42 + r = weakref.ref(C) + c = C() + del C + gc.collect(); gc.collect(); gc.collect() + return c.m() + val = f() + assert val == 42 + f() + class AppTestGlobalCaching(AppTestWithMapDict): def setup_class(cls): cls.space = gettestobjspace( diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/py/_plugin/pytest_capture.py b/py/_plugin/pytest_capture.py deleted file mode 100644 --- a/py/_plugin/pytest_capture.py +++ /dev/null @@ -1,288 +0,0 @@ -""" -configurable per-test stdout/stderr capturing mechanisms. - -This plugin captures stdout/stderr output for each test separately. -In case of test failures this captured output is shown grouped -togtther with the test. - -The plugin also provides test function arguments that help to -assert stdout/stderr output from within your tests, see the -`funcarg example`_. - - -Capturing of input/output streams during tests ---------------------------------------------------- - -By default ``sys.stdout`` and ``sys.stderr`` are substituted with -temporary streams during the execution of tests and setup/teardown code. 
-During the whole testing process it will re-use the same temporary -streams allowing to play well with the logging module which easily -takes ownership on these streams. - -Also, 'sys.stdin' is substituted with a file-like "null" object that -does not return any values. This is to immediately error out -on tests that wait on reading something from stdin. - -You can influence output capturing mechanisms from the command line:: - - py.test -s # disable all capturing - py.test --capture=sys # replace sys.stdout/stderr with in-mem files - py.test --capture=fd # point filedescriptors 1 and 2 to temp file - -If you set capturing values in a conftest file like this:: - - # conftest.py - option_capture = 'fd' - -then all tests in that directory will execute with "fd" style capturing. - -sys-level capturing ------------------------------------------- - -Capturing on 'sys' level means that ``sys.stdout`` and ``sys.stderr`` -will be replaced with in-memory files (``py.io.TextIO`` to be precise) -that capture writes and decode non-unicode strings to a unicode object -(using a default, usually, UTF-8, encoding). - -FD-level capturing and subprocesses ------------------------------------------- - -The ``fd`` based method means that writes going to system level files -based on the standard file descriptors will be captured, for example -writes such as ``os.write(1, 'hello')`` will be captured properly. -Capturing on fd-level will include output generated from -any subprocesses created during a test. - -.. _`funcarg example`: - -Example Usage of the capturing Function arguments ---------------------------------------------------- - -You can use the `capsys funcarg`_ and `capfd funcarg`_ to -capture writes to stdout and stderr streams. Using the -funcargs frees your test from having to care about setting/resetting -the old streams and also interacts well with py.test's own -per-test capturing. Here is an example test function: - -.. 
sourcecode:: python - - def test_myoutput(capsys): - print ("hello") - sys.stderr.write("world\\n") - out, err = capsys.readouterr() - assert out == "hello\\n" - assert err == "world\\n" - print "next" - out, err = capsys.readouterr() - assert out == "next\\n" - -The ``readouterr()`` call snapshots the output so far - -and capturing will be continued. After the test -function finishes the original streams will -be restored. If you want to capture on -the filedescriptor level you can use the ``capfd`` function -argument which offers the same interface. -""" - -import py -import os - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--capture', action="store", default=None, - metavar="method", type="choice", choices=['fd', 'sys', 'no'], - help="per-test capturing method: one of fd (default)|sys|no.") - group._addoption('-s', action="store_const", const="no", dest="capture", - help="shortcut for --capture=no.") - -def addouterr(rep, outerr): - repr = getattr(rep, 'longrepr', None) - if not hasattr(repr, 'addsection'): - return - for secname, content in zip(["out", "err"], outerr): - if content: - repr.addsection("Captured std%s" % secname, content.rstrip()) - -def pytest_configure(config): - config.pluginmanager.register(CaptureManager(), 'capturemanager') - -class NoCapture: - def startall(self): - pass - def resume(self): - pass - def suspend(self): - return "", "" - -class CaptureManager: - def __init__(self): - self._method2capture = {} - - def _maketempfile(self): - f = py.std.tempfile.TemporaryFile() - newf = py.io.dupfile(f, encoding="UTF-8") - return newf - - def _makestringio(self): - return py.io.TextIO() - - def _getcapture(self, method): - if method == "fd": - return py.io.StdCaptureFD(now=False, - out=self._maketempfile(), err=self._maketempfile() - ) - elif method == "sys": - return py.io.StdCapture(now=False, - out=self._makestringio(), err=self._makestringio() - ) - elif method == "no": - return NoCapture() - else: 
- raise ValueError("unknown capturing method: %r" % method) - - def _getmethod(self, config, fspath): - if config.option.capture: - method = config.option.capture - else: - try: - method = config._conftest.rget("option_capture", path=fspath) - except KeyError: - method = "fd" - if method == "fd" and not hasattr(os, 'dup'): # e.g. jython - method = "sys" - return method - - def resumecapture_item(self, item): - method = self._getmethod(item.config, item.fspath) - if not hasattr(item, 'outerr'): - item.outerr = ('', '') # we accumulate outerr on the item - return self.resumecapture(method) - - def resumecapture(self, method): - if hasattr(self, '_capturing'): - raise ValueError("cannot resume, already capturing with %r" % - (self._capturing,)) - cap = self._method2capture.get(method) - self._capturing = method - if cap is None: - self._method2capture[method] = cap = self._getcapture(method) - cap.startall() - else: - cap.resume() - - def suspendcapture(self, item=None): - self.deactivate_funcargs() - if hasattr(self, '_capturing'): - method = self._capturing - cap = self._method2capture.get(method) - if cap is not None: - outerr = cap.suspend() - del self._capturing - if item: - outerr = (item.outerr[0] + outerr[0], - item.outerr[1] + outerr[1]) - return outerr - return "", "" - - def activate_funcargs(self, pyfuncitem): - if not hasattr(pyfuncitem, 'funcargs'): - return - assert not hasattr(self, '_capturing_funcargs') - self._capturing_funcargs = capturing_funcargs = [] - for name, capfuncarg in pyfuncitem.funcargs.items(): - if name in ('capsys', 'capfd'): - capturing_funcargs.append(capfuncarg) - capfuncarg._start() - - def deactivate_funcargs(self): - capturing_funcargs = getattr(self, '_capturing_funcargs', None) - if capturing_funcargs is not None: - while capturing_funcargs: - capfuncarg = capturing_funcargs.pop() - capfuncarg._finalize() - del self._capturing_funcargs - - def pytest_make_collect_report(self, __multicall__, collector): - method = 
self._getmethod(collector.config, collector.fspath) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - addouterr(rep, outerr) - return rep - - def pytest_runtest_setup(self, item): - self.resumecapture_item(item) - - def pytest_runtest_call(self, item): - self.resumecapture_item(item) - self.activate_funcargs(item) - - def pytest_runtest_teardown(self, item): - self.resumecapture_item(item) - - def pytest__teardown_final(self, __multicall__, session): - method = self._getmethod(session.config, None) - self.resumecapture(method) - try: - rep = __multicall__.execute() - finally: - outerr = self.suspendcapture() - if rep: - addouterr(rep, outerr) - return rep - - def pytest_keyboard_interrupt(self, excinfo): - if hasattr(self, '_capturing'): - self.suspendcapture() - - def pytest_runtest_makereport(self, __multicall__, item, call): - self.deactivate_funcargs() - rep = __multicall__.execute() - outerr = self.suspendcapture(item) - if not rep.passed: - addouterr(rep, outerr) - if not rep.passed or rep.when == "teardown": - outerr = ('', '') - item.outerr = outerr - return rep - -def pytest_funcarg__capsys(request): - """captures writes to sys.stdout/sys.stderr and makes - them available successively via a ``capsys.readouterr()`` method - which returns a ``(out, err)`` tuple of captured snapshot strings. - """ - return CaptureFuncarg(request, py.io.StdCapture) - -def pytest_funcarg__capfd(request): - """captures writes to file descriptors 1 and 2 and makes - snapshotted ``(out, err)`` string tuples available - via the ``capsys.readouterr()`` method. If the underlying - platform does not have ``os.dup`` (e.g. Jython) tests using - this funcarg will automatically skip. 
- """ - if not hasattr(os, 'dup'): - py.test.skip("capfd funcarg needs os.dup") - return CaptureFuncarg(request, py.io.StdCaptureFD) - - -class CaptureFuncarg: - def __init__(self, request, captureclass): - self._cclass = captureclass - self.capture = self._cclass(now=False) - #request.addfinalizer(self._finalize) - - def _start(self): - self.capture.startall() - - def _finalize(self): - if hasattr(self, 'capture'): - self.capture.reset() - del self.capture - - def readouterr(self): - return self.capture.readouterr() - - def close(self): - self._finalize() diff --git a/py/_plugin/pytest_doctest.py b/py/_plugin/pytest_doctest.py deleted file mode 100644 --- a/py/_plugin/pytest_doctest.py +++ /dev/null @@ -1,100 +0,0 @@ -""" -collect and execute doctests from modules and test files. - -Usage -------------- - -By default all files matching the ``test*.txt`` pattern will -be run through the python standard ``doctest`` module. Issue:: - - py.test --doctest-glob='*.rst' - -to change the pattern. 
Additionally you can trigger running of -tests in all python modules (including regular python test modules):: - - py.test --doctest-modules - -You can also make these changes permanent in your project by -putting them into a conftest.py file like this:: - - # content of conftest.py - option_doctestmodules = True - option_doctestglob = "*.rst" -""" - -import py -from py._code.code import TerminalRepr, ReprFileLocation -import doctest - -def pytest_addoption(parser): - group = parser.getgroup("collect") - group.addoption("--doctest-modules", - action="store_true", default=False, - help="run doctests in all .py modules", - dest="doctestmodules") - group.addoption("--doctest-glob", - action="store", default="test*.txt", metavar="pat", - help="doctests file matching pattern, default: test*.txt", - dest="doctestglob") - -def pytest_collect_file(path, parent): - config = parent.config - if path.ext == ".py": - if config.getvalue("doctestmodules"): - return DoctestModule(path, parent) - elif path.check(fnmatch=config.getvalue("doctestglob")): - return DoctestTextfile(path, parent) - -class ReprFailDoctest(TerminalRepr): - def __init__(self, reprlocation, lines): - self.reprlocation = reprlocation - self.lines = lines - def toterminal(self, tw): - for line in self.lines: - tw.line(line) - self.reprlocation.toterminal(tw) - -class DoctestItem(py.test.collect.Item): - def __init__(self, path, parent): - name = self.__class__.__name__ + ":" + path.basename - super(DoctestItem, self).__init__(name=name, parent=parent) - self.fspath = path - - def repr_failure(self, excinfo): - if excinfo.errisinstance(doctest.DocTestFailure): - doctestfailure = excinfo.value - example = doctestfailure.example - test = doctestfailure.test - filename = test.filename - lineno = test.lineno + example.lineno + 1 - message = excinfo.type.__name__ - reprlocation = ReprFileLocation(filename, lineno, message) - checker = doctest.OutputChecker() - REPORT_UDIFF = doctest.REPORT_UDIFF - filelines = 
py.path.local(filename).readlines(cr=0) - i = max(test.lineno, max(0, lineno - 10)) # XXX? - lines = [] - for line in filelines[i:lineno]: - lines.append("%03d %s" % (i+1, line)) - i += 1 - lines += checker.output_difference(example, - doctestfailure.got, REPORT_UDIFF).split("\n") - return ReprFailDoctest(reprlocation, lines) - elif excinfo.errisinstance(doctest.UnexpectedException): - excinfo = py.code.ExceptionInfo(excinfo.value.exc_info) - return super(DoctestItem, self).repr_failure(excinfo) - else: - return super(DoctestItem, self).repr_failure(excinfo) - -class DoctestTextfile(DoctestItem): - def runtest(self): - if not self._deprecated_testexecution(): - failed, tot = doctest.testfile( - str(self.fspath), module_relative=False, - raise_on_error=True, verbose=0) - -class DoctestModule(DoctestItem): - def runtest(self): - module = self.fspath.pyimport() - failed, tot = doctest.testmod( - module, raise_on_error=True, verbose=0) diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. 
diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/py/_plugin/pytest_skipping.py b/py/_plugin/pytest_skipping.py deleted file mode 100644 --- a/py/_plugin/pytest_skipping.py +++ /dev/null @@ -1,347 +0,0 @@ -""" -advanced skipping for python test functions, classes or modules. - -With this plugin you can mark test functions for conditional skipping -or as "xfail", expected-to-fail. Skipping a test will avoid running it -while xfail-marked tests will run and result in an inverted outcome: -a pass becomes a failure and a fail becomes a semi-passing one. - -The need for skipping a test is usually connected to a condition. -If a test fails under all conditions then it's probably better -to mark your test as 'xfail'. - -By passing ``-rxs`` to the terminal reporter you will see extra -summary information on skips and xfail-run tests at the end of a test run. - -.. 
_skipif: - -Skipping a single function -------------------------------------------- - -Here is an example for marking a test function to be skipped -when run on a Python3 interpreter:: - - @py.test.mark.skipif("sys.version_info >= (3,0)") - def test_function(): - ... - -During test function setup the skipif condition is -evaluated by calling ``eval(expr, namespace)``. The namespace -contains the ``sys`` and ``os`` modules and the test -``config`` object. The latter allows you to skip based -on a test configuration value e.g. like this:: - - @py.test.mark.skipif("not config.getvalue('db')") - def test_function(...): - ... - -Create a shortcut for your conditional skip decorator -at module level like this:: - - win32only = py.test.mark.skipif("sys.platform != 'win32'") - - @win32only - def test_function(): - ... - - -skip groups of test functions --------------------------------------- - -As with all metadata function marking you can do it at -`whole class- or module level`_. Here is an example -for skipping all methods of a test class based on platform:: - - class TestPosixCalls: - pytestmark = py.test.mark.skipif("sys.platform == 'win32'") - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -The ``pytestmark`` decorator will be applied to each test function. -If your code targets python2.6 or above you can equivalently use -the skipif decorator on classes:: - - @py.test.mark.skipif("sys.platform == 'win32'") - class TestPosixCalls: - - def test_function(self): - # will not be setup or run under 'win32' platform - # - -It is fine in general to apply multiple "skipif" decorators -on a single function - this means that if any of the conditions -apply the function will be skipped. - -.. 
_`whole class- or module level`: mark.html#scoped-marking - - -mark a test function as **expected to fail** -------------------------------------------------------- - -You can use the ``xfail`` marker to indicate that you -expect the test to fail:: - - @py.test.mark.xfail - def test_function(): - ... - -This test will be run but no traceback will be reported -when it fails. Instead terminal reporting will list it in the -"expected to fail" or "unexpectedly passing" sections. - -Same as with skipif_ you can also selectively expect a failure -depending on platform:: - - @py.test.mark.xfail("sys.version_info >= (3,0)") - def test_function(): - ... - -To not run a test and still regard it as "xfailed":: - - @py.test.mark.xfail(..., run=False) - -To specify an explicit reason to be shown with xfailure detail:: - - @py.test.mark.xfail(..., reason="my reason") - -imperative xfail from within a test or setup function ------------------------------------------------------- - -If you cannot declare xfail-conditions at import time -you can also imperatively produce an XFail-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.xfail("unsuppored configuration") - - -skipping on a missing import dependency --------------------------------------------------- - -You can use the following import helper at module level -or within a test or test setup function:: - - docutils = py.test.importorskip("docutils") - -If ``docutils`` cannot be imported here, this will lead to a -skip outcome of the test. You can also skip dependeing if -if a library does not come with a high enough version:: - - docutils = py.test.importorskip("docutils", minversion="0.3") - -The version will be read from the specified module's ``__version__`` attribute. 
- -imperative skip from within a test or setup function ------------------------------------------------------- - -If for some reason you cannot declare skip-conditions -you can also imperatively produce a Skip-outcome from -within test or setup code. Example:: - - def test_function(): - if not valid_config(): - py.test.skip("unsuppored configuration") - -""" - -import py - -def pytest_addoption(parser): - group = parser.getgroup("general") - group.addoption('--runxfail', - action="store_true", dest="runxfail", default=False, - help="run tests even if they are marked xfail") - -class MarkEvaluator: - def __init__(self, item, name): - self.item = item - self.name = name - self.holder = getattr(item.obj, name, None) - - def __bool__(self): - return bool(self.holder) - __nonzero__ = __bool__ - - def istrue(self): - if self.holder: - d = {'os': py.std.os, 'sys': py.std.sys, 'config': self.item.config} - if self.holder.args: - self.result = False - for expr in self.holder.args: - self.expr = expr - if isinstance(expr, str): - result = cached_eval(self.item.config, expr, d) - else: - result = expr - if result: - self.result = True - self.expr = expr - break - else: - self.result = True - return getattr(self, 'result', False) - - def get(self, attr, default=None): - return self.holder.kwargs.get(attr, default) - - def getexplanation(self): - expl = self.get('reason', None) - if not expl: - if not hasattr(self, 'expr'): - return "" - else: - return "condition: " + self.expr - return expl - - -def pytest_runtest_setup(item): - if not isinstance(item, py.test.collect.Function): - return - evalskip = MarkEvaluator(item, 'skipif') - if evalskip.istrue(): - py.test.skip(evalskip.getexplanation()) - item._evalxfail = MarkEvaluator(item, 'xfail') - if not item.config.getvalue("runxfail"): - if item._evalxfail.istrue(): - if not item._evalxfail.get('run', True): - py.test.skip("xfail") - -def pytest_runtest_makereport(__multicall__, item, call): - if not isinstance(item, 
py.test.collect.Function): - return - if not (call.excinfo and - call.excinfo.errisinstance(py.test.xfail.Exception)): - evalxfail = getattr(item, '_evalxfail', None) - if not evalxfail: - return - if call.excinfo and call.excinfo.errisinstance(py.test.xfail.Exception): - if not item.config.getvalue("runxfail"): - rep = __multicall__.execute() - rep.keywords['xfail'] = "reason: " + call.excinfo.value.msg - rep.skipped = True - rep.failed = False - return rep - if call.when == "setup": - rep = __multicall__.execute() - if rep.skipped and evalxfail.istrue(): - expl = evalxfail.getexplanation() - if not evalxfail.get("run", True): - expl = "[NOTRUN] " + expl - rep.keywords['xfail'] = expl - return rep - elif call.when == "call": - rep = __multicall__.execute() - if not item.config.getvalue("runxfail") and evalxfail.istrue(): - if call.excinfo: - rep.skipped = True - rep.failed = rep.passed = False - else: - rep.skipped = rep.passed = False - rep.failed = True - rep.keywords['xfail'] = evalxfail.getexplanation() - else: - if 'xfail' in rep.keywords: - del rep.keywords['xfail'] - return rep - -# called by terminalreporter progress reporting -def pytest_report_teststatus(report): - if 'xfail' in report.keywords: - if report.skipped: - return "xfailed", "x", "xfail" - elif report.failed: - return "xpassed", "X", "XPASS" - -# called by the terminalreporter instance/plugin -def pytest_terminal_summary(terminalreporter): - tr = terminalreporter - if not tr.reportchars: - #for name in "xfailed skipped failed xpassed": - # if not tr.stats.get(name, 0): - # tr.write_line("HINT: use '-r' option to see extra " - # "summary info about tests") - # break - return - - lines = [] - for char in tr.reportchars: - if char == "x": - show_xfailed(terminalreporter, lines) - elif char == "X": - show_xpassed(terminalreporter, lines) - elif char == "f": - show_failed(terminalreporter, lines) - elif char == "s": - show_skipped(terminalreporter, lines) - if lines: - tr._tw.sep("=", "short test 
summary info") - for line in lines: - tr._tw.line(line) - -def show_failed(terminalreporter, lines): - tw = terminalreporter._tw - failed = terminalreporter.stats.get("failed") - if failed: - for rep in failed: - pos = terminalreporter.gettestid(rep.item) - lines.append("FAIL %s" %(pos, )) - -def show_xfailed(terminalreporter, lines): - xfailed = terminalreporter.stats.get("xfailed") - if xfailed: - for rep in xfailed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XFAIL %s %s" %(pos, reason)) - -def show_xpassed(terminalreporter, lines): - xpassed = terminalreporter.stats.get("xpassed") - if xpassed: - for rep in xpassed: - pos = terminalreporter.gettestid(rep.item) - reason = rep.keywords['xfail'] - lines.append("XPASS %s %s" %(pos, reason)) - -def cached_eval(config, expr, d): - if not hasattr(config, '_evalcache'): - config._evalcache = {} - try: - return config._evalcache[expr] - except KeyError: - #import sys - #print >>sys.stderr, ("cache-miss: %r" % expr) - config._evalcache[expr] = x = eval(expr, d) - return x - - -def folded_skips(skipped): - d = {} - for event in skipped: - entry = event.longrepr.reprcrash - key = entry.path, entry.lineno, entry.message - d.setdefault(key, []).append(event) - l = [] - for key, events in d.items(): - l.append((len(events),) + key) - return l - -def show_skipped(terminalreporter, lines): - tr = terminalreporter - skipped = tr.stats.get('skipped', []) - if skipped: - #if not tr.hasopt('skipped'): - # tr.write_line( - # "%d skipped tests, specify -rs for more info" % - # len(skipped)) - # return - fskips = folded_skips(skipped) - if fskips: - #tr.write_sep("_", "skipped test summary") - for num, fspath, lineno, reason in fskips: - if reason.startswith("Skipped: "): - reason = reason[9:] - lines.append("SKIP [%d] %s:%d: %s" % - (num, fspath, lineno, reason)) diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted 
file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. _`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/py/_plugin/pytest_nose.py b/py/_plugin/pytest_nose.py deleted file mode 100644 --- a/py/_plugin/pytest_nose.py +++ /dev/null @@ -1,98 +0,0 @@ -"""nose-compatibility plugin: allow to run nose test suites natively. - -This is an experimental plugin for allowing to run tests written -in 'nosetests style with py.test. - -Usage -------------- - -type:: - - py.test # instead of 'nosetests' - -and you should be able to run nose style tests and at the same -time can make full use of py.test's capabilities. - -Supported nose Idioms ----------------------- - -* setup and teardown at module/class/method level -* SkipTest exceptions and markers -* setup/teardown decorators -* yield-based tests and their setup -* general usage of nose utilities - -Unsupported idioms / issues ----------------------------------- - -- nose-style doctests are not collected and executed correctly, - also fixtures don't work. - -- no nose-configuration is recognized - -If you find other issues or have suggestions please run:: - - py.test --pastebin=all - -and send the resulting URL to a py.test contact channel, -at best to the mailing list. 
-""" -import py -import inspect -import sys - -def pytest_runtest_makereport(__multicall__, item, call): - SkipTest = getattr(sys.modules.get('nose', None), 'SkipTest', None) - if SkipTest: - if call.excinfo and call.excinfo.errisinstance(SkipTest): - # let's substitute the excinfo with a py.test.skip one - call2 = call.__class__(lambda: py.test.skip(str(call.excinfo.value)), call.when) - call.excinfo = call2.excinfo - -def pytest_report_iteminfo(item): - # nose 0.11.1 uses decorators for "raises" and other helpers. - # for reporting progress by filename we fish for the filename - if isinstance(item, py.test.collect.Function): - obj = item.obj - if hasattr(obj, 'compat_co_firstlineno'): - fn = sys.modules[obj.__module__].__file__ - if fn.endswith(".pyc"): - fn = fn[:-1] - #assert 0 - #fn = inspect.getsourcefile(obj) or inspect.getfile(obj) - lineno = obj.compat_co_firstlineno - return py.path.local(fn), lineno, obj.__module__ - -def pytest_runtest_setup(item): - if isinstance(item, (py.test.collect.Function)): - if isinstance(item.parent, py.test.collect.Generator): - gen = item.parent - if not hasattr(gen, '_nosegensetup'): - call_optional(gen.obj, 'setup') - if isinstance(gen.parent, py.test.collect.Instance): - call_optional(gen.parent.obj, 'setup') - gen._nosegensetup = True - if not call_optional(item.obj, 'setup'): - # call module level setup if there is no object level one - call_optional(item.parent.obj, 'setup') - -def pytest_runtest_teardown(item): - if isinstance(item, py.test.collect.Function): - if not call_optional(item.obj, 'teardown'): - call_optional(item.parent.obj, 'teardown') - #if hasattr(item.parent, '_nosegensetup'): - # #call_optional(item._nosegensetup, 'teardown') - # del item.parent._nosegensetup - -def pytest_make_collect_report(collector): - if isinstance(collector, py.test.collect.Generator): - call_optional(collector.obj, 'setup') - -def call_optional(obj, name): - method = getattr(obj, name, None) - if method: - ismethod = 
inspect.ismethod(method) - rawcode = py.code.getrawcode(method) - if not rawcode.co_varnames[ismethod:]: - method() - return True diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. 
diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. diff --git a/py/_plugin/pytest_mark.py b/py/_plugin/pytest_mark.py deleted file mode 100644 --- a/py/_plugin/pytest_mark.py +++ /dev/null @@ -1,174 +0,0 @@ -""" -generic mechanism for marking python functions. - -By using the ``py.test.mark`` helper you can instantiate -decorators that will set named meta data on test functions. - -Marking a single function ----------------------------------------------------- - -You can "mark" a test function with meta data like this:: - - @py.test.mark.webtest - def test_send_http(): - ... - -This will set a "Marker" instance as a function attribute named "webtest". -You can also specify parametrized meta data like this:: - - @py.test.mark.webtest(firefox=30) - def test_receive(): - ... - -The named marker can be accessed like this later:: - - test_receive.webtest.kwargs['firefox'] == 30 - -In addition to set key-value pairs you can also use positional arguments:: - - @py.test.mark.webtest("triangular") - def test_receive(): - ... - -and later access it with ``test_receive.webtest.args[0] == 'triangular``. - -.. _`scoped-marking`: - -Marking whole classes or modules ----------------------------------------------------- - -If you are programming with Python2.6 you may use ``py.test.mark`` decorators -with classes to apply markers to all its test methods:: - - @py.test.mark.webtest - class TestClass: - def test_startup(self): - ... - def test_startup_and_more(self): - ... - -This is equivalent to directly applying the decorator to the -two test functions. 
- -To remain compatible with Python2.5 you can also set a -``pytestmark`` attribute on a TestClass like this:: - - import py - - class TestClass: - pytestmark = py.test.mark.webtest - -or if you need to use multiple markers you can use a list:: - - import py - - class TestClass: - pytestmark = [py.test.mark.webtest, pytest.mark.slowtest] - -You can also set a module level marker:: - - import py - pytestmark = py.test.mark.webtest - -in which case it will be applied to all functions and -methods defined in the module. - -Using "-k MARKNAME" to select tests ----------------------------------------------------- - -You can use the ``-k`` command line option to select -tests:: - - py.test -k webtest # will only run tests marked as webtest - -""" -import py - -def pytest_namespace(): - return {'mark': MarkGenerator()} - -class MarkGenerator: - """ non-underscore attributes of this object can be used as decorators for - marking test functions. Example: @py.test.mark.slowtest in front of a - function will set the 'slowtest' marker object on it. """ - def __getattr__(self, name): - if name[0] == "_": - raise AttributeError(name) - return MarkDecorator(name) - -class MarkDecorator: - """ decorator for setting function attributes. """ - def __init__(self, name): - self.markname = name - self.kwargs = {} - self.args = [] - - def __repr__(self): - d = self.__dict__.copy() - name = d.pop('markname') - return "" %(name, d) - - def __call__(self, *args, **kwargs): - """ if passed a single callable argument: decorate it with mark info. - otherwise add *args/**kwargs in-place to mark information. 
""" - if args: - func = args[0] - if len(args) == 1 and hasattr(func, '__call__') or \ - hasattr(func, '__bases__'): - if hasattr(func, '__bases__'): - if hasattr(func, 'pytestmark'): - l = func.pytestmark - if not isinstance(l, list): - func.pytestmark = [l, self] - else: - l.append(self) - else: - func.pytestmark = [self] - else: - holder = getattr(func, self.markname, None) - if holder is None: - holder = MarkInfo(self.markname, self.args, self.kwargs) - setattr(func, self.markname, holder) - else: - holder.kwargs.update(self.kwargs) - holder.args.extend(self.args) - return func - else: - self.args.extend(args) - self.kwargs.update(kwargs) - return self - -class MarkInfo: - def __init__(self, name, args, kwargs): - self._name = name - self.args = args - self.kwargs = kwargs - - def __getattr__(self, name): - if name[0] != '_' and name in self.kwargs: - py.log._apiwarn("1.1", "use .kwargs attribute to access key-values") - return self.kwargs[name] - raise AttributeError(name) - - def __repr__(self): - return "" % ( - self._name, self.args, self.kwargs) - - -def pytest_pycollect_makeitem(__multicall__, collector, name, obj): - item = __multicall__.execute() - if isinstance(item, py.test.collect.Function): - cls = collector.getparent(py.test.collect.Class) - mod = collector.getparent(py.test.collect.Module) - func = item.obj - func = getattr(func, '__func__', func) # py3 - func = getattr(func, 'im_func', func) # py2 - for parent in [x for x in (mod, cls) if x]: - marker = getattr(parent.obj, 'pytestmark', None) - if marker is not None: - if not isinstance(marker, list): - marker = [marker] - for mark in marker: - if isinstance(mark, MarkDecorator): - mark(func) - return item diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. 
- -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. \ No newline at end of file diff --git a/pypy/translator/c/node.py b/pypy/translator/c/node.py --- a/pypy/translator/c/node.py +++ b/pypy/translator/c/node.py @@ -12,7 +12,7 @@ from pypy.translator.c.support import c_char_array_constant, barebonearray from pypy.translator.c.primitive import PrimitiveType, name_signed from pypy.rlib import exports -from pypy.rlib.rarithmetic import isinf, isnan +from pypy.rlib.rfloat import isinf, isnan from pypy.rlib.rstackovf import _StackOverflow from pypy.translator.c import extfunc from pypy.translator.tool.cbuild import ExternalCompilationInfo @@ -338,12 +338,15 @@ self.varlength = varlength self.dependencies = {} contained_type = ARRAY.OF - if ARRAY._hints.get("render_as_void"): - contained_type = Void + # There is no such thing as an array of voids: + # we use a an array of chars instead; only the pointer can be void*. 
self.itemtypename = db.gettype(contained_type, who_asks=self) self.fulltypename = self.itemtypename.replace('@', '(@)[%d]' % (self.varlength,)) - self.fullptrtypename = self.itemtypename.replace('@', '*@') + if ARRAY._hints.get("render_as_void"): + self.fullptrtypename = 'void *@' + else: + self.fullptrtypename = self.itemtypename.replace('@', '*@') def setup(self): """Array loops are forbidden by ForwardReference.become() because @@ -364,7 +367,10 @@ return self.itemindex_access_expr(baseexpr, index) def itemindex_access_expr(self, baseexpr, indexexpr): - return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr) + if self.ARRAY._hints.get("render_as_void"): + return 'RPyBareItem((char*)%s, %s)' % (baseexpr, indexexpr) + else: + return 'RPyBareItem(%s, %s)' % (baseexpr, indexexpr) def definition(self): return [] # no declaration is needed diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/py/_plugin/pytest_restdoc.py b/py/_plugin/pytest_restdoc.py deleted file mode 100644 --- a/py/_plugin/pytest_restdoc.py +++ /dev/null @@ -1,429 +0,0 @@ -""" -perform ReST syntax, local and remote reference tests on .rst/.txt files. 
-""" -import py -import sys, os, re - -def pytest_addoption(parser): - group = parser.getgroup("ReST", "ReST documentation check options") - group.addoption('-R', '--urlcheck', - action="store_true", dest="urlcheck", default=False, - help="urlopen() remote links found in ReST text files.") - group.addoption('--urltimeout', action="store", metavar="secs", - type="int", dest="urlcheck_timeout", default=5, - help="timeout in seconds for remote urlchecks") - group.addoption('--forcegen', - action="store_true", dest="forcegen", default=False, - help="force generation of html files.") - -def pytest_collect_file(path, parent): - if path.ext in (".txt", ".rst"): - project = getproject(path) - if project is not None: - return ReSTFile(path, parent=parent, project=project) - -def getproject(path): - for parent in path.parts(reverse=True): - confrest = parent.join("confrest.py") - if confrest.check(): - Project = confrest.pyimport().Project - return Project(parent) - -class ReSTFile(py.test.collect.File): - def __init__(self, fspath, parent, project): - super(ReSTFile, self).__init__(fspath=fspath, parent=parent) - self.project = project - - def collect(self): - return [ - ReSTSyntaxTest("ReSTSyntax", parent=self, project=self.project), - LinkCheckerMaker("checklinks", parent=self), - DoctestText("doctest", parent=self), - ] - -def deindent(s, sep='\n'): - leastspaces = -1 - lines = s.split(sep) - for line in lines: - if not line.strip(): - continue - spaces = len(line) - len(line.lstrip()) - if leastspaces == -1 or spaces < leastspaces: - leastspaces = spaces - if leastspaces == -1: - return s - for i, line in enumerate(lines): - if not line.strip(): - lines[i] = '' - else: - lines[i] = line[leastspaces:] - return sep.join(lines) - -class ReSTSyntaxTest(py.test.collect.Item): - def __init__(self, name, parent, project): - super(ReSTSyntaxTest, self).__init__(name=name, parent=parent) - self.project = project - - def reportinfo(self): - return self.fspath, None, "syntax 
check" - - def runtest(self): - self.restcheck(py.path.svnwc(self.fspath)) - - def restcheck(self, path): - py.test.importorskip("docutils") - self.register_linkrole() - from docutils.utils import SystemMessage - try: - self._checkskip(path, self.project.get_htmloutputpath(path)) - self.project.process(path) - except KeyboardInterrupt: - raise - except SystemMessage: - # we assume docutils printed info on stdout - py.test.fail("docutils processing failed, see captured stderr") - - def register_linkrole(self): - #directive.register_linkrole('api', self.resolve_linkrole) - #directive.register_linkrole('source', self.resolve_linkrole) -# -# # XXX fake sphinx' "toctree" and refs -# directive.register_linkrole('ref', self.resolve_linkrole) - - from docutils.parsers.rst import directives - def toctree_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - toctree_directive.content = 1 - toctree_directive.options = {'maxdepth': int, 'glob': directives.flag, - 'hidden': directives.flag} - directives.register_directive('toctree', toctree_directive) - self.register_pygments() - - def register_pygments(self): - # taken from pygments-main/external/rst-directive.py - from docutils.parsers.rst import directives - try: - from pygments.formatters import HtmlFormatter - except ImportError: - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, state_machine): - return [] - pygments_directive.options = {} - else: - # The default formatter - DEFAULT = HtmlFormatter(noclasses=True) - # Add name -> formatter pairs for every variant you want to use - VARIANTS = { - # 'linenos': HtmlFormatter(noclasses=INLINESTYLES, linenos=True), - } - - from docutils import nodes - - from pygments import highlight - from pygments.lexers import get_lexer_by_name, TextLexer - - def pygments_directive(name, arguments, options, content, lineno, - content_offset, block_text, state, 
state_machine): - try: - lexer = get_lexer_by_name(arguments[0]) - except ValueError: - # no lexer found - use the text one instead of an exception - lexer = TextLexer() - # take an arbitrary option if more than one is given - formatter = options and VARIANTS[options.keys()[0]] or DEFAULT - parsed = highlight('\n'.join(content), lexer, formatter) - return [nodes.raw('', parsed, format='html')] - - pygments_directive.options = dict([(key, directives.flag) for key in VARIANTS]) - - pygments_directive.arguments = (1, 0, 1) - pygments_directive.content = 1 - directives.register_directive('sourcecode', pygments_directive) - - def resolve_linkrole(self, name, text, check=True): - apigen_relpath = self.project.apigen_relpath - - if name == 'api': - if text == 'py': - return ('py', apigen_relpath + 'api/index.html') - else: - assert text.startswith('py.'), ( - 'api link "%s" does not point to the py package') % (text,) - dotted_name = text - if dotted_name.find('(') > -1: - dotted_name = dotted_name[:text.find('(')] - # remove pkg root - path = dotted_name.split('.')[1:] - dotted_name = '.'.join(path) - obj = py - if check: - for chunk in path: - try: - obj = getattr(obj, chunk) - except AttributeError: - raise AssertionError( - 'problem with linkrole :api:`%s`: can not resolve ' - 'dotted name %s' % (text, dotted_name,)) - return (text, apigen_relpath + 'api/%s.html' % (dotted_name,)) - elif name == 'source': - assert text.startswith('py/'), ('source link "%s" does not point ' - 'to the py package') % (text,) - relpath = '/'.join(text.split('/')[1:]) - if check: - pkgroot = py._pydir - abspath = pkgroot.join(relpath) - assert pkgroot.join(relpath).check(), ( - 'problem with linkrole :source:`%s`: ' - 'path %s does not exist' % (text, relpath)) - if relpath.endswith('/') or not relpath: - relpath += 'index.html' - else: - relpath += '.html' - return (text, apigen_relpath + 'source/%s' % (relpath,)) - elif name == 'ref': - return ("", "") - - def _checkskip(self, lpath, 
htmlpath=None): - if not self.config.getvalue("forcegen"): - lpath = py.path.local(lpath) - if htmlpath is not None: - htmlpath = py.path.local(htmlpath) - if lpath.ext == '.txt': - htmlpath = htmlpath or lpath.new(ext='.html') - if htmlpath.check(file=1) and htmlpath.mtime() >= lpath.mtime(): - py.test.skip("html file is up to date, use --forcegen to regenerate") - #return [] # no need to rebuild - -class DoctestText(py.test.collect.Item): - def reportinfo(self): - return self.fspath, None, "doctest" - - def runtest(self): - content = self._normalize_linesep() - newcontent = self.config.hook.pytest_doctest_prepare_content(content=content) - if newcontent is not None: - content = newcontent - s = content - l = [] - prefix = '.. >>> ' - mod = py.std.types.ModuleType(self.fspath.purebasename) - skipchunk = False - for line in deindent(s).split('\n'): - stripped = line.strip() - if skipchunk and line.startswith(skipchunk): - py.builtin.print_("skipping", line) - continue - skipchunk = False - if stripped.startswith(prefix): - try: - py.builtin.exec_(py.code.Source( - stripped[len(prefix):]).compile(), mod.__dict__) - except ValueError: - e = sys.exc_info()[1] - if e.args and e.args[0] == "skipchunk": - skipchunk = " " * (len(line) - len(line.lstrip())) - else: - raise - else: - l.append(line) - docstring = "\n".join(l) - mod.__doc__ = docstring - failed, tot = py.std.doctest.testmod(mod, verbose=1) - if failed: - py.test.fail("doctest %s: %s failed out of %s" %( - self.fspath, failed, tot)) - - def _normalize_linesep(self): - # XXX quite nasty... 
but it works (fixes win32 issues) - s = self.fspath.read() - linesep = '\n' - if '\r' in s: - if '\n' not in s: - linesep = '\r' - else: - linesep = '\r\n' - s = s.replace(linesep, '\n') - return s - -class LinkCheckerMaker(py.test.collect.Collector): - def collect(self): - return list(self.genlinkchecks()) - - def genlinkchecks(self): - path = self.fspath - # generating functions + args as single tests - timeout = self.config.getvalue("urlcheck_timeout") - for lineno, line in enumerate(path.readlines()): - line = line.strip() - if line.startswith('.. _'): - if line.startswith('.. _`'): - delim = '`:' - else: - delim = ':' - l = line.split(delim, 1) - if len(l) != 2: - continue - tryfn = l[1].strip() - name = "%s:%d" %(tryfn, lineno) - if tryfn.startswith('http:') or tryfn.startswith('https'): - if self.config.getvalue("urlcheck"): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno, timeout), checkfunc=urlcheck) - elif tryfn.startswith('webcal:'): - continue - else: - i = tryfn.find('#') - if i != -1: - checkfn = tryfn[:i] - else: - checkfn = tryfn - if checkfn.strip() and (1 or checkfn.endswith('.html')): - yield CheckLink(name, parent=self, - args=(tryfn, path, lineno), checkfunc=localrefcheck) - -class CheckLink(py.test.collect.Item): - def __init__(self, name, parent, args, checkfunc): - super(CheckLink, self).__init__(name, parent) - self.args = args - self.checkfunc = checkfunc - - def runtest(self): - return self.checkfunc(*self.args) - - def reportinfo(self, basedir=None): - return (self.fspath, self.args[2], "checklink: %s" % self.args[0]) - -def urlcheck(tryfn, path, lineno, TIMEOUT_URLOPEN): - old = py.std.socket.getdefaulttimeout() - py.std.socket.setdefaulttimeout(TIMEOUT_URLOPEN) - try: - try: - py.builtin.print_("trying remote", tryfn) - py.std.urllib2.urlopen(tryfn) - finally: - py.std.socket.setdefaulttimeout(old) - except (py.std.urllib2.URLError, py.std.urllib2.HTTPError): - e = sys.exc_info()[1] - if getattr(e, 'code', None) in 
(401, 403): # authorization required, forbidden - py.test.skip("%s: %s" %(tryfn, str(e))) - else: - py.test.fail("remote reference error %r in %s:%d\n%s" %( - tryfn, path.basename, lineno+1, e)) - -def localrefcheck(tryfn, path, lineno): - # assume it should be a file - i = tryfn.find('#') - if tryfn.startswith('javascript:'): - return # don't check JS refs - if i != -1: - anchor = tryfn[i+1:] - tryfn = tryfn[:i] - else: - anchor = '' - fn = path.dirpath(tryfn) - ishtml = fn.ext == '.html' - fn = ishtml and fn.new(ext='.txt') or fn - py.builtin.print_("filename is", fn) - if not fn.check(): # not ishtml or not fn.check(): - if not py.path.local(tryfn).check(): # the html could be there - py.test.fail("reference error %r in %s:%d" %( - tryfn, path.basename, lineno+1)) - if anchor: - source = unicode(fn.read(), 'latin1') - source = source.lower().replace('-', ' ') # aehem - - anchor = anchor.replace('-', ' ') - match2 = ".. _`%s`:" % anchor - match3 = ".. _%s:" % anchor - candidates = (anchor, match2, match3) - py.builtin.print_("candidates", repr(candidates)) - for line in source.split('\n'): - line = line.strip() - if line in candidates: - break - else: - py.test.fail("anchor reference error %s#%s in %s:%d" %( - tryfn, anchor, path.basename, lineno+1)) - -if hasattr(sys.stdout, 'fileno') and os.isatty(sys.stdout.fileno()): - def log(msg): - print(msg) -else: - def log(msg): - pass - -def convert_rest_html(source, source_path, stylesheet=None, encoding='latin1'): - """ return html latin1-encoded document for the given input. 
- source a ReST-string - sourcepath where to look for includes (basically) - stylesheet path (to be used if any) - """ - from docutils.core import publish_string - kwargs = { - 'stylesheet' : stylesheet, - 'stylesheet_path': None, - 'traceback' : 1, - 'embed_stylesheet': 0, - 'output_encoding' : encoding, - #'halt' : 0, # 'info', - 'halt_level' : 2, - } - # docutils uses os.getcwd() :-( - source_path = os.path.abspath(str(source_path)) - prevdir = os.getcwd() - try: - #os.chdir(os.path.dirname(source_path)) - return publish_string(source, source_path, writer_name='html', - settings_overrides=kwargs) - finally: - os.chdir(prevdir) - -def process(txtpath, encoding='latin1'): - """ process a textfile """ - log("processing %s" % txtpath) - assert txtpath.check(ext='.txt') - if isinstance(txtpath, py.path.svnwc): - txtpath = txtpath.localpath - htmlpath = txtpath.new(ext='.html') - #svninfopath = txtpath.localpath.new(ext='.svninfo') - - style = txtpath.dirpath('style.css') - if style.check(): - stylesheet = style.basename - else: - stylesheet = None - content = unicode(txtpath.read(), encoding) - doc = convert_rest_html(content, txtpath, stylesheet=stylesheet, encoding=encoding) - htmlpath.open('wb').write(doc) - #log("wrote %r" % htmlpath) - #if txtpath.check(svnwc=1, versioned=1): - # info = txtpath.info() - # svninfopath.dump(info) - -if sys.version_info > (3, 0): - def _uni(s): return s -else: - def _uni(s): - return unicode(s) - -rex1 = re.compile(r'.*(.*).*', re.MULTILINE | re.DOTALL) -rex2 = re.compile(r'.*
(.*)
.*', re.MULTILINE | re.DOTALL) - -def strip_html_header(string, encoding='utf8'): - """ return the content of the body-tag """ - uni = unicode(string, encoding) - for rex in rex1,rex2: - match = rex.search(uni) - if not match: - break - uni = match.group(1) - return uni - -class Project: # used for confrest.py files - def __init__(self, sourcepath): - self.sourcepath = sourcepath - def process(self, path): - return process(path) - def get_htmloutputpath(self, path): - return path.new(ext='html') diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. 
parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. _`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/py/_plugin/pytest_helpconfig.py b/py/_plugin/pytest_helpconfig.py deleted file mode 100644 --- a/py/_plugin/pytest_helpconfig.py +++ /dev/null @@ -1,164 +0,0 @@ -""" provide version info, conftest/environment config names. 
-""" -import py -import inspect, sys - -def pytest_addoption(parser): - group = parser.getgroup('debugconfig') - group.addoption('--version', action="store_true", - help="display py lib version and import information.") - group._addoption('-p', action="append", dest="plugins", default = [], - metavar="name", - help="early-load given plugin (multi-allowed).") - group.addoption('--traceconfig', - action="store_true", dest="traceconfig", default=False, - help="trace considerations of conftest.py files."), - group._addoption('--nomagic', - action="store_true", dest="nomagic", default=False, - help="don't reinterpret asserts, no traceback cutting. ") - group.addoption('--debug', - action="store_true", dest="debug", default=False, - help="generate and show internal debugging information.") - group.addoption("--help-config", action="store_true", dest="helpconfig", - help="show available conftest.py and ENV-variable names.") - - -def pytest_configure(__multicall__, config): - if config.option.version: - p = py.path.local(py.__file__).dirpath() - sys.stderr.write("This is py.test version %s, imported from %s\n" % - (py.__version__, p)) - sys.exit(0) - if not config.option.helpconfig: - return - __multicall__.execute() - options = [] - for group in config._parser._groups: - options.extend(group.options) - widths = [0] * 10 - tw = py.io.TerminalWriter() - tw.sep("-") - tw.line("%-13s | %-18s | %-25s | %s" %( - "cmdline name", "conftest.py name", "ENV-variable name", "help")) - tw.sep("-") - - options = [opt for opt in options if opt._long_opts] - options.sort(key=lambda x: x._long_opts) - for opt in options: - if not opt._long_opts or not opt.dest: - continue - optstrings = list(opt._long_opts) # + list(opt._short_opts) - optstrings = filter(None, optstrings) - optstring = "|".join(optstrings) - line = "%-13s | %-18s | %-25s | %s" %( - optstring, - "option_%s" % opt.dest, - "PYTEST_OPTION_%s" % opt.dest.upper(), - opt.help and opt.help or "", - ) - 
tw.line(line[:tw.fullwidth]) - for name, help in conftest_options: - line = "%-13s | %-18s | %-25s | %s" %( - "", - name, - "", - help, - ) - tw.line(line[:tw.fullwidth]) - - tw.sep("-") - sys.exit(0) - -conftest_options = ( - ('pytest_plugins', 'list of plugin names to load'), - ('collect_ignore', '(relative) paths ignored during collection'), - ('rsyncdirs', 'to-be-rsynced directories for dist-testing'), -) - -def pytest_report_header(config): - lines = [] - if config.option.debug or config.option.traceconfig: - lines.append("using py lib: %s" % (py.path.local(py.__file__).dirpath())) - if config.option.traceconfig: - lines.append("active plugins:") - plugins = [] - items = config.pluginmanager._name2plugin.items() - for name, plugin in items: - lines.append(" %-20s: %s" %(name, repr(plugin))) - return lines - - -# ===================================================== -# validate plugin syntax and hooks -# ===================================================== - -def pytest_plugin_registered(manager, plugin): - methods = collectattr(plugin) - hooks = {} - for hookspec in manager.hook._hookspecs: - hooks.update(collectattr(hookspec)) - - stringio = py.io.TextIO() - def Print(*args): - if args: - stringio.write(" ".join(map(str, args))) - stringio.write("\n") - - fail = False - while methods: - name, method = methods.popitem() - #print "checking", name - if isgenerichook(name): - continue - if name not in hooks: - if not getattr(method, 'optionalhook', False): - Print("found unknown hook:", name) - fail = True - else: - #print "checking", method - method_args = getargs(method) - #print "method_args", method_args - if '__multicall__' in method_args: - method_args.remove('__multicall__') - hook = hooks[name] - hookargs = getargs(hook) - for arg in method_args: - if arg not in hookargs: - Print("argument %r not available" %(arg, )) - Print("actual definition: %s" %(formatdef(method))) - Print("available hook arguments: %s" % - ", ".join(hookargs)) - fail = True - break 
- #if not fail: - # print "matching hook:", formatdef(method) - if fail: - name = getattr(plugin, '__name__', plugin) - raise PluginValidationError("%s:\n%s" %(name, stringio.getvalue())) - -class PluginValidationError(Exception): - """ plugin failed validation. """ - -def isgenerichook(name): - return name == "pytest_plugins" or \ - name.startswith("pytest_funcarg__") - -def getargs(func): - args = inspect.getargs(py.code.getrawcode(func))[0] - startindex = inspect.ismethod(func) and 1 or 0 - return args[startindex:] - -def collectattr(obj, prefixes=("pytest_",)): - methods = {} - for apiname in dir(obj): - for prefix in prefixes: - if apiname.startswith(prefix): - methods[apiname] = getattr(obj, apiname) - return methods - -def formatdef(func): - return "%s%s" %( - func.__name__, - inspect.formatargspec(*inspect.getargspec(func)) - ) - diff --git a/py/bin/py.which b/py/bin/py.which deleted file mode 100755 --- a/py/bin/py.which +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pywhich() \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. 
diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py --- a/pypy/rpython/lltypesystem/rstr.py +++ b/pypy/rpython/lltypesystem/rstr.py @@ -515,7 +515,6 @@ return count @classmethod - @purefunction def ll_find(cls, s1, s2, start, end): if start < 0: start = 0 @@ -529,11 +528,10 @@ return start elif m == 1: return cls.ll_find_char(s1, s2.chars[0], start, end) - + return cls.ll_search(s1, s2, start, end, FAST_FIND) @classmethod - @purefunction def ll_rfind(cls, s1, s2, start, end): if start < 0: start = 0 @@ -547,11 +545,10 @@ return end elif m == 1: return cls.ll_rfind_char(s1, s2.chars[0], start, end) - + return cls.ll_search(s1, s2, start, end, FAST_RFIND) @classmethod - @purefunction def ll_count(cls, s1, s2, start, end): if start < 0: start = 0 @@ -565,7 +562,7 @@ return end - start + 1 elif m == 1: return cls.ll_count_char(s1, s2.chars[0], start, end) - + res = cls.ll_search(s1, s2, start, end, FAST_COUNT) # For a few cases ll_search can return -1 to indicate an "impossible" # condition for a string match, count just returns 0 in these cases. diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. _`translation documentation`: ../translation.html diff --git a/py/bin/py.countloc b/py/bin/py.countloc deleted file mode 100755 --- a/py/bin/py.countloc +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pycountloc() \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. 
diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. _`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. 
-This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. internal diff --git a/py/bin/env.cmd b/py/bin/env.cmd deleted file mode 100644 --- a/py/bin/env.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -for /F "usebackq delims=" %%i in (`python "%~dp0\env.py"`) do %%i diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. _`rffi`: ../rffi.html diff --git a/py/_path/gateway/__init__.py b/py/_path/gateway/__init__.py deleted file mode 100644 --- a/py/_path/gateway/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". 
diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. diff --git a/py/_plugin/pytest_recwarn.py b/py/_plugin/pytest_recwarn.py deleted file mode 100644 --- a/py/_plugin/pytest_recwarn.py +++ /dev/null @@ -1,121 +0,0 @@ -""" -helpers for asserting deprecation and other warnings. - -Example usage ---------------------- - -You can use the ``recwarn`` funcarg to track -warnings within a test function: - -.. sourcecode:: python - - def test_hello(recwarn): - from warnings import warn - warn("hello", DeprecationWarning) - w = recwarn.pop(DeprecationWarning) - assert issubclass(w.category, DeprecationWarning) - assert 'hello' in str(w.message) - assert w.filename - assert w.lineno - -You can also call a global helper for checking -taht a certain function call yields a Deprecation -warning: - -.. sourcecode:: python - - import py - - def test_global(): - py.test.deprecated_call(myfunction, 17) - - -""" - -import py -import os - -def pytest_funcarg__recwarn(request): - """Return a WarningsRecorder instance that provides these methods: - - * ``pop(category=None)``: return last warning matching the category. 
- * ``clear()``: clear list of warnings - """ - warnings = WarningsRecorder() - request.addfinalizer(warnings.finalize) - return warnings - -def pytest_namespace(): - return {'deprecated_call': deprecated_call} - -def deprecated_call(func, *args, **kwargs): - """ assert that calling func(*args, **kwargs) - triggers a DeprecationWarning. - """ - warningmodule = py.std.warnings - l = [] - oldwarn_explicit = getattr(warningmodule, 'warn_explicit') - def warn_explicit(*args, **kwargs): - l.append(args) - oldwarn_explicit(*args, **kwargs) - oldwarn = getattr(warningmodule, 'warn') - def warn(*args, **kwargs): - l.append(args) - oldwarn(*args, **kwargs) - - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - try: - ret = func(*args, **kwargs) - finally: - warningmodule.warn_explicit = warn_explicit - warningmodule.warn = warn - if not l: - #print warningmodule - __tracebackhide__ = True - raise AssertionError("%r did not produce DeprecationWarning" %(func,)) - return ret - - -class RecordedWarning: - def __init__(self, message, category, filename, lineno, line): - self.message = message - self.category = category - self.filename = filename - self.lineno = lineno - self.line = line - -class WarningsRecorder: - def __init__(self): - warningmodule = py.std.warnings - self.list = [] - def showwarning(message, category, filename, lineno, line=0): - self.list.append(RecordedWarning( - message, category, filename, lineno, line)) - try: - self.old_showwarning(message, category, - filename, lineno, line=line) - except TypeError: - # < python2.6 - self.old_showwarning(message, category, filename, lineno) - self.old_showwarning = warningmodule.showwarning - warningmodule.showwarning = showwarning - - def pop(self, cls=Warning): - """ pop the first recorded warning, raise exception if not exists.""" - for i, w in enumerate(self.list): - if issubclass(w.category, cls): - return self.list.pop(i) - __tracebackhide__ = True - assert 0, "%r not found in %r" %(cls, 
self.list) - - #def resetregistry(self): - # import warnings - # warnings.onceregistry.clear() - # warnings.__warningregistry__.clear() - - def clear(self): - self.list[:] = [] - - def finalize(self): - py.std.warnings.showwarning = self.old_showwarning diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/py/bin/py.svnwcrevert b/py/bin/py.svnwcrevert deleted file mode 100755 --- a/py/bin/py.svnwcrevert +++ /dev/null @@ -1,3 +0,0 @@ -#!/usr/bin/env python -from _findpy import py -py.cmdline.pysvnwcrevert() \ No newline at end of file diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. 
-This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/py/_plugin/pytest_pdb.py b/py/_plugin/pytest_pdb.py deleted file mode 100644 --- a/py/_plugin/pytest_pdb.py +++ /dev/null @@ -1,105 +0,0 @@ -""" -interactive debugging with the Python Debugger. -""" -import py -import pdb, sys, linecache - -def pytest_addoption(parser): - group = parser.getgroup("general") - group._addoption('--pdb', - action="store_true", dest="usepdb", default=False, - help="start the interactive Python debugger on errors.") - -def pytest_configure(config): - if config.getvalue("usepdb"): - config.pluginmanager.register(PdbInvoke(), 'pdb') - -class PdbInvoke: - def pytest_runtest_makereport(self, item, call): - if call.excinfo and not \ - call.excinfo.errisinstance(py.test.skip.Exception): - # play well with capturing, slightly hackish - capman = item.config.pluginmanager.getplugin('capturemanager') - capman.suspendcapture() - - tw = py.io.TerminalWriter() - repr = call.excinfo.getrepr() - repr.toterminal(tw) - post_mortem(call.excinfo._excinfo[2]) - - capman.resumecapture_item(item) - -class Pdb(py.std.pdb.Pdb): - def do_list(self, arg): - self.lastcmd = 'list' - last = None - if arg: - try: - x = eval(arg, {}, {}) - if type(x) == type(()): - first, last = x - first = int(first) - last = int(last) - if last < first: - # Assume it's a count - last = first + last - else: - first = max(1, int(x) - 5) - except: - print ('*** Error in argument: %s' % repr(arg)) - return - elif self.lineno is None: - first = max(1, self.curframe.f_lineno - 5) - else: - first = self.lineno + 1 - if last is None: - last = first + 10 - filename = self.curframe.f_code.co_filename - breaklist = self.get_file_breaks(filename) - try: - for lineno in range(first, last+1): - # start 
difference from normal do_line - line = self._getline(filename, lineno) - # end difference from normal do_line - if not line: - print ('[EOF]') - break - else: - s = repr(lineno).rjust(3) - if len(s) < 4: s = s + ' ' - if lineno in breaklist: s = s + 'B' - else: s = s + ' ' - if lineno == self.curframe.f_lineno: - s = s + '->' - sys.stdout.write(s + '\t' + line) - self.lineno = lineno - except KeyboardInterrupt: - pass - do_l = do_list - - def _getline(self, filename, lineno): - if hasattr(filename, "__source__"): - try: - return filename.__source__.lines[lineno - 1] + "\n" - except IndexError: - return None - return linecache.getline(filename, lineno) - - def get_stack(self, f, t): - # Modified from bdb.py to be able to walk the stack beyond generators, - # which does not work in the normal pdb :-( - stack, i = pdb.Pdb.get_stack(self, f, t) - if f is None: - i = max(0, len(stack) - 1) - while i and stack[i][0].f_locals.get("__tracebackhide__", False): - i-=1 - return stack, i - -def post_mortem(t): - p = Pdb() - p.reset() - p.interaction(None, t) - -def set_trace(): - # again, a copy of the version in pdb.py - Pdb().set_trace(sys._getframe().f_back) diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. 
- -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/interpreter/pycode.py b/pypy/interpreter/pycode.py --- a/pypy/interpreter/pycode.py +++ b/pypy/interpreter/pycode.py @@ -9,8 +9,7 @@ from pypy.interpreter import eval from pypy.interpreter.argument import Signature from pypy.interpreter.error import OperationError -from pypy.interpreter.gateway import NoneNotWrapped -from pypy.interpreter.baseobjspace import ObjSpace, W_Root +from pypy.interpreter.gateway import NoneNotWrapped, unwrap_spec from pypy.interpreter.astcompiler.consts import (CO_OPTIMIZED, CO_OPTIMIZED, CO_NEWLOCALS, CO_VARARGS, CO_VARKEYWORDS, CO_NESTED, CO_GENERATOR, CO_CONTAINSGLOBALS) @@ -87,7 +86,7 @@ self._init_flags() # Precompute what arguments need to be copied into cellvars self._args_as_cellvars = [] - + if self.co_cellvars: argcount = self.co_argcount assert argcount >= 0 # annotator hint @@ -147,7 +146,7 @@ def signature(self): return self._signature - + @classmethod def _from_code(cls, space, code, hidden_applevel=False, code_hook=None): """ Initialize the code object from a real (CPython) one. @@ -183,7 +182,7 @@ list(code.co_cellvars), hidden_applevel, cpython_magic) - + def _compute_flatcall(self): # Speed hack! 
self.fast_natural_arity = eval.Code.HOPELESS @@ -193,7 +192,7 @@ return if self.co_argcount > 0xff: return - + self.fast_natural_arity = eval.Code.FLATPYCALL | self.co_argcount def funcrun(self, func, args): @@ -205,7 +204,7 @@ fresh_virtualizable=True) args_matched = args.parse_into_scope(None, fresh_frame.fastlocals_w, func.name, - sig, func.defs_w) + sig, func.defs) fresh_frame.init_cells() return frame.run() @@ -215,10 +214,10 @@ sig = self._signature # speed hack fresh_frame = jit.hint(frame, access_directly=True, - fresh_virtualizable=True) + fresh_virtualizable=True) args_matched = args.parse_into_scope(w_obj, fresh_frame.fastlocals_w, func.name, - sig, func.defs_w) + sig, func.defs) fresh_frame.init_cells() return frame.run() @@ -268,20 +267,20 @@ co = self._to_code() dis.dis(co) - def fget_co_consts(space, self): + def fget_co_consts(self, space): return space.newtuple(self.co_consts_w) - - def fget_co_names(space, self): + + def fget_co_names(self, space): return space.newtuple(self.co_names_w) - def fget_co_varnames(space, self): + def fget_co_varnames(self, space): return space.newtuple([space.wrap(name) for name in self.co_varnames]) - def fget_co_cellvars(space, self): + def fget_co_cellvars(self, space): return space.newtuple([space.wrap(name) for name in self.co_cellvars]) - def fget_co_freevars(space, self): - return space.newtuple([space.wrap(name) for name in self.co_freevars]) + def fget_co_freevars(self, space): + return space.newtuple([space.wrap(name) for name in self.co_freevars]) def descr_code__eq__(self, w_other): space = self.space @@ -330,14 +329,10 @@ w_result = space.xor(w_result, space.hash(w_const)) return w_result - unwrap_spec = [ObjSpace, W_Root, - int, int, int, int, - str, W_Root, W_Root, - W_Root, str, str, int, - str, W_Root, - W_Root, int] - - + @unwrap_spec(argcount=int, nlocals=int, stacksize=int, flags=int, + codestring=str, + filename=str, name=str, firstlineno=int, + lnotab=str, magic=int) def descr_code__new__(space, 
w_subtype, argcount, nlocals, stacksize, flags, codestring, w_constants, w_names, @@ -369,7 +364,6 @@ PyCode.__init__(code, space, argcount, nlocals, stacksize, flags, codestring, consts_w[:], names, varnames, filename, name, firstlineno, lnotab, freevars, cellvars, magic=magic) return space.wrap(code) - descr_code__new__.unwrap_spec = unwrap_spec def descr__reduce__(self, space): from pypy.interpreter.mixedmodule import MixedModule @@ -378,18 +372,18 @@ new_inst = mod.get('code_new') w = space.wrap tup = [ - w(self.co_argcount), - w(self.co_nlocals), - w(self.co_stacksize), + w(self.co_argcount), + w(self.co_nlocals), + w(self.co_stacksize), w(self.co_flags), - w(self.co_code), - space.newtuple(self.co_consts_w), - space.newtuple(self.co_names_w), - space.newtuple([w(v) for v in self.co_varnames]), + w(self.co_code), + space.newtuple(self.co_consts_w), + space.newtuple(self.co_names_w), + space.newtuple([w(v) for v in self.co_varnames]), w(self.co_filename), - w(self.co_name), + w(self.co_name), w(self.co_firstlineno), - w(self.co_lnotab), + w(self.co_lnotab), space.newtuple([w(v) for v in self.co_freevars]), space.newtuple([w(v) for v in self.co_cellvars]), w(self.magic), @@ -402,4 +396,3 @@ def repr(self, space): return space.wrap(self.get_repr()) - repr.unwrap_spec = ['self', ObjSpace] diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. 
internal diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -1,3 +1,4 @@ +import weakref from pypy.rlib import jit, objectmodel, debug from pypy.rlib.rarithmetic import intmask, r_uint @@ -92,6 +93,10 @@ return index def _index(self, selector): + while isinstance(self, PlainAttribute): + if selector == self.selector: + return self.position + self = self.back return -1 def copy(self, obj): @@ -215,15 +220,15 @@ class DevolvedDictTerminator(Terminator): def _read_terminator(self, obj, selector): if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) return space.finditem_str(w_dict, selector[0]) return Terminator._read_terminator(self, obj, selector) def _write_terminator(self, obj, selector, w_value): if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) space.setitem_str(w_dict, selector[0], w_value) return True return Terminator._write_terminator(self, obj, selector, w_value) @@ -231,8 +236,8 @@ def delete(self, obj, selector): from pypy.interpreter.error import OperationError if selector[1] == DICT: - w_dict = obj.getdict() space = self.space + w_dict = obj.getdict(space) try: space.delitem(w_dict, space.wrap(selector[0])) except OperationError, ex: @@ -272,11 +277,6 @@ self._copy_attr(obj, new_obj) return new_obj - def _index(self, selector): - if selector == self.selector: - return self.position - return self.back._index(selector) - def copy(self, obj): new_obj = self.back.copy(obj) self._copy_attr(obj, new_obj) @@ -375,12 +375,12 @@ self._become(new_obj) return True - def getdict(self): + def getdict(self, space): w_dict = self._get_mapdict_map().read(self, ("dict", SPECIAL)) if w_dict is not None: assert isinstance(w_dict, W_DictMultiObject) return w_dict - w_dict = MapDictImplementation(self.space, self) + w_dict = MapDictImplementation(space, self) flag = 
self._get_mapdict_map().write(self, ("dict", SPECIAL), w_dict) assert flag return w_dict @@ -388,7 +388,7 @@ def setdict(self, space, w_dict): from pypy.interpreter.typedef import check_new_dictionary w_dict = check_new_dictionary(space, w_dict) - w_olddict = self.getdict() + w_olddict = self.getdict(space) assert isinstance(w_dict, W_DictMultiObject) if w_olddict.r_dict_content is None: w_olddict._as_rdict() @@ -506,6 +506,9 @@ memo_get_subclass_of_correct_size._annspecialcase_ = "specialize:memo" _subclass_cache = {} +erase_item, unerase_item = rerased.new_erasing_pair("mapdict storage item") +erase_list, unerase_list = rerased.new_erasing_pair("mapdict storage list") + def _make_subclass_size_n(supercls, n, use_erased=True): from pypy.rlib import unroll rangen = unroll.unrolling_iterable(range(n)) @@ -535,19 +538,22 @@ def _mapdict_get_storage_list(self): erased = getattr(self, "_value%s" % nmin1) if use_erased: - return rerased.unerase_fixedsizelist(erased, W_Root) + return unerase_list(erased) else: assert isinstance(erased, ExtraAttributes) return erased.storage def _mapdict_read_storage(self, index): - for i in rangenmin1: - if index == i: - return getattr(self, "_value%s" % i) + assert index >= 0 + if index < nmin1: + for i in rangenmin1: + if index == i: + erased = getattr(self, "_value%s" % i) + return unerase_item(erased) if self._has_storage_list(): return self._mapdict_get_storage_list()[index - nmin1] erased = getattr(self, "_value%s" % nmin1) - return unerase(erased, W_Root) + return unerase_item(erased) def _mapdict_write_storage(self, index, value): for i in rangenmin1: @@ -557,7 +563,7 @@ if self._has_storage_list(): self._mapdict_get_storage_list()[index - nmin1] = value return - erased = erase(value) + erased = erase_item(value) setattr(self, "_value%s" % nmin1, erased) def _mapdict_storage_length(self): @@ -577,21 +583,21 @@ has_storage_list = self._has_storage_list() if len_storage < n: assert not has_storage_list - erased = erase(None) + 
erased = erase_item(None) elif len_storage == n: assert not has_storage_list - erased = erase(storage[nmin1]) + erased = erase_item(storage[nmin1]) elif not has_storage_list: # storage is longer than self.map.length() only due to # overallocation - erased = erase(storage[nmin1]) + erased = erase_item(storage[nmin1]) # in theory, we should be ultra-paranoid and check all entries, # but checking just one should catch most problems anyway: assert storage[n] is None else: storage_list = storage[nmin1:] if use_erased: - erased = rerased.erase_fixedsizelist(storage_list, W_Root) + erased = erase_list(storage_list) else: erased = ExtraAttributes(storage_list) setattr(self, "_value%s" % nmin1, erased) @@ -680,7 +686,7 @@ def materialize_r_dict(space, obj, w_d): map = obj._get_mapdict_map() - assert obj.getdict() is w_d + assert obj.getdict(space) is w_d new_obj = map.materialize_r_dict(space, obj, w_d) _become(obj, new_obj) @@ -709,7 +715,6 @@ # Magic caching class CacheEntry(object): - map = None version_tag = None index = 0 w_method = None # for callmethod @@ -720,8 +725,11 @@ map = w_obj._get_mapdict_map() return self.is_valid_for_map(map) + @jit.dont_look_inside def is_valid_for_map(self, map): - if map is self.map: + # note that 'map' can be None here + mymap = self.map_wref() + if mymap is not None and mymap is map: version_tag = map.terminator.w_cls.version_tag() if version_tag is self.version_tag: # everything matches, it's incredibly fast @@ -730,22 +738,23 @@ return True return False +_invalid_cache_entry_map = objectmodel.instantiate(AbstractAttribute) +_invalid_cache_entry_map.terminator = None INVALID_CACHE_ENTRY = CacheEntry() -INVALID_CACHE_ENTRY.map = objectmodel.instantiate(AbstractAttribute) - # different from any real map ^^^ -INVALID_CACHE_ENTRY.map.terminator = None - +INVALID_CACHE_ENTRY.map_wref = weakref.ref(_invalid_cache_entry_map) + # different from any real map ^^^ def init_mapdict_cache(pycode): num_entries = len(pycode.co_names_w) 
pycode._mapdict_caches = [INVALID_CACHE_ENTRY] * num_entries + at jit.dont_look_inside def _fill_cache(pycode, nameindex, map, version_tag, index, w_method=None): entry = pycode._mapdict_caches[nameindex] if entry is INVALID_CACHE_ENTRY: entry = CacheEntry() pycode._mapdict_caches[nameindex] = entry - entry.map = map + entry.map_wref = weakref.ref(map) entry.version_tag = version_tag entry.index = index entry.w_method = w_method diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. _`geninterp`: ../geninterp.html diff --git a/py/_plugin/pytest_unittest.py b/py/_plugin/pytest_unittest.py deleted file mode 100644 --- a/py/_plugin/pytest_unittest.py +++ /dev/null @@ -1,81 +0,0 @@ -""" -automatically discover and run traditional "unittest.py" style tests. - -Usage ----------------- - -This plugin collects and runs Python `unittest.py style`_ tests. -It will automatically collect ``unittest.TestCase`` subclasses -and their ``test`` methods from the test modules of a project -(usually following the ``test_*.py`` pattern). - -This plugin is enabled by default. - -.. 
_`unittest.py style`: http://docs.python.org/library/unittest.html -""" -import py -import sys - -def pytest_pycollect_makeitem(collector, name, obj): - if 'unittest' not in sys.modules: - return # nobody derived unittest.TestCase - try: - isunit = issubclass(obj, py.std.unittest.TestCase) - except KeyboardInterrupt: - raise - except Exception: - pass - else: - if isunit: - return UnitTestCase(name, parent=collector) - -class UnitTestCase(py.test.collect.Class): - def collect(self): - return [UnitTestCaseInstance("()", self)] - - def setup(self): - pass - - def teardown(self): - pass - -_dummy = object() -class UnitTestCaseInstance(py.test.collect.Instance): - def collect(self): - loader = py.std.unittest.TestLoader() - names = loader.getTestCaseNames(self.obj.__class__) - l = [] - for name in names: - callobj = getattr(self.obj, name) - if py.builtin.callable(callobj): - l.append(UnitTestFunction(name, parent=self)) - return l - - def _getobj(self): - x = self.parent.obj - return self.parent.obj(methodName='run') - -class UnitTestFunction(py.test.collect.Function): - def __init__(self, name, parent, args=(), obj=_dummy, sort_value=None): - super(UnitTestFunction, self).__init__(name, parent) - self._args = args - if obj is not _dummy: - self._obj = obj - self._sort_value = sort_value - if hasattr(self.parent, 'newinstance'): - self.parent.newinstance() - self.obj = self._getobj() - - def runtest(self): - target = self.obj - args = self._args - target(*args) - - def setup(self): - instance = py.builtin._getimself(self.obj) - instance.setUp() - - def teardown(self): - instance = py.builtin._getimself(self.obj) - instance.tearDown() - diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. 
- - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. 
- -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. - -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. 
- -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. _`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. 
Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. - -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. - -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. 
_`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. - -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. 
_`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. - - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. _`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. 
include:: _ref.txt - diff --git a/pypy/rpython/lltypesystem/lltype.py b/pypy/rpython/lltypesystem/lltype.py --- a/pypy/rpython/lltypesystem/lltype.py +++ b/pypy/rpython/lltypesystem/lltype.py @@ -13,6 +13,33 @@ TLS = tlsobject() +class WeakValueDictionary(weakref.WeakValueDictionary): + """A subclass of weakref.WeakValueDictionary + which resets the 'nested_hash_level' when keys are being deleted. + """ + def __init__(self, *args, **kwargs): + weakref.WeakValueDictionary.__init__(self, *args, **kwargs) + remove_base = self._remove + def remove(*args): + if safe_equal is None: + # The interpreter is shutting down, and the comparison + # function is already gone. + return + if TLS is None: # Happens when the interpreter is shutting down + return remove_base(*args) + nested_hash_level = TLS.nested_hash_level + try: + # The 'remove' function is called when an object dies. This + # can happen anywhere when they are reference cycles, + # especially when we are already computing another __hash__ + # value. It's not really a recursion in this case, so we + # reset the counter; otherwise the hash value may be be + # incorrect and the key won't be deleted. + TLS.nested_hash_level = 0 + remove_base(*args) + finally: + TLS.nested_hash_level = nested_hash_level + self._remove = remove class _uninitialized(object): def __init__(self, TYPE): @@ -368,6 +395,8 @@ return "{ %s }" % of._str_fields() else: return "%s { %s }" % (of._name, of._str_fields()) + elif self._hints.get('render_as_void'): + return 'void' else: return str(self.OF) _str_fields = saferecursive(_str_fields, '...') @@ -397,7 +426,7 @@ # behaves more or less like a Struct with fields item0, item1, ... # but also supports __getitem__(), __setitem__(), __len__(). 
- _cache = weakref.WeakValueDictionary() # cache the length-1 FixedSizeArrays + _cache = WeakValueDictionary() # cache the length-1 FixedSizeArrays def __new__(cls, OF, length, **kwds): if length == 1 and not kwds: try: @@ -633,7 +662,7 @@ class Ptr(LowLevelType): __name__ = property(lambda self: '%sPtr' % self.TO.__name__) - _cache = weakref.WeakValueDictionary() # cache the Ptrs + _cache = WeakValueDictionary() # cache the Ptrs def __new__(cls, TO, use_cache=True): if not isinstance(TO, ContainerType): raise TypeError, ("can only point to a Container type, " @@ -796,6 +825,8 @@ return cast_pointer(TGT, value) elif ORIG == llmemory.Address: return llmemory.cast_adr_to_ptr(value, TGT) + elif ORIG == Signed: + return cast_int_to_ptr(TGT, value) elif TGT == llmemory.Address and isinstance(ORIG, Ptr): return llmemory.cast_ptr_to_adr(value) elif TGT == Signed and isinstance(ORIG, Ptr) and ORIG.TO._gckind == 'raw': @@ -1125,6 +1156,11 @@ raise TypeError("cannot directly assign to container array items") T2 = typeOf(val) if T2 != T1: + from pypy.rpython.lltypesystem import rffi + if T1 is rffi.VOIDP and isinstance(T2, Ptr): + # Any pointer is convertible to void* + val = rffi.cast(rffi.VOIDP, val) + else: raise TypeError("%r items:\n" "expect %r\n" " got %r" % (self._T, T1, T2)) @@ -1164,6 +1200,7 @@ return '* %s' % (self._obj0,) def __call__(self, *args): + from pypy.rpython.lltypesystem import rffi if isinstance(self._T, FuncType): if len(args) != len(self._T.ARGS): raise TypeError,"calling %r with wrong argument number: %r" % (self._T, args) @@ -1177,11 +1214,19 @@ pass else: assert a == value + # None is acceptable for any pointer + elif isinstance(ARG, Ptr) and a is None: + pass + # Any pointer is convertible to void* + elif ARG is rffi.VOIDP and isinstance(typeOf(a), Ptr): + pass # special case: ARG can be a container type, in which # case a should be a pointer to it. This must also be # special-cased in the backends. 
- elif not (isinstance(ARG, ContainerType) - and typeOf(a) == Ptr(ARG)): + elif (isinstance(ARG, ContainerType) and + typeOf(a) == Ptr(ARG)): + pass + else: args_repr = [typeOf(arg) for arg in args] raise TypeError, ("calling %r with wrong argument " "types: %r" % (self._T, args_repr)) diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/py/_compat/__init__.py b/py/_compat/__init__.py deleted file mode 100644 --- a/py/_compat/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -""" compatibility modules (taken from 2.4.4) """ - diff --git a/py/_compat/dep_subprocess.py b/py/_compat/dep_subprocess.py deleted file mode 100644 --- a/py/_compat/dep_subprocess.py +++ /dev/null @@ -1,5 +0,0 @@ - -import py -py.log._apiwarn("1.1", "py.compat.subprocess deprecated, use standard library version.", -stacklevel="apipkg") -subprocess = py.std.subprocess diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. 
This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. 
internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/translator/c/primitive.py b/pypy/translator/c/primitive.py --- a/pypy/translator/c/primitive.py +++ b/pypy/translator/c/primitive.py @@ -1,7 +1,8 @@ import sys from pypy.rlib.objectmodel import Symbolic, ComputedIntSymbolic from pypy.rlib.objectmodel import CDefinedIntSymbolic -from pypy.rlib.rarithmetic import r_longlong, isinf, isnan +from pypy.rlib.rarithmetic import r_longlong +from pypy.rlib.rfloat import isinf, isnan from pypy.rpython.lltypesystem.lltype import * from pypy.rpython.lltypesystem import rffi, llgroup from pypy.rpython.lltypesystem.llmemory import Address, \ @@ -122,7 +123,7 @@ if ' ' <= value < '\x7f': return "'%s'" % (value.replace("\\", r"\\").replace("'", r"\'"),) else: - return '%d' % ord(value) + return '((char)%d)' % ord(value) def name_bool(value, db): return '%d' % value @@ -132,7 +133,7 @@ def name_unichar(value, db): assert type(value) is unicode and len(value) == 1 - return '%d' % ord(value) + return '((wchar_t)%d)' % ord(value) def name_address(value, db): if value: diff --git a/py/_plugin/pytest_pytester.py b/py/_plugin/pytest_pytester.py deleted file mode 100644 --- a/py/_plugin/pytest_pytester.py +++ /dev/null @@ -1,500 +0,0 @@ -""" -funcargs and support code for testing py.test's own functionality. 
-""" - -import py -import sys, os -import re -import inspect -import time -from py._test.config import Config as pytestConfig -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("pylib") - group.addoption('--tools-on-path', - action="store_true", dest="toolsonpath", default=False, - help=("discover tools on PATH instead of going through py.cmdline.") - ) - -pytest_plugins = '_pytest' - -def pytest_funcarg__linecomp(request): - return LineComp() - -def pytest_funcarg__LineMatcher(request): - return LineMatcher - -def pytest_funcarg__testdir(request): - tmptestdir = TmpTestdir(request) - return tmptestdir - -rex_outcome = re.compile("(\d+) (\w+)") -class RunResult: - def __init__(self, ret, outlines, errlines, duration): - self.ret = ret - self.outlines = outlines - self.errlines = errlines - self.stdout = LineMatcher(outlines) - self.stderr = LineMatcher(errlines) - self.duration = duration - - def parseoutcomes(self): - for line in reversed(self.outlines): - if 'seconds' in line: - outcomes = rex_outcome.findall(line) - if outcomes: - d = {} - for num, cat in outcomes: - d[cat] = int(num) - return d - -class TmpTestdir: - def __init__(self, request): - self.request = request - self._pytest = request.getfuncargvalue("_pytest") - # XXX remove duplication with tmpdir plugin - basetmp = request.config.ensuretemp("testdir") - name = request.function.__name__ - for i in range(100): - try: - tmpdir = basetmp.mkdir(name + str(i)) - except py.error.EEXIST: - continue - break - # we need to create another subdir - # because Directory.collect() currently loads - # conftest.py from sibling directories - self.tmpdir = tmpdir.mkdir(name) - self.plugins = [] - self._syspathremove = [] - self.chdir() # always chdir - self.request.addfinalizer(self.finalize) - - def __repr__(self): - return "" % (self.tmpdir,) - - def Config(self, topdir=None): - if topdir is None: - topdir = self.tmpdir.dirpath() - return pytestConfig(topdir=topdir) - - def 
finalize(self): - for p in self._syspathremove: - py.std.sys.path.remove(p) - if hasattr(self, '_olddir'): - self._olddir.chdir() - # delete modules that have been loaded from tmpdir - for name, mod in list(sys.modules.items()): - if mod: - fn = getattr(mod, '__file__', None) - if fn and fn.startswith(str(self.tmpdir)): - del sys.modules[name] - - def getreportrecorder(self, obj): - if hasattr(obj, 'config'): - obj = obj.config - if hasattr(obj, 'hook'): - obj = obj.hook - assert hasattr(obj, '_hookspecs'), obj - reprec = ReportRecorder(obj) - reprec.hookrecorder = self._pytest.gethookrecorder(obj) - reprec.hook = reprec.hookrecorder.hook - return reprec - - def chdir(self): - old = self.tmpdir.chdir() - if not hasattr(self, '_olddir'): - self._olddir = old - - def _makefile(self, ext, args, kwargs): - items = list(kwargs.items()) - if args: - source = "\n".join(map(str, args)) + "\n" - basename = self.request.function.__name__ - items.insert(0, (basename, source)) - ret = None - for name, value in items: - p = self.tmpdir.join(name).new(ext=ext) - source = str(py.code.Source(value)).lstrip() - p.write(source.encode("utf-8"), "wb") - if ret is None: - ret = p - return ret - - - def makefile(self, ext, *args, **kwargs): - return self._makefile(ext, args, kwargs) - - def makeconftest(self, source): - return self.makepyfile(conftest=source) - - def makepyfile(self, *args, **kwargs): - return self._makefile('.py', args, kwargs) - - def maketxtfile(self, *args, **kwargs): - return self._makefile('.txt', args, kwargs) - - def syspathinsert(self, path=None): - if path is None: - path = self.tmpdir - py.std.sys.path.insert(0, str(path)) - self._syspathremove.append(str(path)) - - def mkdir(self, name): - return self.tmpdir.mkdir(name) - - def mkpydir(self, name): - p = self.mkdir(name) - p.ensure("__init__.py") - return p - - def genitems(self, colitems): - return list(self.session.genitems(colitems)) - - def inline_genitems(self, *args): - #config = 
self.parseconfig(*args) - config = self.parseconfig(*args) - session = config.initsession() - rec = self.getreportrecorder(config) - colitems = [config.getnode(arg) for arg in config.args] - items = list(session.genitems(colitems)) - return items, rec - - def runitem(self, source): - # used from runner functional tests - item = self.getitem(source) - # the test class where we are called from wants to provide the runner - testclassinstance = py.builtin._getimself(self.request.function) - runner = testclassinstance.getrunner() - return runner(item) - - def inline_runsource(self, source, *cmdlineargs): - p = self.makepyfile(source) - l = list(cmdlineargs) + [p] - return self.inline_run(*l) - - def inline_runsource1(self, *args): - args = list(args) - source = args.pop() - p = self.makepyfile(source) - l = list(args) + [p] - reprec = self.inline_run(*l) - reports = reprec.getreports("pytest_runtest_logreport") - assert len(reports) == 1, reports - return reports[0] - - def inline_run(self, *args): - args = ("-s", ) + args # otherwise FD leakage - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - session = config.initsession() - reprec = self.getreportrecorder(config) - colitems = config.getinitialnodes() - session.main(colitems) - config.pluginmanager.do_unconfigure(config) - return reprec - - def config_preparse(self): - config = self.Config() - for plugin in self.plugins: - if isinstance(plugin, str): - config.pluginmanager.import_plugin(plugin) - else: - if isinstance(plugin, dict): - plugin = PseudoPlugin(plugin) - if not config.pluginmanager.isregistered(plugin): - config.pluginmanager.register(plugin) - return config - - def parseconfig(self, *args): - if not args: - args = (self.tmpdir,) - config = self.config_preparse() - args = list(args) + ["--basetemp=%s" % self.tmpdir.dirpath('basetemp')] - config.parse(args) - return config - - def reparseconfig(self, args=None): - """ this is used from tests that want to re-invoke parse(). 
""" - if not args: - args = [self.tmpdir] - from py._test import config - oldconfig = config.config_per_process # py.test.config - try: - c = config.config_per_process = py.test.config = pytestConfig() - c.basetemp = oldconfig.mktemp("reparse", numbered=True) - c.parse(args) - return c - finally: - config.config_per_process = py.test.config = oldconfig - - def parseconfigure(self, *args): - config = self.parseconfig(*args) - config.pluginmanager.do_configure(config) - return config - - def getitem(self, source, funcname="test_func"): - modcol = self.getmodulecol(source) - moditems = modcol.collect() - for item in modcol.collect(): - if item.name == funcname: - return item - else: - assert 0, "%r item not found in module:\n%s" %(funcname, source) - - def getitems(self, source): - modcol = self.getmodulecol(source) - return list(modcol.config.initsession().genitems([modcol])) - #assert item is not None, "%r item not found in module:\n%s" %(funcname, source) - #return item - - def getfscol(self, path, configargs=()): - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - return self.config.getnode(path) - - def getmodulecol(self, source, configargs=(), withinit=False): - kw = {self.request.function.__name__: py.code.Source(source).strip()} - path = self.makepyfile(**kw) - if withinit: - self.makepyfile(__init__ = "#") - self.config = self.parseconfig(path, *configargs) - self.session = self.config.initsession() - #self.config.pluginmanager.do_configure(config=self.config) - # XXX - self.config.pluginmanager.import_plugin("runner") - plugin = self.config.pluginmanager.getplugin("runner") - plugin.pytest_configure(config=self.config) - - return self.config.getnode(path) - - def popen(self, cmdargs, stdout, stderr, **kw): - if not hasattr(py.std, 'subprocess'): - py.test.skip("no subprocess module") - env = os.environ.copy() - env['PYTHONPATH'] = ":".join(filter(None, [ - str(os.getcwd()), env.get('PYTHONPATH', '')])) - kw['env'] 
= env - #print "env", env - return py.std.subprocess.Popen(cmdargs, stdout=stdout, stderr=stderr, **kw) - - def run(self, *cmdargs): - return self._run(*cmdargs) - - def _run(self, *cmdargs): - cmdargs = [str(x) for x in cmdargs] - p1 = self.tmpdir.join("stdout") - p2 = self.tmpdir.join("stderr") - print_("running", cmdargs, "curdir=", py.path.local()) - f1 = p1.open("wb") - f2 = p2.open("wb") - now = time.time() - popen = self.popen(cmdargs, stdout=f1, stderr=f2, - close_fds=(sys.platform != "win32")) - ret = popen.wait() - f1.close() - f2.close() - out = p1.read("rb") - out = getdecoded(out).splitlines() - err = p2.read("rb") - err = getdecoded(err).splitlines() - def dump_lines(lines, fp): - try: - for line in lines: - py.builtin.print_(line, file=fp) - except UnicodeEncodeError: - print("couldn't print to %s because of encoding" % (fp,)) - dump_lines(out, sys.stdout) - dump_lines(err, sys.stderr) - return RunResult(ret, out, err, time.time()-now) - - def runpybin(self, scriptname, *args): - fullargs = self._getpybinargs(scriptname) + args - return self.run(*fullargs) - - def _getpybinargs(self, scriptname): - if self.request.config.getvalue("toolsonpath"): - script = py.path.local.sysfind(scriptname) - assert script, "script %r not found" % scriptname - return (script,) - else: - cmdlinename = scriptname.replace(".", "") - assert hasattr(py.cmdline, cmdlinename), cmdlinename - source = ("import sys;sys.path.insert(0,%r);" - "import py;py.cmdline.%s()" % - (str(py._pydir.dirpath()), cmdlinename)) - return (sys.executable, "-c", source,) - - def runpython(self, script): - s = self._getsysprepend() - if s: - script.write(s + "\n" + script.read()) - return self.run(sys.executable, script) - - def _getsysprepend(self): - if not self.request.config.getvalue("toolsonpath"): - s = "import sys;sys.path.insert(0,%r);" % str(py._pydir.dirpath()) - else: - s = "" - return s - - def runpython_c(self, command): - command = self._getsysprepend() + command - return 
self.run(py.std.sys.executable, "-c", command) - - def runpytest(self, *args): - p = py.path.local.make_numbered_dir(prefix="runpytest-", - keep=None, rootdir=self.tmpdir) - args = ('--basetemp=%s' % p, ) + args - plugins = [x for x in self.plugins if isinstance(x, str)] - if plugins: - args = ('-p', plugins[0]) + args - return self.runpybin("py.test", *args) - - def spawn_pytest(self, string, expect_timeout=10.0): - pexpect = py.test.importorskip("pexpect", "2.4") - if not self.request.config.getvalue("toolsonpath"): - py.test.skip("need --tools-on-path to run py.test script") - basetemp = self.tmpdir.mkdir("pexpect") - invoke = self._getpybinargs("py.test")[0] - cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) - child = pexpect.spawn(cmd, logfile=basetemp.join("spawn.out").open("w")) - child.timeout = expect_timeout - return child - -def getdecoded(out): - try: - return out.decode("utf-8") - except UnicodeDecodeError: - return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( - py.io.saferepr(out),) - -class PseudoPlugin: - def __init__(self, vars): - self.__dict__.update(vars) - -class ReportRecorder(object): - def __init__(self, hook): - self.hook = hook - self.registry = hook._registry - self.registry.register(self) - - def getcall(self, name): - return self.hookrecorder.getcall(name) - - def popcall(self, name): - return self.hookrecorder.popcall(name) - - def getcalls(self, names): - """ return list of ParsedCall instances matching the given eventname. 
""" - return self.hookrecorder.getcalls(names) - - # functionality for test reports - - def getreports(self, names="pytest_runtest_logreport pytest_collectreport"): - return [x.report for x in self.getcalls(names)] - - def matchreport(self, inamepart="", names="pytest_runtest_logreport pytest_collectreport"): - """ return a testreport whose dotted import path matches """ - l = [] - for rep in self.getreports(names=names): - colitem = rep.getnode() - if not inamepart or inamepart in colitem.listnames(): - l.append(rep) - if not l: - raise ValueError("could not find test report matching %r: no test reports at all!" % - (inamepart,)) - if len(l) > 1: - raise ValueError("found more than one testreport matching %r: %s" %( - inamepart, l)) - return l[0] - - def getfailures(self, names='pytest_runtest_logreport pytest_collectreport'): - return [rep for rep in self.getreports(names) if rep.failed] - - def getfailedcollections(self): - return self.getfailures('pytest_collectreport') - - def listoutcomes(self): - passed = [] - skipped = [] - failed = [] - for rep in self.getreports("pytest_runtest_logreport"): - if rep.passed: - if rep.when == "call": - passed.append(rep) - elif rep.skipped: - skipped.append(rep) - elif rep.failed: - failed.append(rep) - return passed, skipped, failed - - def countoutcomes(self): - return [len(x) for x in self.listoutcomes()] - - def assertoutcome(self, passed=0, skipped=0, failed=0): - realpassed, realskipped, realfailed = self.listoutcomes() - assert passed == len(realpassed) - assert skipped == len(realskipped) - assert failed == len(realfailed) - - def clear(self): - self.hookrecorder.calls[:] = [] - - def unregister(self): - self.registry.unregister(self) - self.hookrecorder.finish_recording() - -class LineComp: - def __init__(self): - self.stringio = py.io.TextIO() - - def assert_contains_lines(self, lines2): - """ assert that lines2 are contained (linearly) in lines1. - return a list of extralines found. 
- """ - __tracebackhide__ = True - val = self.stringio.getvalue() - self.stringio.truncate(0) - self.stringio.seek(0) - lines1 = val.split("\n") - return LineMatcher(lines1).fnmatch_lines(lines2) - -class LineMatcher: - def __init__(self, lines): - self.lines = lines - - def str(self): - return "\n".join(self.lines) - - def fnmatch_lines(self, lines2): - if isinstance(lines2, str): - lines2 = py.code.Source(lines2) - if isinstance(lines2, py.code.Source): - lines2 = lines2.strip().lines - - from fnmatch import fnmatch - lines1 = self.lines[:] - nextline = None - extralines = [] - __tracebackhide__ = True - for line in lines2: - nomatchprinted = False - while lines1: - nextline = lines1.pop(0) - if line == nextline: - print_("exact match:", repr(line)) - break - elif fnmatch(nextline, line): - print_("fnmatch:", repr(line)) - print_(" with:", repr(nextline)) - break - else: - if not nomatchprinted: - print_("nomatch:", repr(line)) - nomatchprinted = True - print_(" and:", repr(nextline)) - extralines.append(nextline) - else: - assert line == nextline diff --git a/py/_plugin/pytest_monkeypatch.py b/py/_plugin/pytest_monkeypatch.py deleted file mode 100644 --- a/py/_plugin/pytest_monkeypatch.py +++ /dev/null @@ -1,141 +0,0 @@ -""" -safely patch object attributes, dicts and environment variables. - -Usage ----------------- - -Use the `monkeypatch funcarg`_ to tweak your global test environment -for running a particular test. You can safely set/del an attribute, -dictionary item or environment variable by respective methods -on the monkeypatch funcarg. If you want e.g. to set an ENV1 variable -and have os.path.expanduser return a particular directory, you can -write it down like this: - -.. sourcecode:: python - - def test_mytest(monkeypatch): - monkeypatch.setenv('ENV1', 'myval') - monkeypatch.setattr(os.path, 'expanduser', lambda x: '/tmp/xyz') - ... 
# your test code that uses those patched values implicitely - -After the test function finished all modifications will be undone, -because the ``monkeypatch.undo()`` method is registered as a finalizer. - -``monkeypatch.setattr/delattr/delitem/delenv()`` all -by default raise an Exception if the target does not exist. -Pass ``raising=False`` if you want to skip this check. - -prepending to PATH or other environment variables ---------------------------------------------------------- - -To prepend a value to an already existing environment parameter: - -.. sourcecode:: python - - def test_mypath_finding(monkeypatch): - monkeypatch.setenv('PATH', 'x/y', prepend=":") - # in bash language: export PATH=x/y:$PATH - -calling "undo" finalization explicitely ------------------------------------------ - -At the end of function execution py.test invokes -a teardown hook which undoes all monkeypatch changes. -If you do not want to wait that long you can call -finalization explicitely:: - - monkeypatch.undo() - -This will undo previous changes. This call consumes the -undo stack. Calling it a second time has no effect unless -you start monkeypatching after the undo call. - -.. _`monkeypatch blog post`: http://tetamap.wordpress.com/2009/03/03/monkeypatching-in-unit-tests-done-right/ -""" - -import py, os, sys - -def pytest_funcarg__monkeypatch(request): - """The returned ``monkeypatch`` funcarg provides these - helper methods to modify objects, dictionaries or os.environ:: - - monkeypatch.setattr(obj, name, value, raising=True) - monkeypatch.delattr(obj, name, raising=True) - monkeypatch.setitem(mapping, name, value) - monkeypatch.delitem(obj, name, raising=True) - monkeypatch.setenv(name, value, prepend=False) - monkeypatch.delenv(name, value, raising=True) - monkeypatch.syspath_prepend(path) - - All modifications will be undone when the requesting - test function finished its execution. 
The ``raising`` - parameter determines if a KeyError or AttributeError - will be raised if the set/deletion operation has no target. - """ - monkeypatch = MonkeyPatch() - request.addfinalizer(monkeypatch.undo) - return monkeypatch - -notset = object() - -class MonkeyPatch: - def __init__(self): - self._setattr = [] - self._setitem = [] - - def setattr(self, obj, name, value, raising=True): - oldval = getattr(obj, name, notset) - if raising and oldval is notset: - raise AttributeError("%r has no attribute %r" %(obj, name)) - self._setattr.insert(0, (obj, name, oldval)) - setattr(obj, name, value) - - def delattr(self, obj, name, raising=True): - if not hasattr(obj, name): - if raising: - raise AttributeError(name) - else: - self._setattr.insert(0, (obj, name, getattr(obj, name, notset))) - delattr(obj, name) - - def setitem(self, dic, name, value): - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - dic[name] = value - - def delitem(self, dic, name, raising=True): - if name not in dic: - if raising: - raise KeyError(name) - else: - self._setitem.insert(0, (dic, name, dic.get(name, notset))) - del dic[name] - - def setenv(self, name, value, prepend=None): - value = str(value) - if prepend and name in os.environ: - value = value + prepend + os.environ[name] - self.setitem(os.environ, name, value) - - def delenv(self, name, raising=True): - self.delitem(os.environ, name, raising=raising) - - def syspath_prepend(self, path): - if not hasattr(self, '_savesyspath'): - self._savesyspath = sys.path[:] - sys.path.insert(0, str(path)) - - def undo(self): - for obj, name, value in self._setattr: - if value is not notset: - setattr(obj, name, value) - else: - delattr(obj, name) - self._setattr[:] = [] - for dictionary, name, value in self._setitem: - if value is notset: - del dictionary[name] - else: - dictionary[name] = value - self._setitem[:] = [] - if hasattr(self, '_savesyspath'): - sys.path[:] = self._savesyspath diff --git a/py/_code/oldmagic.py 
b/py/_code/oldmagic.py deleted file mode 100644 --- a/py/_code/oldmagic.py +++ /dev/null @@ -1,62 +0,0 @@ -""" deprecated module for turning on/off some features. """ - -import py - -from py.builtin import builtins as cpy_builtin - -def invoke(assertion=False, compile=False): - """ (deprecated) invoke magic, currently you can specify: - - assertion patches the builtin AssertionError to try to give - more meaningful AssertionErrors, which by means - of deploying a mini-interpreter constructs - a useful error message. - """ - py.log._apiwarn("1.1", - "py.magic.invoke() is deprecated, use py.code.patch_builtins()", - stacklevel=2, - ) - py.code.patch_builtins(assertion=assertion, compile=compile) - -def revoke(assertion=False, compile=False): - """ (deprecated) revoke previously invoked magic (see invoke()).""" - py.log._apiwarn("1.1", - "py.magic.revoke() is deprecated, use py.code.unpatch_builtins()", - stacklevel=2, - ) - py.code.unpatch_builtins(assertion=assertion, compile=compile) - -patched = {} - -def patch(namespace, name, value): - """ (deprecated) rebind the 'name' on the 'namespace' to the 'value', - possibly and remember the original value. Multiple - invocations to the same namespace/name pair will - remember a list of old values. - """ - py.log._apiwarn("1.1", - "py.magic.patch() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - orig = getattr(namespace, name) - patched.setdefault(nref, []).append(orig) - setattr(namespace, name, value) - return orig - -def revert(namespace, name): - """ (deprecated) revert to the orginal value the last patch modified. - Raise ValueError if no such original value exists. 
- """ - py.log._apiwarn("1.1", - "py.magic.revert() is deprecated, in tests use monkeypatch funcarg.", - stacklevel=2, - ) - nref = (namespace, name) - if nref not in patched or not patched[nref]: - raise ValueError("No original value stored for %s.%s" % nref) - current = getattr(namespace, name) - orig = patched[nref].pop() - setattr(namespace, name, orig) - return current - diff --git a/py/bin/win32/py.lookup.cmd b/py/bin/win32/py.lookup.cmd deleted file mode 100644 --- a/py/bin/win32/py.lookup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.lookup" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. 
diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. - -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. 
- -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. 
_`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. 
- -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. - -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. 
diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. - It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. 
- -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. _`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/rlib/rmmap.py b/pypy/rlib/rmmap.py --- a/pypy/rlib/rmmap.py +++ b/pypy/rlib/rmmap.py @@ -565,7 +565,11 @@ charp = rffi.cast(LPCSTR, data) self.setdata(charp, newsize) return - raise rwin32.lastWindowsError() + winerror = rwin32.lastWindowsError() + if self.map_handle: + rwin32.CloseHandle(self.map_handle) + self.map_handle = INVALID_HANDLE + raise winerror def len(self): self.check_valid() @@ -788,13 +792,17 @@ if m.map_handle: data = MapViewOfFile(m.map_handle, dwDesiredAccess, - offset_hi, offset_lo, 0) + offset_hi, offset_lo, length) if data: # XXX we should have a real LPVOID which must always be casted charp = rffi.cast(LPCSTR, data) m.setdata(charp, map_size) return m - raise rwin32.lastWindowsError() + winerror = rwin32.lastWindowsError() + if m.map_handle: + rwin32.CloseHandle(m.map_handle) + m.map_handle = INVALID_HANDLE + raise winerror def alloc(map_size): """Allocate memory. 
This is intended to be used by the JIT, diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. diff --git a/py/_plugin/pytest_hooklog.py b/py/_plugin/pytest_hooklog.py deleted file mode 100644 --- a/py/_plugin/pytest_hooklog.py +++ /dev/null @@ -1,33 +0,0 @@ -""" log invocations of extension hooks to a file. 
""" -import py - -def pytest_addoption(parser): - parser.addoption("--hooklog", dest="hooklog", default=None, - help="write hook calls to the given file.") - -def pytest_configure(config): - hooklog = config.getvalue("hooklog") - if hooklog: - config._hooklogfile = open(hooklog, 'w') - config._hooklog_oldperformcall = config.hook._performcall - config.hook._performcall = (lambda name, multicall: - logged_call(name=name, multicall=multicall, config=config)) - -def logged_call(name, multicall, config): - f = config._hooklogfile - f.write("%s(**%s)\n" % (name, multicall.kwargs)) - try: - res = config._hooklog_oldperformcall(name=name, multicall=multicall) - except: - f.write("-> exception") - raise - f.write("-> %r" % (res,)) - return res - -def pytest_unconfigure(config): - try: - del config.hook.__dict__['_performcall'] - except KeyError: - pass - else: - config._hooklogfile.close() diff --git a/py/_cmdline/pycleanup.py b/py/_cmdline/pycleanup.py deleted file mode 100755 --- a/py/_cmdline/pycleanup.py +++ /dev/null @@ -1,86 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.cleanup [PATH] ... - -Delete typical python development related files recursively under the specified PATH (which defaults to the current working directory). Don't follow links and don't recurse into directories with a dot. Optionally remove setup.py related files and empty -directories. - -""" -import py -import sys, subprocess - -def main(): - parser = py.std.optparse.OptionParser(usage=__doc__) - parser.add_option("-e", metavar="ENDING", - dest="endings", default=[".pyc", "$py.class"], action="append", - help=("(multi) recursively remove files with the given ending." 
- " '.pyc' and '$py.class' are in the default list.")) - parser.add_option("-d", action="store_true", dest="removedir", - help="remove empty directories.") - parser.add_option("-s", action="store_true", dest="setup", - help="remove 'build' and 'dist' directories next to setup.py files") - parser.add_option("-a", action="store_true", dest="all", - help="synonym for '-S -d -e pip-log.txt'") - parser.add_option("-n", "--dryrun", dest="dryrun", default=False, - action="store_true", - help="don't actually delete but display would-be-removed filenames.") - (options, args) = parser.parse_args() - - Cleanup(options, args).main() - -class Cleanup: - def __init__(self, options, args): - if not args: - args = ["."] - self.options = options - self.args = [py.path.local(x) for x in args] - if options.all: - options.setup = True - options.removedir = True - options.endings.append("pip-log.txt") - - def main(self): - if self.options.setup: - for arg in self.args: - self.setupclean(arg) - - for path in self.args: - py.builtin.print_("cleaning path", path, - "of extensions", self.options.endings) - for x in path.visit(self.shouldremove, self.recursedir): - self.remove(x) - if self.options.removedir: - for x in path.visit(lambda x: x.check(dir=1), self.recursedir): - if not x.listdir(): - self.remove(x) - - def shouldremove(self, p): - for ending in self.options.endings: - if p.basename.endswith(ending): - return True - - def recursedir(self, path): - return path.check(dotfile=0, link=0) - - def remove(self, path): - if not path.check(): - return - if self.options.dryrun: - py.builtin.print_("would remove", path) - else: - py.builtin.print_("removing", path) - path.remove() - - def XXXcallsetup(self, setup, *args): - old = setup.dirpath().chdir() - try: - subprocess.call([sys.executable, str(setup)] + list(args)) - finally: - old.chdir() - - def setupclean(self, path): - for x in path.visit("setup.py", self.recursedir): - basepath = x.dirpath() - self.remove(basepath / "build") - 
self.remove(basepath / "dist") diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. 
intentionally empty diff --git a/py/_plugin/pytest_assertion.py b/py/_plugin/pytest_assertion.py deleted file mode 100644 --- a/py/_plugin/pytest_assertion.py +++ /dev/null @@ -1,28 +0,0 @@ -import py -import sys - -def pytest_addoption(parser): - group = parser.getgroup("debugconfig") - group._addoption('--no-assert', action="store_true", default=False, - dest="noassert", - help="disable python assert expression reinterpretation."), - -def pytest_configure(config): - if not config.getvalue("noassert") and not config.getvalue("nomagic"): - warn_about_missing_assertion() - config._oldassertion = py.builtin.builtins.AssertionError - py.builtin.builtins.AssertionError = py.code._AssertionError - -def pytest_unconfigure(config): - if hasattr(config, '_oldassertion'): - py.builtin.builtins.AssertionError = config._oldassertion - del config._oldassertion - -def warn_about_missing_assertion(): - try: - assert False - except AssertionError: - pass - else: - py.std.warnings.warn("Assertions are turned off!" - " (are you using python -O?)") diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. 
internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/py/_plugin/pytest_resultlog.py b/py/_plugin/pytest_resultlog.py deleted file mode 100644 --- a/py/_plugin/pytest_resultlog.py +++ /dev/null @@ -1,98 +0,0 @@ -"""non-xml machine-readable logging of test results. - Useful for buildbot integration code. See the `PyPy-test`_ - web page for post-processing. - -.. _`PyPy-test`: http://codespeak.net:8099/summary - -""" - -import py -from py.builtin import print_ - -def pytest_addoption(parser): - group = parser.getgroup("resultlog", "resultlog plugin options") - group.addoption('--resultlog', action="store", dest="resultlog", metavar="path", default=None, - help="path for machine-readable result log.") - -def pytest_configure(config): - resultlog = config.option.resultlog - if resultlog: - logfile = open(resultlog, 'w', 1) # line buffered - config._resultlog = ResultLog(config, logfile) - config.pluginmanager.register(config._resultlog) - -def pytest_unconfigure(config): - resultlog = getattr(config, '_resultlog', None) - if resultlog: - resultlog.logfile.close() - del config._resultlog - config.pluginmanager.unregister(resultlog) - -def generic_path(item): - chain = item.listchain() - gpath = [chain[0].name] - fspath = chain[0].fspath - fspart = False - for node in chain[1:]: - newfspath = node.fspath - if newfspath == fspath: - if fspart: - gpath.append(':') - fspart = False - else: - gpath.append('.') 
- else: - gpath.append('/') - fspart = True - name = node.name - if name[0] in '([': - gpath.pop() - gpath.append(name) - fspath = newfspath - return ''.join(gpath) - -class ResultLog(object): - def __init__(self, config, logfile): - self.config = config - self.logfile = logfile # preferably line buffered - - def write_log_entry(self, testpath, shortrepr, longrepr): - print_("%s %s" % (shortrepr, testpath), file=self.logfile) - for line in longrepr.splitlines(): - print_(" %s" % line, file=self.logfile) - - def log_outcome(self, node, shortrepr, longrepr): - testpath = generic_path(node) - self.write_log_entry(testpath, shortrepr, longrepr) - - def pytest_runtest_logreport(self, report): - res = self.config.hook.pytest_report_teststatus(report=report) - if res is not None: - code = res[1] - else: - code = report.shortrepr - if code == 'x': - longrepr = str(report.longrepr) - elif code == 'X': - longrepr = '' - elif report.passed: - longrepr = "" - elif report.failed: - longrepr = str(report.longrepr) - elif report.skipped: - longrepr = str(report.longrepr.reprcrash.message) - self.log_outcome(report.item, code, longrepr) - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - code = "F" - else: - assert report.skipped - code = "S" - longrepr = str(report.longrepr.reprcrash) - self.log_outcome(report.collector, code, longrepr) - - def pytest_internalerror(self, excrepr): - path = excrepr.reprcrash.path - self.write_log_entry(path, '!', str(excrepr)) diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -11,7 +11,7 @@ from pypy.rlib import rgc from pypy.rpython.lltypesystem import lltype, llmemory, rffi from pypy.rpython.lltypesystem.lloperation import llop -from pypy.rlib.jit import JitDriver, OPTIMIZER_SIMPLE, dont_look_inside +from pypy.rlib.jit import JitDriver, dont_look_inside from pypy.rlib.jit import purefunction, unroll_safe from pypy.jit.backend.x86.runner import CPU386 from pypy.jit.backend.llsupport.gc import GcRefList, GcRootMap_asmgcc @@ -87,7 +87,7 @@ ann.build_types(f, [s_list_of_strings], main_entry_point=True) t.buildrtyper().specialize() if kwds['jit']: - apply_jit(t, optimizer=OPTIMIZER_SIMPLE) + apply_jit(t, enable_opts='') cbuilder = genc.CStandaloneBuilder(t, f, t.config) cbuilder.generate_source() cbuilder.compile() @@ -159,7 +159,7 @@ x.foo = 5 return weakref.ref(x) def main_allfuncs(name, n, x): - num = name_to_func[name] + num = name_to_func[name] n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s = funcs[num][0](n, x) while n > 0: myjitdriver.can_enter_jit(num=num, n=n, x=x, x0=x0, x1=x1, @@ -428,7 +428,7 @@ def define_compile_framework_external_exception_handling(cls): def before(n, x): x = X(0) - return n, x, None, None, None, None, None, None, None, None, None, None + return n, x, None, None, None, None, None, None, None, None, None, None @dont_look_inside def g(x): @@ -460,7 +460,7 @@ def test_compile_framework_external_exception_handling(self): self.run('compile_framework_external_exception_handling') - + def define_compile_framework_bug1(self): @purefunction def nonmoving(): diff --git a/py/_test/pluginmanager.py b/py/_test/pluginmanager.py deleted file mode 100644 --- a/py/_test/pluginmanager.py +++ /dev/null @@ -1,353 +0,0 @@ -""" -managing loading and interacting with pytest plugins. 
-""" -import py -import inspect -from py._plugin import hookspec - -default_plugins = ( - "default runner capture mark terminal skipping tmpdir monkeypatch " - "recwarn pdb pastebin unittest helpconfig nose assertion genscript " - "junitxml doctest").split() - -def check_old_use(mod, modname): - clsname = modname[len('pytest_'):].capitalize() + "Plugin" - assert not hasattr(mod, clsname), (mod, clsname) - -class PluginManager(object): - def __init__(self): - self.registry = Registry() - self._name2plugin = {} - self._hints = [] - self.hook = HookRelay([hookspec], registry=self.registry) - self.register(self) - for spec in default_plugins: - self.import_plugin(spec) - - def _getpluginname(self, plugin, name): - if name is None: - if hasattr(plugin, '__name__'): - name = plugin.__name__.split(".")[-1] - else: - name = id(plugin) - return name - - def register(self, plugin, name=None): - assert not self.isregistered(plugin), plugin - assert not self.registry.isregistered(plugin), plugin - name = self._getpluginname(plugin, name) - if name in self._name2plugin: - return False - self._name2plugin[name] = plugin - self.call_plugin(plugin, "pytest_addhooks", {'pluginmanager': self}) - self.hook.pytest_plugin_registered(manager=self, plugin=plugin) - self.registry.register(plugin) - return True - - def unregister(self, plugin): - self.hook.pytest_plugin_unregistered(plugin=plugin) - self.registry.unregister(plugin) - for name, value in list(self._name2plugin.items()): - if value == plugin: - del self._name2plugin[name] - - def isregistered(self, plugin, name=None): - if self._getpluginname(plugin, name) in self._name2plugin: - return True - for val in self._name2plugin.values(): - if plugin == val: - return True - - def addhooks(self, spec): - self.hook._addhooks(spec, prefix="pytest_") - - def getplugins(self): - return list(self.registry) - - def skipifmissing(self, name): - if not self.hasplugin(name): - py.test.skip("plugin %r is missing" % name) - - def 
hasplugin(self, name): - try: - self.getplugin(name) - except KeyError: - return False - else: - return True - - def getplugin(self, name): - try: - return self._name2plugin[name] - except KeyError: - impname = canonical_importname(name) - return self._name2plugin[impname] - - # API for bootstrapping - # - def _envlist(self, varname): - val = py.std.os.environ.get(varname, None) - if val is not None: - return val.split(',') - return () - - def consider_env(self): - for spec in self._envlist("PYTEST_PLUGINS"): - self.import_plugin(spec) - - def consider_setuptools_entrypoints(self): - try: - from pkg_resources import iter_entry_points - except ImportError: - return # XXX issue a warning - for ep in iter_entry_points('pytest11'): - name = canonical_importname(ep.name) - if name in self._name2plugin: - continue - plugin = ep.load() - self.register(plugin, name=name) - - def consider_preparse(self, args): - for opt1,opt2 in zip(args, args[1:]): - if opt1 == "-p": - self.import_plugin(opt2) - - def consider_conftest(self, conftestmodule): - cls = getattr(conftestmodule, 'ConftestPlugin', None) - if cls is not None: - raise ValueError("%r: 'ConftestPlugins' only existed till 1.0.0b1, " - "were removed in 1.0.0b2" % (cls,)) - if self.register(conftestmodule, name=conftestmodule.__file__): - self.consider_module(conftestmodule) - - def consider_module(self, mod): - attr = getattr(mod, "pytest_plugins", ()) - if attr: - if not isinstance(attr, (list, tuple)): - attr = (attr,) - for spec in attr: - self.import_plugin(spec) - - def import_plugin(self, spec): - assert isinstance(spec, str) - modname = canonical_importname(spec) - if modname in self._name2plugin: - return - try: - mod = importplugin(modname) - except KeyboardInterrupt: - raise - except py.test.skip.Exception: - e = py.std.sys.exc_info()[1] - self._hints.append("skipped plugin %r: %s" %((modname, e.msg))) - else: - check_old_use(mod, modname) - self.register(mod) - self.consider_module(mod) - - def 
pytest_terminal_summary(self, terminalreporter): - tw = terminalreporter._tw - if terminalreporter.config.option.traceconfig: - for hint in self._hints: - tw.line("hint: %s" % hint) - - # - # - # API for interacting with registered and instantiated plugin objects - # - # - def listattr(self, attrname, plugins=None): - return self.registry.listattr(attrname, plugins=plugins) - - def notify_exception(self, excinfo=None): - if excinfo is None: - excinfo = py.code.ExceptionInfo() - excrepr = excinfo.getrepr(funcargs=True, showlocals=True) - return self.hook.pytest_internalerror(excrepr=excrepr) - - def do_addoption(self, parser): - mname = "pytest_addoption" - methods = self.registry.listattr(mname, reverse=True) - mc = MultiCall(methods, {'parser': parser}) - mc.execute() - - def pytest_plugin_registered(self, plugin): - dic = self.call_plugin(plugin, "pytest_namespace", {}) or {} - for name, value in dic.items(): - setattr(py.test, name, value) - py.test.__all__.append(name) - if hasattr(self, '_config'): - self.call_plugin(plugin, "pytest_addoption", - {'parser': self._config._parser}) - self.call_plugin(plugin, "pytest_configure", - {'config': self._config}) - - def call_plugin(self, plugin, methname, kwargs): - return MultiCall( - methods=self.listattr(methname, plugins=[plugin]), - kwargs=kwargs, firstresult=True).execute() - - def do_configure(self, config): - assert not hasattr(self, '_config') - self._config = config - config.hook.pytest_configure(config=self._config) - - def do_unconfigure(self, config): - config = self._config - del self._config - config.hook.pytest_unconfigure(config=config) - config.pluginmanager.unregister(self) - -def canonical_importname(name): - name = name.lower() - modprefix = "pytest_" - if not name.startswith(modprefix): - name = modprefix + name - return name - -def importplugin(importspec): - try: - return __import__(importspec) - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - 
try: - return __import__("py._plugin.%s" %(importspec), - None, None, '__doc__') - except ImportError: - e = py.std.sys.exc_info()[1] - if str(e).find(importspec) == -1: - raise - # show the original exception, not the failing internal one - return __import__(importspec) - - -class MultiCall: - """ execute a call into multiple python functions/methods. """ - - def __init__(self, methods, kwargs, firstresult=False): - self.methods = methods[:] - self.kwargs = kwargs.copy() - self.kwargs['__multicall__'] = self - self.results = [] - self.firstresult = firstresult - - def __repr__(self): - status = "%d results, %d meths" % (len(self.results), len(self.methods)) - return "" %(status, self.kwargs) - - def execute(self): - while self.methods: - method = self.methods.pop() - kwargs = self.getkwargs(method) - res = method(**kwargs) - if res is not None: - self.results.append(res) - if self.firstresult: - return res - if not self.firstresult: - return self.results - - def getkwargs(self, method): - kwargs = {} - for argname in varnames(method): - try: - kwargs[argname] = self.kwargs[argname] - except KeyError: - pass # might be optional param - return kwargs - -def varnames(func): - ismethod = inspect.ismethod(func) - rawcode = py.code.getrawcode(func) - try: - return rawcode.co_varnames[ismethod:] - except AttributeError: - return () - -class Registry: - """ - Manage Plugins: register/unregister call calls to plugins. 
- """ - def __init__(self, plugins=None): - if plugins is None: - plugins = [] - self._plugins = plugins - - def register(self, plugin): - assert not isinstance(plugin, str) - assert not plugin in self._plugins - self._plugins.append(plugin) - - def unregister(self, plugin): - self._plugins.remove(plugin) - - def isregistered(self, plugin): - return plugin in self._plugins - - def __iter__(self): - return iter(self._plugins) - - def listattr(self, attrname, plugins=None, reverse=False): - l = [] - if plugins is None: - plugins = self._plugins - for plugin in plugins: - try: - l.append(getattr(plugin, attrname)) - except AttributeError: - continue - if reverse: - l.reverse() - return l - -class HookRelay: - def __init__(self, hookspecs, registry, prefix="pytest_"): - if not isinstance(hookspecs, list): - hookspecs = [hookspecs] - self._hookspecs = [] - self._registry = registry - for hookspec in hookspecs: - self._addhooks(hookspec, prefix) - - def _addhooks(self, hookspecs, prefix): - self._hookspecs.append(hookspecs) - added = False - for name, method in vars(hookspecs).items(): - if name.startswith(prefix): - if not method.__doc__: - raise ValueError("docstring required for hook %r, in %r" - % (method, hookspecs)) - firstresult = getattr(method, 'firstresult', False) - hc = HookCaller(self, name, firstresult=firstresult) - setattr(self, name, hc) - added = True - #print ("setting new hook", name) - if not added: - raise ValueError("did not find new %r hooks in %r" %( - prefix, hookspecs,)) - - - def _performcall(self, name, multicall): - return multicall.execute() - -class HookCaller: - def __init__(self, hookrelay, name, firstresult): - self.hookrelay = hookrelay - self.name = name - self.firstresult = firstresult - - def __repr__(self): - return "" %(self.name,) - - def __call__(self, **kwargs): - methods = self.hookrelay._registry.listattr(self.name) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, 
mc) - - def pcall(self, plugins, **kwargs): - methods = self.hookrelay._registry.listattr(self.name, plugins=plugins) - mc = MultiCall(methods, kwargs, firstresult=self.firstresult) - return self.hookrelay._performcall(self.name, mc) - diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. diff --git a/py/_compat/dep_optparse.py b/py/_compat/dep_optparse.py deleted file mode 100644 --- a/py/_compat/dep_optparse.py +++ /dev/null @@ -1,4 +0,0 @@ -import py -py.log._apiwarn("1.1", "py.compat.optparse deprecated, use standard library version.", stacklevel="apipkg") - -optparse = py.std.optparse diff --git a/py/bin/win32/py.cleanup.cmd b/py/bin/win32/py.cleanup.cmd deleted file mode 100644 --- a/py/bin/win32/py.cleanup.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.cleanup" %* \ No newline at end of file diff --git a/py/_plugin/pytest_terminal.py b/py/_plugin/pytest_terminal.py deleted file mode 100644 --- a/py/_plugin/pytest_terminal.py +++ /dev/null @@ -1,540 +0,0 @@ -""" -Implements terminal reporting of the full testing process. - -This is a good source for looking at the various reporting hooks. 
-""" -import py -import sys - -optionalhook = py.test.mark.optionalhook - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting", "reporting", after="general") - group._addoption('-v', '--verbose', action="count", - dest="verbose", default=0, help="increase verbosity."), - group._addoption('-r', - action="store", dest="reportchars", default=None, metavar="chars", - help="show extra test summary info as specified by chars (f)ailed, " - "(s)skipped, (x)failed, (X)passed.") - group._addoption('-l', '--showlocals', - action="store_true", dest="showlocals", default=False, - help="show locals in tracebacks (disabled by default).") - group._addoption('--report', - action="store", dest="report", default=None, metavar="opts", - help="(deprecated, use -r)") - group._addoption('--tb', metavar="style", - action="store", dest="tbstyle", default='long', - type="choice", choices=['long', 'short', 'no', 'line'], - help="traceback print mode (long/short/line/no).") - group._addoption('--fulltrace', - action="store_true", dest="fulltrace", default=False, - help="don't cut any tracebacks (default is to cut).") - group._addoption('--funcargs', - action="store_true", dest="showfuncargs", default=False, - help="show available function arguments, sorted by plugin") - -def pytest_configure(config): - if config.option.collectonly: - reporter = CollectonlyReporter(config) - elif config.option.showfuncargs: - config.setsessionclass(ShowFuncargSession) - reporter = None - else: - reporter = TerminalReporter(config) - if reporter: - # XXX see remote.py's XXX - for attr in 'pytest_terminal_hasmarkup', 'pytest_terminal_fullwidth': - if hasattr(config, attr): - #print "SETTING TERMINAL OPTIONS", attr, getattr(config, attr) - name = attr.split("_")[-1] - assert hasattr(self.reporter._tw, name), name - setattr(reporter._tw, name, getattr(config, attr)) - config.pluginmanager.register(reporter, 'terminalreporter') - -def getreportopt(config): - reportopts = "" - optvalue = 
config.getvalue("report") - if optvalue: - py.builtin.print_("DEPRECATED: use -r instead of --report option.", - file=py.std.sys.stderr) - if optvalue: - for setting in optvalue.split(","): - setting = setting.strip() - if setting == "skipped": - reportopts += "s" - elif setting == "xfailed": - reportopts += "x" - reportchars = config.getvalue("reportchars") - if reportchars: - for char in reportchars: - if char not in reportopts: - reportopts += char - return reportopts - -class TerminalReporter: - def __init__(self, config, file=None): - self.config = config - self.stats = {} - self.curdir = py.path.local() - if file is None: - file = py.std.sys.stdout - self._tw = py.io.TerminalWriter(file) - self.currentfspath = None - self.gateway2info = {} - self.reportchars = getreportopt(config) - - def hasopt(self, char): - char = {'xfailed': 'x', 'skipped': 's'}.get(char,char) - return char in self.reportchars - - def write_fspath_result(self, fspath, res): - fspath = self.curdir.bestrelpath(fspath) - if fspath != self.currentfspath: - self._tw.line() - relpath = self.curdir.bestrelpath(fspath) - self._tw.write(relpath + " ") - self.currentfspath = fspath - self._tw.write(res) - - def write_ensure_prefix(self, prefix, extra="", **kwargs): - if self.currentfspath != prefix: - self._tw.line() - self.currentfspath = prefix - self._tw.write(prefix) - if extra: - self._tw.write(extra, **kwargs) - self.currentfspath = -2 - - def ensure_newline(self): - if self.currentfspath: - self._tw.line() - self.currentfspath = None - - def write_line(self, line, **markup): - line = str(line) - self.ensure_newline() - self._tw.line(line, **markup) - - def write_sep(self, sep, title=None, **markup): - self.ensure_newline() - self._tw.sep(sep, title, **markup) - - def getcategoryletterword(self, rep): - res = self.config.hook.pytest_report_teststatus(report=rep) - if res: - return res - for cat in 'skipped failed passed ???'.split(): - if getattr(rep, cat, None): - break - return cat, 
self.getoutcomeletter(rep), self.getoutcomeword(rep) - - def getoutcomeletter(self, rep): - return rep.shortrepr - - def getoutcomeword(self, rep): - if rep.passed: - return "PASS", dict(green=True) - elif rep.failed: - return "FAIL", dict(red=True) - elif rep.skipped: - return "SKIP" - else: - return "???", dict(red=True) - - def gettestid(self, item, relative=True): - fspath = item.fspath - chain = [x for x in item.listchain() if x.fspath == fspath] - chain = chain[1:] - names = [x.name for x in chain if x.name != "()"] - path = item.fspath - if relative: - relpath = path.relto(self.curdir) - if relpath: - path = relpath - names.insert(0, str(path)) - return "::".join(names) - - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.write_line("INTERNALERROR> " + line) - - def pytest_plugin_registered(self, plugin): - if self.config.option.traceconfig: - msg = "PLUGIN registered: %s" %(plugin,) - # XXX this event may happen during setup/teardown time - # which unfortunately captures our output here - # which garbles our output if we use self.write_line - self.write_line(msg) - - @optionalhook - def pytest_gwmanage_newgateway(self, gateway, platinfo): - #self.write_line("%s instantiated gateway from spec %r" %(gateway.id, gateway.spec._spec)) - d = {} - d['version'] = repr_pythonversion(platinfo.version_info) - d['id'] = gateway.id - d['spec'] = gateway.spec._spec - d['platform'] = platinfo.platform - if self.config.option.verbose: - d['extra'] = "- " + platinfo.executable - else: - d['extra'] = "" - d['cwd'] = platinfo.cwd - infoline = ("[%(id)s] %(spec)s -- platform %(platform)s, " - "Python %(version)s " - "cwd: %(cwd)s" - "%(extra)s" % d) - self.write_line(infoline) - self.gateway2info[gateway] = infoline - - @optionalhook - def pytest_testnodeready(self, node): - self.write_line("[%s] txnode ready to receive tests" %(node.gateway.id,)) - - @optionalhook - def pytest_testnodedown(self, node, error): - if error: - 
self.write_line("[%s] node down, error: %s" %(node.gateway.id, error)) - - @optionalhook - def pytest_rescheduleitems(self, items): - if self.config.option.debug: - self.write_sep("!", "RESCHEDULING %s " %(items,)) - - @optionalhook - def pytest_looponfailinfo(self, failreports, rootdirs): - if failreports: - self.write_sep("#", "LOOPONFAILING", red=True) - for report in failreports: - loc = self._getcrashline(report) - self.write_line(loc, red=True) - self.write_sep("#", "waiting for changes") - for rootdir in rootdirs: - self.write_line("### Watching: %s" %(rootdir,), bold=True) - - - def pytest_trace(self, category, msg): - if self.config.option.debug or \ - self.config.option.traceconfig and category.find("config") != -1: - self.write_line("[%s] %s" %(category, msg)) - - def pytest_deselected(self, items): - self.stats.setdefault('deselected', []).append(items) - - def pytest_itemstart(self, item, node=None): - if getattr(self.config.option, 'dist', 'no') != "no": - # for dist-testing situations itemstart means we - # queued the item for sending, not interesting (unless debugging) - if self.config.option.debug: - line = self._reportinfoline(item) - extra = "" - if node: - extra = "-> [%s]" % node.gateway.id - self.write_ensure_prefix(line, extra) - else: - if self.config.option.verbose: - line = self._reportinfoline(item) - self.write_ensure_prefix(line, "") - else: - # ensure that the path is printed before the - # 1st test of a module starts running - - self.write_fspath_result(self._getfspath(item), "") - - def pytest__teardown_final_logerror(self, report): - self.stats.setdefault("error", []).append(report) - - def pytest_runtest_logreport(self, report): - rep = report - cat, letter, word = self.getcategoryletterword(rep) - if not letter and not word: - # probably passed setup/teardown - return - if isinstance(word, tuple): - word, markup = word - else: - markup = {} - self.stats.setdefault(cat, []).append(rep) - if not self.config.option.verbose: - 
self.write_fspath_result(self._getfspath(rep.item), letter) - else: - line = self._reportinfoline(rep.item) - if not hasattr(rep, 'node'): - self.write_ensure_prefix(line, word, **markup) - else: - self.ensure_newline() - if hasattr(rep, 'node'): - self._tw.write("[%s] " % rep.node.gateway.id) - self._tw.write(word, **markup) - self._tw.write(" " + line) - self.currentfspath = -2 - - def pytest_collectreport(self, report): - if not report.passed: - if report.failed: - self.stats.setdefault("error", []).append(report) - msg = report.longrepr.reprcrash.message - self.write_fspath_result(report.collector.fspath, "E") - elif report.skipped: - self.stats.setdefault("skipped", []).append(report) - self.write_fspath_result(report.collector.fspath, "S") - - def pytest_sessionstart(self, session): - self.write_sep("=", "test session starts", bold=True) - self._sessionstarttime = py.std.time.time() - - verinfo = ".".join(map(str, sys.version_info[:3])) - msg = "platform %s -- Python %s" % (sys.platform, verinfo) - msg += " -- pytest-%s" % (py.__version__) - if self.config.option.verbose or self.config.option.debug or getattr(self.config.option, 'pastebin', None): - msg += " -- " + str(sys.executable) - self.write_line(msg) - lines = self.config.hook.pytest_report_header(config=self.config) - lines.reverse() - for line in flatten(lines): - self.write_line(line) - for i, testarg in enumerate(self.config.args): - self.write_line("test object %d: %s" %(i+1, testarg)) - - def pytest_sessionfinish(self, exitstatus, __multicall__): - __multicall__.execute() - self._tw.line("") - if exitstatus in (0, 1, 2): - self.summary_errors() - self.summary_failures() - self.config.hook.pytest_terminal_summary(terminalreporter=self) - if exitstatus == 2: - self._report_keyboardinterrupt() - self.summary_deselected() - self.summary_stats() - - def pytest_keyboard_interrupt(self, excinfo): - self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) - - def _report_keyboardinterrupt(self): - 
excrepr = self._keyboardinterrupt_memo - msg = excrepr.reprcrash.message - self.write_sep("!", msg) - if "KeyboardInterrupt" in msg: - if self.config.getvalue("fulltrace"): - excrepr.toterminal(self._tw) - else: - excrepr.reprcrash.toterminal(self._tw) - - def _getcrashline(self, report): - try: - return report.longrepr.reprcrash - except AttributeError: - return str(report.longrepr)[:50] - - def _reportinfoline(self, item): - collect_fspath = self._getfspath(item) - fspath, lineno, msg = self._getreportinfo(item) - if fspath and fspath != collect_fspath: - fspath = "%s <- %s" % ( - self.curdir.bestrelpath(collect_fspath), - self.curdir.bestrelpath(fspath)) - elif fspath: - fspath = self.curdir.bestrelpath(fspath) - if lineno is not None: - lineno += 1 - if fspath and lineno and msg: - line = "%(fspath)s:%(lineno)s: %(msg)s" - elif fspath and msg: - line = "%(fspath)s: %(msg)s" - elif fspath and lineno: - line = "%(fspath)s:%(lineno)s %(extrapath)s" - else: - line = "[noreportinfo]" - return line % locals() + " " - - def _getfailureheadline(self, rep): - if hasattr(rep, "collector"): - return str(rep.collector.fspath) - elif hasattr(rep, 'item'): - fspath, lineno, msg = self._getreportinfo(rep.item) - return msg - else: - return "test session" - - def _getreportinfo(self, item): - try: - return item.__reportinfo - except AttributeError: - pass - reportinfo = item.config.hook.pytest_report_iteminfo(item=item) - # cache on item - item.__reportinfo = reportinfo - return reportinfo - - def _getfspath(self, item): - try: - return item.fspath - except AttributeError: - fspath, lineno, msg = self._getreportinfo(item) - return fspath - - # - # summaries for sessionfinish - # - - def summary_failures(self): - tbstyle = self.config.getvalue("tbstyle") - if 'failed' in self.stats and tbstyle != "no": - self.write_sep("=", "FAILURES") - for rep in self.stats['failed']: - if tbstyle == "line": - line = self._getcrashline(rep) - self.write_line(line) - else: - msg = 
self._getfailureheadline(rep) - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def summary_errors(self): - if 'error' in self.stats and self.config.option.tbstyle != "no": - self.write_sep("=", "ERRORS") - for rep in self.stats['error']: - msg = self._getfailureheadline(rep) - if not hasattr(rep, 'when'): - # collect - msg = "ERROR during collection " + msg - elif rep.when == "setup": - msg = "ERROR at setup of " + msg - elif rep.when == "teardown": - msg = "ERROR at teardown of " + msg - self.write_sep("_", msg) - self.write_platinfo(rep) - rep.toterminal(self._tw) - - def write_platinfo(self, rep): - if hasattr(rep, 'node'): - self.write_line(self.gateway2info.get( - rep.node.gateway, - "node %r (platinfo not found? strange)") - [:self._tw.fullwidth-1]) - - def summary_stats(self): - session_duration = py.std.time.time() - self._sessionstarttime - - keys = "failed passed skipped deselected".split() - for key in self.stats.keys(): - if key not in keys: - keys.append(key) - parts = [] - for key in keys: - val = self.stats.get(key, None) - if val: - parts.append("%d %s" %(len(val), key)) - line = ", ".join(parts) - # XXX coloring - self.write_sep("=", "%s in %.2f seconds" %(line, session_duration)) - - def summary_deselected(self): - if 'deselected' in self.stats: - self.write_sep("=", "%d tests deselected by %r" %( - len(self.stats['deselected']), self.config.option.keyword), bold=True) - - -class CollectonlyReporter: - INDENT = " " - - def __init__(self, config, out=None): - self.config = config - if out is None: - out = py.std.sys.stdout - self.out = py.io.TerminalWriter(out) - self.indent = "" - self._failed = [] - - def outindent(self, line): - self.out.line(self.indent + str(line)) - - def pytest_internalerror(self, excrepr): - for line in str(excrepr).split("\n"): - self.out.line("INTERNALERROR> " + line) - - def pytest_collectstart(self, collector): - self.outindent(collector) - self.indent += self.INDENT - - def 
pytest_itemstart(self, item, node=None): - self.outindent(item) - - def pytest_collectreport(self, report): - if not report.passed: - self.outindent("!!! %s !!!" % report.longrepr.reprcrash.message) - self._failed.append(report) - self.indent = self.indent[:-len(self.INDENT)] - - def pytest_sessionfinish(self, session, exitstatus): - if self._failed: - self.out.sep("!", "collection failures") - for rep in self._failed: - rep.toterminal(self.out) - - -def repr_pythonversion(v=None): - if v is None: - v = sys.version_info - try: - return "%s.%s.%s-%s-%s" % v - except (TypeError, ValueError): - return str(v) - -def flatten(l): - for x in l: - if isinstance(x, (list, tuple)): - for y in flatten(x): - yield y - else: - yield x - -from py._test.session import Session -class ShowFuncargSession(Session): - def main(self, colitems): - self.fspath = py.path.local() - self.sessionstarts() - try: - self.showargs(colitems[0]) - finally: - self.sessionfinishes(exitstatus=1) - - def showargs(self, colitem): - tw = py.io.TerminalWriter() - from py._test.funcargs import getplugins - from py._test.funcargs import FuncargRequest - plugins = getplugins(colitem, withpy=True) - verbose = self.config.getvalue("verbose") - for plugin in plugins: - available = [] - for name, factory in vars(plugin).items(): - if name.startswith(FuncargRequest._argprefix): - name = name[len(FuncargRequest._argprefix):] - if name not in available: - available.append([name, factory]) - if available: - pluginname = plugin.__name__ - for name, factory in available: - loc = self.getlocation(factory) - if verbose: - funcargspec = "%s -- %s" %(name, loc,) - else: - funcargspec = name - tw.line(funcargspec, green=True) - doc = factory.__doc__ or "" - if doc: - for line in doc.split("\n"): - tw.line(" " + line.strip()) - else: - tw.line(" %s: no docstring available" %(loc,), - red=True) - - def getlocation(self, function): - import inspect - fn = py.path.local(inspect.getfile(function)) - lineno = 
py.builtin._getcode(function).co_firstlineno - if fn.relto(self.fspath): - fn = fn.relto(self.fspath) - return "%s:%d" %(fn, lineno+1) diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/py/_test/cmdline.py b/py/_test/cmdline.py deleted file mode 100644 --- a/py/_test/cmdline.py +++ /dev/null @@ -1,24 +0,0 @@ -import py -import sys - -# -# main entry point -# - -def main(args=None): - if args is None: - args = sys.argv[1:] - config = py.test.config - try: - config.parse(args) - config.pluginmanager.do_configure(config) - session = config.initsession() - colitems = config.getinitialnodes() - exitstatus = session.main(colitems) - config.pluginmanager.do_unconfigure(config) - except config.Error: - e = sys.exc_info()[1] - sys.stderr.write("ERROR: %s\n" %(e.args[0],)) - exitstatus = 3 - py.test.config = py.test.config.__class__() - return exitstatus diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. 
diff --git a/py/_cmdline/pywhich.py b/py/_cmdline/pywhich.py deleted file mode 100755 --- a/py/_cmdline/pywhich.py +++ /dev/null @@ -1,23 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.which [name] - -print the location of the given python module or package name -""" - -import sys - -def main(): - name = sys.argv[1] - try: - mod = __import__(name) - except ImportError: - sys.stderr.write("could not import: " + name + "\n") - else: - try: - location = mod.__file__ - except AttributeError: - sys.stderr.write("module (has no __file__): " + str(mod)) - else: - print(location) diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/py/bin/win32/py.convert_unittest.cmd b/py/bin/win32/py.convert_unittest.cmd deleted file mode 100644 --- a/py/bin/win32/py.convert_unittest.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.convert_unittest" %* \ No newline at end of file diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -10,6 +10,7 @@ from pypy.rlib.objectmodel import we_are_translated, specialize from pypy.rlib.debug import have_debug_prints, ll_assert from pypy.rlib.debug import debug_start, debug_stop, debug_print +from pypy.jit.metainterp.optimizeutil import InvalidLoop # Logic to encode the chain of frames and the state of the boxes at a # guard operation, and to decode it again. 
This is a bit advanced, @@ -427,12 +428,24 @@ # raise NotImplementedError def equals(self, fieldnums): return tagged_list_eq(self.fieldnums, fieldnums) + def set_content(self, fieldnums): self.fieldnums = fieldnums def debug_prints(self): raise NotImplementedError + def generalization_of(self, other): + raise NotImplementedError + + def generate_guards(self, other, box, cpu, extra_guards): + if self.generalization_of(other): + return + self._generate_guards(other, box, cpu, extra_guards) + + def _generate_guards(self, other, box, cpu, extra_guards): + raise InvalidLoop + class AbstractVirtualStructInfo(AbstractVirtualInfo): def __init__(self, fielddescrs): self.fielddescrs = fielddescrs @@ -452,6 +465,26 @@ str(self.fielddescrs[i]), str(untag(self.fieldnums[i]))) + def generalization_of(self, other): + if not self._generalization_of(other): + return False + assert len(self.fielddescrs) == len(self.fieldstate) + assert len(other.fielddescrs) == len(other.fieldstate) + if len(self.fielddescrs) != len(other.fielddescrs): + return False + + for i in range(len(self.fielddescrs)): + if other.fielddescrs[i] is not self.fielddescrs[i]: + return False + if not self.fieldstate[i].generalization_of(other.fieldstate[i]): + return False + + return True + + def _generalization_of(self, other): + raise NotImplementedError + + class VirtualInfo(AbstractVirtualStructInfo): def __init__(self, known_class, fielddescrs): AbstractVirtualStructInfo.__init__(self, fielddescrs) @@ -467,6 +500,14 @@ debug_print("\tvirtualinfo", self.known_class.repr_rpython()) AbstractVirtualStructInfo.debug_prints(self) + def _generalization_of(self, other): + if not isinstance(other, VirtualInfo): + return False + if not self.known_class.same_constant(other.known_class): + return False + return True + + class VStructInfo(AbstractVirtualStructInfo): def __init__(self, typedescr, fielddescrs): AbstractVirtualStructInfo.__init__(self, fielddescrs) @@ -482,6 +523,14 @@ debug_print("\tvstructinfo", 
self.typedescr.repr_rpython()) AbstractVirtualStructInfo.debug_prints(self) + def _generalization_of(self, other): + if not isinstance(other, VStructInfo): + return False + if self.typedescr is not other.typedescr: + return False + return True + + class VArrayInfo(AbstractVirtualInfo): def __init__(self, arraydescr): self.arraydescr = arraydescr @@ -513,6 +562,16 @@ for i in self.fieldnums: debug_print("\t\t", str(untag(i))) + def generalization_of(self, other): + if self.arraydescr is not other.arraydescr: + return False + if len(self.fieldstate) != len(other.fieldstate): + return False + for i in range(len(self.fieldstate)): + if not self.fieldstate[i].generalization_of(other.fieldstate[i]): + return False + return True + class VStrPlainInfo(AbstractVirtualInfo): """Stands for the string made out of the characters of all fieldnums.""" @@ -647,6 +706,7 @@ # Note that this may be called recursively; that's why the # allocate() methods must fill in the cache as soon as they # have the object, before they fill its fields. + assert self.virtuals_cache is not None v = self.virtuals_cache[index] if not v: v = self.rd_virtuals[index].allocate(self, index) diff --git a/py/_plugin/pytest_runner.py b/py/_plugin/pytest_runner.py deleted file mode 100644 --- a/py/_plugin/pytest_runner.py +++ /dev/null @@ -1,417 +0,0 @@ -""" -collect and run test items and create reports. 
-""" - -import py, sys - -def pytest_namespace(): - return { - 'raises' : raises, - 'skip' : skip, - 'importorskip' : importorskip, - 'fail' : fail, - 'xfail' : xfail, - 'exit' : exit, - } - -# -# pytest plugin hooks - -# XXX move to pytest_sessionstart and fix py.test owns tests -def pytest_configure(config): - config._setupstate = SetupState() - -def pytest_sessionfinish(session, exitstatus): - if hasattr(session.config, '_setupstate'): - hook = session.config.hook - rep = hook.pytest__teardown_final(session=session) - if rep: - hook.pytest__teardown_final_logerror(report=rep) - -def pytest_make_collect_report(collector): - result = excinfo = None - try: - result = collector._memocollect() - except KeyboardInterrupt: - raise - except: - excinfo = py.code.ExceptionInfo() - return CollectReport(collector, result, excinfo) - -def pytest_runtest_protocol(item): - runtestprotocol(item) - return True - -def runtestprotocol(item, log=True): - rep = call_and_report(item, "setup", log) - reports = [rep] - if rep.passed: - reports.append(call_and_report(item, "call", log)) - reports.append(call_and_report(item, "teardown", log)) - return reports - -def pytest_runtest_setup(item): - item.config._setupstate.prepare(item) - -def pytest_runtest_call(item): - if not item._deprecated_testexecution(): - item.runtest() - -def pytest_runtest_makereport(item, call): - return ItemTestReport(item, call.excinfo, call.when) - -def pytest_runtest_teardown(item): - item.config._setupstate.teardown_exact(item) - -def pytest__teardown_final(session): - call = CallInfo(session.config._setupstate.teardown_all, when="teardown") - if call.excinfo: - ntraceback = call.excinfo.traceback .cut(excludepath=py._pydir) - call.excinfo.traceback = ntraceback.filter() - rep = TeardownErrorReport(call.excinfo) - return rep - -def pytest_report_teststatus(report): - if report.when in ("setup", "teardown"): - if report.failed: - # category, shortletter, verbose-word - return "error", "E", "ERROR" - elif 
report.skipped: - return "skipped", "s", "SKIPPED" - else: - return "", "", "" -# -# Implementation - -def call_and_report(item, when, log=True): - call = call_runtest_hook(item, when) - hook = item.ihook - report = hook.pytest_runtest_makereport(item=item, call=call) - if log and (when == "call" or not report.passed): - hook.pytest_runtest_logreport(report=report) - return report - -def call_runtest_hook(item, when): - hookname = "pytest_runtest_" + when - ihook = getattr(item.ihook, hookname) - return CallInfo(lambda: ihook(item=item), when=when) - -class CallInfo: - excinfo = None - def __init__(self, func, when): - self.when = when - try: - self.result = func() - except KeyboardInterrupt: - raise - except: - self.excinfo = py.code.ExceptionInfo() - - def __repr__(self): - if self.excinfo: - status = "exception: %s" % str(self.excinfo.value) - else: - status = "result: %r" % (self.result,) - return "" % (self.when, status) - -class BaseReport(object): - def __repr__(self): - l = ["%s=%s" %(key, value) - for key, value in self.__dict__.items()] - return "<%s %s>" %(self.__class__.__name__, " ".join(l),) - - def toterminal(self, out): - longrepr = self.longrepr - if hasattr(longrepr, 'toterminal'): - longrepr.toterminal(out) - else: - out.line(str(longrepr)) - -class ItemTestReport(BaseReport): - failed = passed = skipped = False - - def __init__(self, item, excinfo=None, when=None): - self.item = item - self.when = when - if item and when != "setup": - self.keywords = item.readkeywords() - else: - # if we fail during setup it might mean - # we are not able to access the underlying object - # this might e.g. happen if we are unpickled - # and our parent collector did not collect us - # (because it e.g. skipped for platform reasons) - self.keywords = {} - if not excinfo: - self.passed = True - self.shortrepr = "." - else: - if not isinstance(excinfo, py.code.ExceptionInfo): - self.failed = True - shortrepr = "?" 
- longrepr = excinfo - elif excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - shortrepr = "s" - longrepr = self.item._repr_failure_py(excinfo) - else: - self.failed = True - shortrepr = self.item.shortfailurerepr - if self.when == "call": - longrepr = self.item.repr_failure(excinfo) - else: # exception in setup or teardown - longrepr = self.item._repr_failure_py(excinfo) - shortrepr = shortrepr.lower() - self.shortrepr = shortrepr - self.longrepr = longrepr - - def __repr__(self): - status = (self.passed and "passed" or - self.skipped and "skipped" or - self.failed and "failed" or - "CORRUPT") - l = [repr(self.item.name), "when=%r" % self.when, "outcome %r" % status,] - if hasattr(self, 'node'): - l.append("txnode=%s" % self.node.gateway.id) - info = " " .join(map(str, l)) - return "" % info - - def getnode(self): - return self.item - -class CollectReport(BaseReport): - skipped = failed = passed = False - - def __init__(self, collector, result, excinfo=None): - self.collector = collector - if not excinfo: - self.passed = True - self.result = result - else: - style = "short" - if collector.config.getvalue("fulltrace"): - style = "long" - self.longrepr = self.collector._repr_failure_py(excinfo, - style=style) - if excinfo.errisinstance(py.test.skip.Exception): - self.skipped = True - self.reason = str(excinfo.value) - else: - self.failed = True - - def getnode(self): - return self.collector - -class TeardownErrorReport(BaseReport): - skipped = passed = False - failed = True - when = "teardown" - def __init__(self, excinfo): - self.longrepr = excinfo.getrepr(funcargs=True) - -class SetupState(object): - """ shared state for setting up/tearing down test items or collectors. """ - def __init__(self): - self.stack = [] - self._finalizers = {} - - def addfinalizer(self, finalizer, colitem): - """ attach a finalizer to the given colitem. - if colitem is None, this will add a finalizer that - is called at the end of teardown_all(). 
- """ - assert hasattr(finalizer, '__call__') - #assert colitem in self.stack - self._finalizers.setdefault(colitem, []).append(finalizer) - - def _pop_and_teardown(self): - colitem = self.stack.pop() - self._teardown_with_finalization(colitem) - - def _callfinalizers(self, colitem): - finalizers = self._finalizers.pop(colitem, None) - while finalizers: - fin = finalizers.pop() - fin() - - def _teardown_with_finalization(self, colitem): - self._callfinalizers(colitem) - if colitem: - colitem.teardown() - for colitem in self._finalizers: - assert colitem is None or colitem in self.stack - - def teardown_all(self): - while self.stack: - self._pop_and_teardown() - self._teardown_with_finalization(None) - assert not self._finalizers - - def teardown_exact(self, item): - if self.stack and item == self.stack[-1]: - self._pop_and_teardown() - else: - self._callfinalizers(item) - - def prepare(self, colitem): - """ setup objects along the collector chain to the test-method - and teardown previously setup objects.""" - needed_collectors = colitem.listchain() - while self.stack: - if self.stack == needed_collectors[:len(self.stack)]: - break - self._pop_and_teardown() - # check if the last collection node has raised an error - for col in self.stack: - if hasattr(col, '_prepare_exc'): - py.builtin._reraise(*col._prepare_exc) - for col in needed_collectors[len(self.stack):]: - self.stack.append(col) - try: - col.setup() - except Exception: - col._prepare_exc = sys.exc_info() - raise - -# ============================================================= -# Test OutcomeExceptions and helpers for creating them. - - -class OutcomeException(Exception): - """ OutcomeException and its subclass instances indicate and - contain info about test and collection outcomes. 
- """ - def __init__(self, msg=None, excinfo=None): - self.msg = msg - self.excinfo = excinfo - - def __repr__(self): - if self.msg: - return repr(self.msg) - return "<%s instance>" %(self.__class__.__name__,) - __str__ = __repr__ - -class Skipped(OutcomeException): - # XXX hackish: on 3k we fake to live in the builtins - # in order to have Skipped exception printing shorter/nicer - __module__ = 'builtins' - -class Failed(OutcomeException): - """ raised from an explicit call to py.test.fail() """ - __module__ = 'builtins' - -class XFailed(OutcomeException): - """ raised from an explicit call to py.test.xfail() """ - __module__ = 'builtins' - -class ExceptionFailure(Failed): - """ raised by py.test.raises on an exception-assertion mismatch. """ - def __init__(self, expr, expected, msg=None, excinfo=None): - Failed.__init__(self, msg=msg, excinfo=excinfo) - self.expr = expr - self.expected = expected - -class Exit(KeyboardInterrupt): - """ raised by py.test.exit for immediate program exits without tracebacks and reporter/summary. """ - def __init__(self, msg="unknown reason"): - self.msg = msg - KeyboardInterrupt.__init__(self, msg) - -# exposed helper methods - -def exit(msg): - """ exit testing process as if KeyboardInterrupt was triggered. """ - __tracebackhide__ = True - raise Exit(msg) - -exit.Exception = Exit - -def skip(msg=""): - """ skip an executing test with the given message. Note: it's usually - better use the py.test.mark.skipif marker to declare a test to be - skipped under certain conditions like mismatching platforms or - dependencies. See the pytest_skipping plugin for details. - """ - __tracebackhide__ = True - raise Skipped(msg=msg) - -skip.Exception = Skipped - -def fail(msg=""): - """ explicitely fail an currently-executing test with the given Message. """ - __tracebackhide__ = True - raise Failed(msg=msg) - -fail.Exception = Failed - -def xfail(reason=""): - """ xfail an executing test or setup functions, taking an optional - reason string. 
- """ - __tracebackhide__ = True - raise XFailed(reason) -xfail.Exception = XFailed - -def raises(ExpectedException, *args, **kwargs): - """ if args[0] is callable: raise AssertionError if calling it with - the remaining arguments does not raise the expected exception. - if args[0] is a string: raise AssertionError if executing the - the string in the calling scope does not raise expected exception. - for examples: - x = 5 - raises(TypeError, lambda x: x + 'hello', x=x) - raises(TypeError, "x + 'hello'") - """ - __tracebackhide__ = True - assert args - if isinstance(args[0], str): - code, = args - assert isinstance(code, str) - frame = sys._getframe(1) - loc = frame.f_locals.copy() - loc.update(kwargs) - #print "raises frame scope: %r" % frame.f_locals - try: - code = py.code.Source(code).compile() - py.builtin.exec_(code, frame.f_globals, loc) - # XXX didn'T mean f_globals == f_locals something special? - # this is destroyed here ... - except ExpectedException: - return py.code.ExceptionInfo() - else: - func = args[0] - try: - func(*args[1:], **kwargs) - except ExpectedException: - return py.code.ExceptionInfo() - k = ", ".join(["%s=%r" % x for x in kwargs.items()]) - if k: - k = ', ' + k - expr = '%s(%r%s)' %(getattr(func, '__name__', func), args, k) - raise ExceptionFailure(msg="DID NOT RAISE", - expr=args, expected=ExpectedException) - -raises.Exception = ExceptionFailure - -def importorskip(modname, minversion=None): - """ return imported module if it has a higher __version__ than the - optionally specified 'minversion' - otherwise call py.test.skip() - with a message detailing the mismatch. 
- """ - compile(modname, '', 'eval') # to catch syntaxerrors - try: - mod = __import__(modname, None, None, ['__doc__']) - except ImportError: - py.test.skip("could not import %r" %(modname,)) - if minversion is None: - return mod - verattr = getattr(mod, '__version__', None) - if isinstance(minversion, str): - minver = minversion.split(".") - else: - minver = list(minversion) - if verattr is None or verattr.split(".") < minver: - py.test.skip("module %r has __version__ %r, required is: %r" %( - modname, verattr, minversion)) - return mod - diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. 
- -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. - -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. 
This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). 
- -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. - -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. -That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). 
This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. 
However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. - -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. 
All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. - After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) - -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. 
- -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. - - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. 
- -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<<y). [NOTE(review): intervening text lost in archive conversion — the ``<`` of ``x<<y`` was apparently treated as an HTML tag opener; the following paragraph belongs to a later section about the Pylint checker.] Pylint (>=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will check for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. - -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. _astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). 
There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. (Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. - - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. 
- -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -The rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly an interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... 
- -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. - -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. 
-here are examples for the possible locations:: - - >>>> import sys - >>>> sys.__file__ - '/home/hpk/pypy-dist/pypy/module/sys/*.py' - - >>>> import operator - >>>> operator.__file__ - '/home/hpk/pypy-dist/lib_pypy/operator.py' - - >>>> import opcode - >>>> opcode.__file__ - '/home/hpk/pypy-dist/lib-python/modified-2.5.2/opcode.py' - - >>>> import os - faking - faking - >>>> os.__file__ - '/home/hpk/pypy-dist/lib-python/2.5.2/os.py' - >>>> - -Module directories / Import order ---------------------------------- - -Here is the order in which PyPy looks up Python modules: - -*pypy/modules* - - mixed interpreter/app-level builtin modules, such as - the ``sys`` and ``__builtin__`` module. - -*contents of PYTHONPATH* - - lookup application level modules in each of the ``:`` separated - list of directories, specified in the ``PYTHONPATH`` environment - variable. - -*lib_pypy/* - - contains pure Python reimplementation of modules. - -*lib-python/modified-2.5.2/* - - The files and tests that we have modified from the CPython library. - -*lib-python/2.5.2/* - - The unmodified CPython library. **Never ever check anything in there**. - -.. _`modify modules`: - -Modifying a CPython library module or regression test -------------------------------------------------------- - -Although PyPy is very compatible with CPython we sometimes need -to change modules contained in our copy of the standard library, -often due to the fact that PyPy works with all new-style classes -by default and CPython has a number of places where it relies -on some classes being old-style. - -If you want to change a module or test contained in ``lib-python/2.5.2`` -then make sure that you copy the file to our ``lib-python/modified-2.5.2`` -directory first. In subversion commandline terms this reads:: - - svn cp lib-python/2.5.2/somemodule.py lib-python/modified-2.5.2/ - -and subsequently you edit and commit -``lib-python/modified-2.5.2/somemodule.py``. 
This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. - -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... 
- -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. - -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. 
-(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. 
- -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. _`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. 
They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while you test code -runs at application level. If you need to use modules -you have to import them within the test function. 
- -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b": 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. _`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. 
_`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/rpython/lltypesystem/ll2ctypes.py b/pypy/rpython/lltypesystem/ll2ctypes.py --- a/pypy/rpython/lltypesystem/ll2ctypes.py +++ b/pypy/rpython/lltypesystem/ll2ctypes.py @@ -1028,7 +1028,10 @@ funcname, place)) # get_ctypes_type() can raise NotImplementedError too - cfunc.argtypes = [get_ctypes_type(T) for T in FUNCTYPE.ARGS + from pypy.rpython.lltypesystem import rffi + cfunc.argtypes = [get_ctypes_type(T) if T is not rffi.VOIDP + else ctypes.c_void_p + for T in FUNCTYPE.ARGS if not T is lltype.Void] if FUNCTYPE.RESULT is lltype.Void: cfunc.restype = None diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. diff --git a/py/_compat/dep_doctest.py b/py/_compat/dep_doctest.py deleted file mode 100644 --- a/py/_compat/dep_doctest.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.doctest deprecated, use standard library version.", -stacklevel="apipkg") -doctest = py.std.doctest diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. 
_`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. _`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. 
_`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. _`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. 
_`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. _`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. _`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. 
_`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/py/bin/win32/py.svnwcrevert.cmd b/py/bin/win32/py.svnwcrevert.cmd deleted file mode 100644 --- a/py/bin/win32/py.svnwcrevert.cmd +++ /dev/null @@ -1,2 +0,0 @@ - at echo off -python "%~dp0\..\py.svnwcrevert" %* \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. 
diff --git a/py/_plugin/pytest_pastebin.py b/py/_plugin/pytest_pastebin.py deleted file mode 100644 --- a/py/_plugin/pytest_pastebin.py +++ /dev/null @@ -1,83 +0,0 @@ -""" -submit failure or test session information to a pastebin service. - -Usage ----------- - -**Creating a URL for each test failure**:: - - py.test --pastebin=failed - -This will submit test run information to a remote Paste service and -provide a URL for each failure. You may select tests as usual or add -for example ``-x`` if you only want to send one particular failure. - -**Creating a URL for a whole test session log**:: - - py.test --pastebin=all - -Currently only pasting to the http://paste.pocoo.org service is implemented. - -""" -import py, sys - -class url: - base = "http://paste.pocoo.org" - xmlrpc = base + "/xmlrpc/" - show = base + "/show/" - -def pytest_addoption(parser): - group = parser.getgroup("terminal reporting") - group._addoption('--pastebin', metavar="mode", - action='store', dest="pastebin", default=None, - type="choice", choices=['failed', 'all'], - help="send failed|all info to Pocoo pastebin service.") - -def pytest_configure(__multicall__, config): - import tempfile - __multicall__.execute() - if config.option.pastebin == "all": - config._pastebinfile = tempfile.TemporaryFile('w+') - tr = config.pluginmanager.getplugin('terminalreporter') - oldwrite = tr._tw.write - def tee_write(s, **kwargs): - oldwrite(s, **kwargs) - config._pastebinfile.write(str(s)) - tr._tw.write = tee_write - -def pytest_unconfigure(config): - if hasattr(config, '_pastebinfile'): - config._pastebinfile.seek(0) - sessionlog = config._pastebinfile.read() - config._pastebinfile.close() - del config._pastebinfile - proxyid = getproxy().newPaste("python", sessionlog) - pastebinurl = "%s%s" % (url.show, proxyid) - sys.stderr.write("pastebin session-log: %s\n" % pastebinurl) - tr = config.pluginmanager.getplugin('terminalreporter') - del tr._tw.__dict__['write'] - -def getproxy(): - return 
py.std.xmlrpclib.ServerProxy(url.xmlrpc).pastes - -def pytest_terminal_summary(terminalreporter): - if terminalreporter.config.option.pastebin != "failed": - return - tr = terminalreporter - if 'failed' in tr.stats: - terminalreporter.write_sep("=", "Sending information to Paste Service") - if tr.config.option.debug: - terminalreporter.write_line("xmlrpcurl: %s" %(url.xmlrpc,)) - serverproxy = getproxy() - for rep in terminalreporter.stats.get('failed'): - try: - msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc - except AttributeError: - msg = tr._getfailureheadline(rep) - tw = py.io.TerminalWriter(stringio=True) - rep.toterminal(tw) - s = tw.stringio.getvalue() - assert len(s) - proxyid = serverproxy.newPaste("python", s) - pastebinurl = "%s%s" % (url.show, proxyid) - tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. 
_dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statics about the forest of flowgraphs as they -go through the various backend optimizations. 
\ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/module/pyexpat/interp_pyexpat.py b/pypy/module/pyexpat/interp_pyexpat.py --- a/pypy/module/pyexpat/interp_pyexpat.py +++ b/pypy/module/pyexpat/interp_pyexpat.py @@ -1,7 +1,7 @@ from pypy.interpreter.baseobjspace import Wrappable from pypy.interpreter.typedef import TypeDef, GetSetProperty -from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped -from pypy.interpreter.gateway import interp2app +from pypy.interpreter.gateway import NoneNotWrapped +from pypy.interpreter.gateway import interp2app, unwrap_spec from pypy.interpreter.error import OperationError from pypy.objspace.descroperation import object_setattr from pypy.rpython.lltypesystem import rffi, lltype @@ -325,6 +325,10 @@ space.wrap(XML_MINOR_VERSION), space.wrap(XML_MICRO_VERSION)]) +class Cache: + def __init__(self, space): + self.w_error = space.new_exception_class("pyexpat.ExpatError") + class W_XMLParserType(Wrappable): def __init__(self, space, parser, w_intern): @@ -357,6 +361,7 @@ global_storage.free_nonmoving_id( rffi.cast(lltype.Signed, self.itself)) + @unwrap_spec(flag=int) def SetParamEntityParsing(self, space, flag): """SetParamEntityParsing(flag) -> success Controls parsing of parameter 
entities (including the external DTD @@ -365,7 +370,6 @@ XML_PARAM_ENTITY_PARSING_ALWAYS. Returns true if setting the flag was successful.""" XML_SetParamEntityParsing(self.itself, flag) - SetParamEntityParsing.unwrap_spec = ['self', ObjSpace, int] def UseForeignDTD(self, space, w_flag=True): """UseForeignDTD([flag]) @@ -376,7 +380,6 @@ 'flag' defaults to True if not provided.""" flag = space.is_true(w_flag) XML_UseForeignDTD(self.itself, flag) - UseForeignDTD.unwrap_spec = ['self', ObjSpace, W_Root] # Handlers management @@ -499,6 +502,7 @@ return True + @unwrap_spec(name=str) def setattr(self, space, name, w_value): if name == "namespace_prefixes": XML_SetReturnNSTriplet(self.itself, space.int_w(w_value)) @@ -513,15 +517,15 @@ return space.call_function( object_setattr(space), space.wrap(self), space.wrap(name), w_value) - setattr.unwrap_spec = ['self', ObjSpace, str, W_Root] # Parse methods + @unwrap_spec(data=str, isfinal=bool) def Parse(self, space, data, isfinal=False): """Parse(data[, isfinal]) Parse XML data. 
`isfinal' should be true at end of input.""" - res = XML_Parse(self.itself, data, len(data), bool(isfinal)) + res = XML_Parse(self.itself, data, len(data), isfinal) if self._exc_info: e = self._exc_info self._exc_info = None @@ -531,7 +535,6 @@ raise exc self.flush_character_buffer(space) return space.wrap(res) - Parse.unwrap_spec = ['self', ObjSpace, str, int] def ParseFile(self, space, w_file): """ParseFile(file) @@ -540,11 +543,10 @@ w_data = space.call_method(w_file, 'read') data = space.str_w(w_data) return self.Parse(space, data, isfinal=True) - ParseFile.unwrap_spec = ['self', ObjSpace, W_Root] + @unwrap_spec(base=str) def SetBase(self, space, base): XML_SetBase(self.itself, base) - SetBase.unwrap_spec = ['self', ObjSpace, str] def ExternalEntityParserCreate(self, space, w_context, w_encoding=None): """ExternalEntityParserCreate(context[, encoding]) @@ -572,7 +574,6 @@ parser.handlers[i] = self.handlers[i] return space.wrap(parser) - ExternalEntityParserCreate.unwrap_spec = ['self', ObjSpace, W_Root, W_Root] def flush_character_buffer(self, space): if not self.buffer_w: @@ -593,8 +594,7 @@ lineno = XML_GetCurrentLineNumber(self.itself) colno = XML_GetCurrentColumnNumber(self.itself) msg = "%s: line %d, column %d" % (err, lineno, colno) - w_module = space.getbuiltinmodule('pyexpat') - w_errorcls = space.getattr(w_module, space.wrap('error')) + w_errorcls = space.fromcache(Cache).w_error w_error = space.call_function(w_errorcls, space.wrap(msg)) space.setattr(w_error, space.wrap("code"), space.wrap(code)) space.setattr(w_error, space.wrap("offset"), space.wrap(colno)) @@ -603,21 +603,21 @@ self.w_error = w_error return OperationError(w_errorcls, w_error) - def descr_ErrorCode(space, self): + def descr_ErrorCode(self, space): return space.wrap(XML_GetErrorCode(self.itself)) - def descr_ErrorLineNumber(space, self): + def descr_ErrorLineNumber(self, space): return space.wrap(XML_GetErrorLineNumber(self.itself)) - def descr_ErrorColumnNumber(space, self): + def 
descr_ErrorColumnNumber(self, space): return space.wrap(XML_GetErrorColumnNumber(self.itself)) - def descr_ErrorByteIndex(space, self): + def descr_ErrorByteIndex(self, space): return space.wrap(XML_GetErrorByteIndex(self.itself)) - def get_buffer_size(space, self): + def get_buffer_size(self, space): return space.wrap(self.buffer_size) - def set_buffer_size(space, self, w_value): + def set_buffer_size(self, space, w_value): value = space.getindex_w(w_value, space.w_TypeError) if value <= 0: raise OperationError(space.w_ValueError, space.wrap( @@ -625,9 +625,9 @@ self.flush_character_buffer(space) self.buffer_size = value - def get_buffer_text(space, self): + def get_buffer_text(self, space): return space.wrap(self.buffer_w is not None) - def set_buffer_text(space, self, w_value): + def set_buffer_text(self, space, w_value): if space.is_true(w_value): self.buffer_w = [] self.buffer_used = 0 @@ -635,7 +635,7 @@ self.flush_character_buffer(space) self.buffer_w = None - def get_intern(space, self): + def get_intern(self, space): if self.w_intern: return self.w_intern else: @@ -676,9 +676,7 @@ CurrentColumnNumber = GetSetProperty(W_XMLParserType.descr_ErrorColumnNumber, cls=W_XMLParserType), CurrentByteIndex = GetSetProperty(W_XMLParserType.descr_ErrorByteIndex, cls=W_XMLParserType), - **dict((name, interp2app(getattr(W_XMLParserType, name), - unwrap_spec=getattr(W_XMLParserType, - name).unwrap_spec)) + **dict((name, interp2app(getattr(W_XMLParserType, name))) for name in XMLParser_methods) ) @@ -740,11 +738,10 @@ parser.itself, UnknownEncodingHandlerData_callback, rffi.cast(rffi.VOIDP, parser.id)) return space.wrap(parser) -ParserCreate.unwrap_spec = [ObjSpace, W_Root, W_Root, W_Root] + at unwrap_spec(code=int) def ErrorString(space, code): """ErrorString(errno) -> string Returns string error for given number.""" return space.wrap(rffi.charp2str(XML_ErrorString(code))) -ErrorString.unwrap_spec = [ObjSpace, int] diff --git a/pypy/doc/config/objspace.std.withtproxy.txt 
b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. diff --git a/py/_cmdline/pysvnwcrevert.py b/py/_cmdline/pysvnwcrevert.py deleted file mode 100755 --- a/py/_cmdline/pysvnwcrevert.py +++ /dev/null @@ -1,55 +0,0 @@ -#! /usr/bin/env python -"""\ -py.svnwcrevert [options] WCPATH - -Running this script and then 'svn up' puts the working copy WCPATH in a state -as clean as a fresh check-out. - -WARNING: you'll loose all local changes, obviously! - -This script deletes all files that have been modified -or that svn doesn't explicitly know about, including svn:ignored files -(like .pyc files, hint hint). - -The goal of this script is to leave the working copy with some files and -directories possibly missing, but - most importantly - in a state where -the following 'svn up' won't just crash. 
-""" - -import sys, py - -def kill(p, root): - print('< %s' % (p.relto(root),)) - p.remove(rec=1) - -def svnwcrevert(path, root=None, precious=[]): - if root is None: - root = path - wcpath = py.path.svnwc(path) - try: - st = wcpath.status() - except ValueError: # typically, "bad char in wcpath" - kill(path, root) - return - for p in path.listdir(): - if p.basename == '.svn' or p.basename in precious: - continue - wcp = py.path.svnwc(p) - if wcp not in st.unchanged and wcp not in st.external: - kill(p, root) - elif p.check(dir=1): - svnwcrevert(p, root) - -# XXX add a functional test - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-p", "--precious", - action="append", dest="precious", default=[], - help="preserve files with this name") - -def main(): - opts, args = parser.parse_args() - if len(args) != 1: - parser.print_help() - sys.exit(2) - svnwcrevert(py.path.local(args[0]), precious=opts.precious) diff --git a/pypy/rpython/memory/gc/minimark.py b/pypy/rpython/memory/gc/minimark.py --- a/pypy/rpython/memory/gc/minimark.py +++ b/pypy/rpython/memory/gc/minimark.py @@ -1,7 +1,7 @@ """ MiniMark GC. Environment variables can be used to fine-tune the following parameters: - + PYPY_GC_NURSERY The nursery size. Defaults to half the size of the L2 cache. Try values like '1.2MB'. Small values (like 1 or 1KB) are useful for debugging. @@ -108,12 +108,13 @@ GCFLAG_HAS_CARDS = first_gcflag << 5 GCFLAG_CARDS_SET = first_gcflag << 6 # <- at least one card bit is set +TID_MASK = (first_gcflag << 7) - 1 + FORWARDSTUB = lltype.GcStruct('forwarding_stub', ('forw', llmemory.Address)) FORWARDSTUBPTR = lltype.Ptr(FORWARDSTUB) - # ____________________________________________________________ class MiniMarkGC(MovingGCBase): @@ -852,9 +853,13 @@ that can never be set on a young object -- except if tid == -42. 
""" assert self.is_in_nursery(obj) - result = (self.header(obj).tid & GCFLAG_FINALIZATION_ORDERING != 0) + tid = self.header(obj).tid + result = (tid & GCFLAG_FINALIZATION_ORDERING != 0) if result: - ll_assert(self.header(obj).tid == -42, "bogus header for young obj") + ll_assert(tid == -42, "bogus header for young obj") + else: + ll_assert(bool(tid), "bogus header (1)") + ll_assert(tid & ~TID_MASK == 0, "bogus header (2)") return result def get_forwarding_address(self, obj): diff --git a/pypy/doc/buildtool.txt b/pypy/doc/buildtool.txt deleted file mode 100644 --- a/pypy/doc/buildtool.txt +++ /dev/null @@ -1,249 +0,0 @@ -============ -PyPyBuilder -============ - -What is this? -============= - -PyPyBuilder is an application that allows people to build PyPy instances on -demand. If you have a nice idle machine connected to the Internet, and don't -mind us 'borrowing' it every once in a while, you can start up the client -script (in bin/client) and have the server send compile jobs to your machine. -If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. 
- -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... - -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. 
Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. - -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. - -Note that all client-related configuration is done from command-line switches, -so the configuration file is supposed to be changed on a per-project basis: -unless you have specific needs, use a test version of the build tool, or are -working on another project than PyPy, you will not want to modify the it. - -System configuration --------------------- - -This information is used by the client and startcompile components. On the -participating clients this information is retrieved by querying the system, on -the requesting clients the system values are used by default, but may be -overridden (so a requesting client running an x86 can still request PPC builds, -for instance). The clients compare their own system config to that of a build -request, and will (should) refuse a build if it can not be executed because -of incompatibilities. - -Compilation configuration -------------------------- - -The third form of configuration is that of the to-be-built application itself, -its compilation arguments. 
This configuration is only provided by the -requesting clients, build servers can examine the information and refuse a -compilation based on this configuration (just like with the system config, see -'client_checkers' in 'config.py'). Compilation configuration can be controlled -using command-line arguments (use 'bin/startcompile.py --help' for an -overview). - -Build tool options ------------------- - -Yet another part of the configuration are the options that are used by the -startcompile.py script itself: the user can specify what SVN path (relative to -a certain base path) and what Subversion revision is desired. The revision can -either be specified exactly, or as a range of versions. - -Installation -============ - -Build Server ------------- - -Installing the system should not be required: just run './bin/buildserver' to -start. Note that it depends on the `py lib`_ (as does the rest of PyPy). - -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). 
- -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. There are several locations in the code where - SVN is assumed: the command line options (see `build tool options`_), - the server (which checks SVN urls for validity, and converts HEAD revision - requests to actual revision ids) and and build client (which checks out the - data) all make this assumption, changing to a different revision control - system is currently not easy and unsupported (but who knows what the future - will bring). - - * it uses PyPy's config mechanism - - PyPy has a very nice, generic configuration mechanism (essentially wrapper - OptionParser stuff) that makes dealing with fragmented configuration - and command-line options a lot easier. This mechanism is used by the build - tool: it assumes configuration is provided in this format. If your project - uses this configuration mechanism already, you can provide the root Config - object from config.compile_config; if not it should be fairly straight- - forward to wrap your existing configuration with the PyPy stuff. - -Basically that's it: if your project is stored in SVN, and you don't mind using -Python a bit, it shouldn't be too hard to get things going (note that more -documentation about this subject will follow in the future). 
- -Web Front-End -============= - -To examine the status of the meta server, connected build servers and build -requests, there is a web server available. This can be started using -'./bin/webserver' and uses port 8080 by default (override in -config.py). - -The web server presents a number of different pages: - - * / and /metaserverstatus - meta server status - - this displays a small list of information about the meta server, such - as the amount of connected build servers, the amount of builds available, - the amount of waiting clients, etc. - - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/py/apipkg.py b/py/apipkg.py deleted file mode 100644 --- a/py/apipkg.py +++ /dev/null @@ -1,95 +0,0 @@ -""" -apipkg: control the exported namespace of a python package. - -see http://pypi.python.org/pypi/apipkg - -(c) holger krekel, 2009 - MIT license -""" -import sys -from types import ModuleType - -__version__ = "1.0b6" - -def initpkg(pkgname, exportdefs): - """ initialize given package from the export definitions. 
""" - mod = ApiModule(pkgname, exportdefs, implprefix=pkgname) - oldmod = sys.modules[pkgname] - mod.__file__ = getattr(oldmod, '__file__', None) - mod.__version__ = getattr(oldmod, '__version__', '0') - for name in ('__path__', '__loader__'): - if hasattr(oldmod, name): - setattr(mod, name, getattr(oldmod, name)) - sys.modules[pkgname] = mod - -def importobj(modpath, attrname): - module = __import__(modpath, None, None, ['__doc__']) - return getattr(module, attrname) - -class ApiModule(ModuleType): - def __init__(self, name, importspec, implprefix=None): - self.__name__ = name - self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] - self.__map__ = {} - self.__implprefix__ = implprefix or name - for name, importspec in importspec.items(): - if isinstance(importspec, dict): - subname = '%s.%s'%(self.__name__, name) - apimod = ApiModule(subname, importspec, implprefix) - sys.modules[subname] = apimod - setattr(self, name, apimod) - else: - modpath, attrname = importspec.split(':') - if modpath[0] == '.': - modpath = implprefix + modpath - if name == '__doc__': - self.__doc__ = importobj(modpath, attrname) - else: - self.__map__[name] = (modpath, attrname) - - def __repr__(self): - l = [] - if hasattr(self, '__version__'): - l.append("version=" + repr(self.__version__)) - if hasattr(self, '__file__'): - l.append('from ' + repr(self.__file__)) - if l: - return '' % (self.__name__, " ".join(l)) - return '' % (self.__name__,) - - def __makeattr(self, name): - """lazily compute value for name or raise AttributeError if unknown.""" - target = None - if '__onfirstaccess__' in self.__map__: - target = self.__map__.pop('__onfirstaccess__') - importobj(*target)() - try: - modpath, attrname = self.__map__[name] - except KeyError: - if target is not None and name != '__onfirstaccess__': - # retry, onfirstaccess might have set attrs - return getattr(self, name) - raise AttributeError(name) - else: - result = importobj(modpath, attrname) - setattr(self, name, result) 
- try: - del self.__map__[name] - except KeyError: - pass # in a recursive-import situation a double-del can happen - return result - - __getattr__ = __makeattr - - def __dict__(self): - # force all the content of the module to be loaded when __dict__ is read - dictdescr = ModuleType.__dict__['__dict__'] - dict = dictdescr.__get__(self) - if dict is not None: - hasattr(self, 'some') - for name in self.__all__: - try: - self.__makeattr(name) - except AttributeError: - pass - return dict - __dict__ = property(__dict__) diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). 
The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/py/bin/env.py b/py/bin/env.py deleted file mode 100644 --- a/py/bin/env.py +++ /dev/null @@ -1,33 +0,0 @@ -#!/usr/bin/env python - -import sys, os, os.path - -progpath = sys.argv[0] -packagedir = os.path.dirname(os.path.dirname(os.path.abspath(progpath))) -packagename = os.path.basename(packagedir) -bindir = os.path.join(packagedir, 'bin') -if sys.platform == 'win32': - bindir = os.path.join(bindir, 'win32') -rootdir = os.path.dirname(packagedir) - -def prepend_path(name, value): - sep = os.path.pathsep - curpath = os.environ.get(name, '') - newpath = [value] + [ x for x in curpath.split(sep) if x and x != value ] - return setenv(name, sep.join(newpath)) - -def setenv(name, value): - shell = os.environ.get('SHELL', '') - comspec = os.environ.get('COMSPEC', '') - if shell.endswith('csh'): - cmd = 'setenv %s "%s"' % (name, value) - elif shell.endswith('sh'): - cmd = '%s="%s"; export %s' % (name, value, name) - elif comspec.endswith('cmd.exe'): - cmd = 'set %s=%s' % (name, value) - else: - assert False, 'Shell not supported.' 
- return cmd - -print(prepend_path('PATH', bindir)) -print(prepend_path('PYTHONPATH', rootdir)) diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. diff --git a/py/_test/funcargs.py b/py/_test/funcargs.py deleted file mode 100644 --- a/py/_test/funcargs.py +++ /dev/null @@ -1,176 +0,0 @@ -import py - -def getfuncargnames(function): - argnames = py.std.inspect.getargs(py.code.getrawcode(function))[0] - startindex = py.std.inspect.ismethod(function) and 1 or 0 - defaults = getattr(function, 'func_defaults', - getattr(function, '__defaults__', None)) or () - numdefaults = len(defaults) - if numdefaults: - return argnames[startindex:-numdefaults] - return argnames[startindex:] - -def fillfuncargs(function): - """ fill missing funcargs. 
""" - request = FuncargRequest(pyfuncitem=function) - request._fillfuncargs() - -def getplugins(node, withpy=False): # might by any node - plugins = node.config._getmatchingplugins(node.fspath) - if withpy: - mod = node.getparent(py.test.collect.Module) - if mod is not None: - plugins.append(mod.obj) - inst = node.getparent(py.test.collect.Instance) - if inst is not None: - plugins.append(inst.obj) - return plugins - -_notexists = object() -class CallSpec: - def __init__(self, funcargs, id, param): - self.funcargs = funcargs - self.id = id - if param is not _notexists: - self.param = param - def __repr__(self): - return "" %( - self.id, getattr(self, 'param', '?'), self.funcargs) - -class Metafunc: - def __init__(self, function, config=None, cls=None, module=None): - self.config = config - self.module = module - self.function = function - self.funcargnames = getfuncargnames(function) - self.cls = cls - self.module = module - self._calls = [] - self._ids = py.builtin.set() - - def addcall(self, funcargs=None, id=_notexists, param=_notexists): - assert funcargs is None or isinstance(funcargs, dict) - if id is None: - raise ValueError("id=None not allowed") - if id is _notexists: - id = len(self._calls) - id = str(id) - if id in self._ids: - raise ValueError("duplicate id %r" % id) - self._ids.add(id) - self._calls.append(CallSpec(funcargs, id, param)) - -class FuncargRequest: - _argprefix = "pytest_funcarg__" - _argname = None - - class LookupError(LookupError): - """ error on performing funcarg request. 
""" - - def __init__(self, pyfuncitem): - self._pyfuncitem = pyfuncitem - self.function = pyfuncitem.obj - self.module = pyfuncitem.getparent(py.test.collect.Module).obj - clscol = pyfuncitem.getparent(py.test.collect.Class) - self.cls = clscol and clscol.obj or None - self.instance = py.builtin._getimself(self.function) - self.config = pyfuncitem.config - self.fspath = pyfuncitem.fspath - if hasattr(pyfuncitem, '_requestparam'): - self.param = pyfuncitem._requestparam - self._plugins = getplugins(pyfuncitem, withpy=True) - self._funcargs = self._pyfuncitem.funcargs.copy() - self._name2factory = {} - self._currentarg = None - - def _fillfuncargs(self): - argnames = getfuncargnames(self.function) - if argnames: - assert not getattr(self._pyfuncitem, '_args', None), ( - "yielded functions cannot have funcargs") - for argname in argnames: - if argname not in self._pyfuncitem.funcargs: - self._pyfuncitem.funcargs[argname] = self.getfuncargvalue(argname) - - def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): - """ cache and return result of calling setup(). - - The requested argument name, the scope and the ``extrakey`` - determine the cache key. The scope also determines when - teardown(result) will be called. valid scopes are: - scope == 'function': when the single test function run finishes. - scope == 'module': when tests in a different module are run - scope == 'session': when tests of the session have run. - """ - if not hasattr(self.config, '_setupcache'): - self.config._setupcache = {} # XXX weakref? 
- cachekey = (self._currentarg, self._getscopeitem(scope), extrakey) - cache = self.config._setupcache - try: - val = cache[cachekey] - except KeyError: - val = setup() - cache[cachekey] = val - if teardown is not None: - def finalizer(): - del cache[cachekey] - teardown(val) - self._addfinalizer(finalizer, scope=scope) - return val - - def getfuncargvalue(self, argname): - try: - return self._funcargs[argname] - except KeyError: - pass - if argname not in self._name2factory: - self._name2factory[argname] = self.config.pluginmanager.listattr( - plugins=self._plugins, - attrname=self._argprefix + str(argname) - ) - #else: we are called recursively - if not self._name2factory[argname]: - self._raiselookupfailed(argname) - funcargfactory = self._name2factory[argname].pop() - oldarg = self._currentarg - self._currentarg = argname - try: - self._funcargs[argname] = res = funcargfactory(request=self) - finally: - self._currentarg = oldarg - return res - - def _getscopeitem(self, scope): - if scope == "function": - return self._pyfuncitem - elif scope == "module": - return self._pyfuncitem.getparent(py.test.collect.Module) - elif scope == "session": - return None - raise ValueError("unknown finalization scope %r" %(scope,)) - - def _addfinalizer(self, finalizer, scope): - colitem = self._getscopeitem(scope) - self.config._setupstate.addfinalizer( - finalizer=finalizer, colitem=colitem) - - def addfinalizer(self, finalizer): - """ call the given finalizer after test function finished execution. 
""" - self._addfinalizer(finalizer, scope="function") - - def __repr__(self): - return "" %(self._pyfuncitem) - - def _raiselookupfailed(self, argname): - available = [] - for plugin in self._plugins: - for name in vars(plugin): - if name.startswith(self._argprefix): - name = name[len(self._argprefix):] - if name not in available: - available.append(name) - fspath, lineno, msg = self._pyfuncitem.reportinfo() - msg = "LookupError: no factory found for function argument %r" % (argname,) - msg += "\n available funcargs: %s" %(", ".join(available),) - msg += "\n use 'py.test --funcargs [testpath]' for help on them." - raise self.LookupError(msg) diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. 
- -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/annotation/model.py b/pypy/annotation/model.py --- a/pypy/annotation/model.py +++ b/pypy/annotation/model.py @@ -34,7 +34,7 @@ from pypy.tool.pairtype import pair, extendabletype from pypy.tool.tls import tlsobject from pypy.rlib.rarithmetic import r_uint, r_ulonglong, base_int -from pypy.rlib.rarithmetic import r_singlefloat, r_longfloat, isnan +from pypy.rlib.rarithmetic import r_singlefloat, r_longfloat import inspect, weakref DEBUG = False # set to False to disable recording of debugging information @@ -165,12 +165,12 @@ def __eq__(self, other): if (type(self) is SomeFloat and type(other) is SomeFloat and self.is_constant() and other.is_constant()): + from pypy.rlib.rfloat import isnan, copysign # NaN unpleasantness. if isnan(self.const) and isnan(other.const): return True # 0.0 vs -0.0 unpleasantness. if not self.const and not other.const: - from pypy.rlib.rarithmetic import copysign return copysign(1., self.const) == copysign(1., other.const) # return super(SomeFloat, self).__eq__(other) diff --git a/py/_compat/dep_textwrap.py b/py/_compat/dep_textwrap.py deleted file mode 100644 --- a/py/_compat/dep_textwrap.py +++ /dev/null @@ -1,5 +0,0 @@ -import py - -py.log._apiwarn("1.1", "py.compat.textwrap deprecated, use standard library version.", - stacklevel="apipkg") -textwrap = py.std.textwrap diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/py/_cmdline/pylookup.py b/py/_cmdline/pylookup.py deleted file mode 100755 --- a/py/_cmdline/pylookup.py +++ /dev/null @@ -1,85 +0,0 @@ -#!/usr/bin/env python - -"""\ -py.lookup [search_directory] SEARCH_STRING [options] - -Looks recursively at Python files for a SEARCH_STRING, starting from the -present working directory. Prints the line, with the filename and line-number -prepended.""" - -import sys, os -import py -from py.io import ansi_print, get_terminal_width -import re - -def rec(p): - return p.check(dotfile=0) - -parser = py.std.optparse.OptionParser(usage=__doc__) -parser.add_option("-i", "--ignore-case", action="store_true", dest="ignorecase", - help="ignore case distinctions") -parser.add_option("-C", "--context", action="store", type="int", dest="context", - default=0, help="How many lines of output to show") - -terminal_width = get_terminal_width() - -def find_indexes(search_line, string): - indexes = [] - before = 0 - while 1: - i = search_line.find(string, before) - if i == -1: - break - indexes.append(i) - before = i + len(string) - return indexes - -def main(): - (options, args) = parser.parse_args() - if len(args) == 2: - search_dir, string = args - search_dir = py.path.local(search_dir) - else: - search_dir = py.path.local() - string = args[0] - if options.ignorecase: - string = string.lower() - for x in search_dir.visit('*.py', rec): - # match 
filename directly - s = x.relto(search_dir) - if options.ignorecase: - s = s.lower() - if s.find(string) != -1: - sys.stdout.write("%s: filename matches %r" %(x, string) + "\n") - - try: - s = x.read() - except py.error.ENOENT: - pass # whatever, probably broken link (ie emacs lock) - searchs = s - if options.ignorecase: - searchs = s.lower() - if s.find(string) != -1: - lines = s.splitlines() - if options.ignorecase: - searchlines = s.lower().splitlines() - else: - searchlines = lines - for i, (line, searchline) in enumerate(zip(lines, searchlines)): - indexes = find_indexes(searchline, string) - if not indexes: - continue - if not options.context: - sys.stdout.write("%s:%d: " %(x.relto(search_dir), i+1)) - last_index = 0 - for index in indexes: - sys.stdout.write(line[last_index: index]) - ansi_print(line[index: index+len(string)], - file=sys.stdout, esc=31, newline=False) - last_index = index + len(string) - sys.stdout.write(line[last_index:] + "\n") - else: - context = (options.context)/2 - for count in range(max(0, i-context), min(len(lines) - 1, i+context+1)): - print("%s:%d: %s" %(x.relto(search_dir), count+1, lines[count].rstrip())) - print("-" * terminal_width) diff --git a/py/_plugin/pytest_pylint.py b/py/_plugin/pytest_pylint.py deleted file mode 100644 --- a/py/_plugin/pytest_pylint.py +++ /dev/null @@ -1,36 +0,0 @@ -"""pylint plugin - -XXX: Currently in progress, NOT IN WORKING STATE. 
-""" -import py - -pylint = py.test.importorskip("pylint.lint") - -def pytest_addoption(parser): - group = parser.getgroup('pylint options') - group.addoption('--pylint', action='store_true', - default=False, dest='pylint', - help='run pylint on python files.') - -def pytest_collect_file(path, parent): - if path.ext == ".py": - if parent.config.getvalue('pylint'): - return PylintItem(path, parent) - -#def pytest_terminal_summary(terminalreporter): -# print 'placeholder for pylint output' - -class PylintItem(py.test.collect.Item): - def runtest(self): - capture = py.io.StdCaptureFD() - try: - linter = pylint.lint.PyLinter() - linter.check(str(self.fspath)) - finally: - out, err = capture.reset() - rating = out.strip().split('\n')[-1] - sys.stdout.write(">>>") - print(rating) - assert 0 - - diff --git a/py/_cmdline/pytest.py b/py/_cmdline/pytest.py deleted file mode 100755 --- a/py/_cmdline/pytest.py +++ /dev/null @@ -1,5 +0,0 @@ -#!/usr/bin/env python -import py - -def main(args=None): - raise SystemExit(py.test.cmdline.main(args)) diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -47,7 +47,7 @@ return True return graphanalyze.GraphAnalyzer.analyze_external_call(self, op, seen) - def analyze_simple_operation(self, op): + def analyze_simple_operation(self, op, graphinfo): if op.opname in ('malloc', 'malloc_varsize'): flags = op.args[1].value return flags['flavor'] == 'gc' and not flags.get('nocollect', False) diff --git a/py/_test/config.py b/py/_test/config.py deleted file mode 100644 --- a/py/_test/config.py +++ /dev/null @@ -1,291 +0,0 @@ -import py, os -from py._test.conftesthandle import Conftest -from py._test.pluginmanager import PluginManager -from py._test import parseopt -from py._test.collect import RootCollector - -def ensuretemp(string, dir=1): - """ (deprecated) return temporary directory path 
with - the given string as the trailing part. It is usually - better to use the 'tmpdir' function argument which will - take care to provide empty unique directories for each - test call even if the test is called multiple times. - """ - #py.log._apiwarn(">1.1", "use tmpdir function argument") - return py.test.config.ensuretemp(string, dir=dir) - -class CmdOptions(object): - """ holds cmdline options as attributes.""" - def __init__(self, **kwargs): - self.__dict__.update(kwargs) - def __repr__(self): - return "" %(self.__dict__,) - -class Error(Exception): - """ Test Configuration Error. """ - -class Config(object): - """ access to config values, pluginmanager and plugin hooks. """ - Option = py.std.optparse.Option - Error = Error - basetemp = None - _sessionclass = None - - def __init__(self, topdir=None, option=None): - self.option = option or CmdOptions() - self.topdir = topdir - self._parser = parseopt.Parser( - usage="usage: %prog [options] [file_or_dir] [file_or_dir] [...]", - processopt=self._processopt, - ) - self.pluginmanager = PluginManager() - self._conftest = Conftest(onimport=self._onimportconftest) - self.hook = self.pluginmanager.hook - - def _onimportconftest(self, conftestmodule): - self.trace("loaded conftestmodule %r" %(conftestmodule,)) - self.pluginmanager.consider_conftest(conftestmodule) - - def _getmatchingplugins(self, fspath): - allconftests = self._conftest._conftestpath2mod.values() - plugins = [x for x in self.pluginmanager.getplugins() - if x not in allconftests] - plugins += self._conftest.getconftestmodules(fspath) - return plugins - - def trace(self, msg): - if getattr(self.option, 'traceconfig', None): - self.hook.pytest_trace(category="config", msg=msg) - - def _processopt(self, opt): - if hasattr(opt, 'default') and opt.dest: - val = os.environ.get("PYTEST_OPTION_" + opt.dest.upper(), None) - if val is not None: - if opt.type == "int": - val = int(val) - elif opt.type == "long": - val = long(val) - elif opt.type == "float": - 
val = float(val) - elif not opt.type and opt.action in ("store_true", "store_false"): - val = eval(val) - opt.default = val - else: - name = "option_" + opt.dest - try: - opt.default = self._conftest.rget(name) - except (ValueError, KeyError): - pass - if not hasattr(self.option, opt.dest): - setattr(self.option, opt.dest, opt.default) - - def _preparse(self, args): - self.pluginmanager.consider_setuptools_entrypoints() - self.pluginmanager.consider_env() - self.pluginmanager.consider_preparse(args) - self._conftest.setinitial(args) - self.pluginmanager.do_addoption(self._parser) - - def parse(self, args): - """ parse cmdline arguments into this config object. - Note that this can only be called once per testing process. - """ - assert not hasattr(self, 'args'), ( - "can only parse cmdline args at most once per Config object") - self._preparse(args) - self._parser.hints.extend(self.pluginmanager._hints) - args = self._parser.parse_setoption(args, self.option) - if not args: - args.append(py.std.os.getcwd()) - self.topdir = gettopdir(args) - self._rootcol = RootCollector(config=self) - self._setargs(args) - - def _setargs(self, args): - self.args = list(args) - self._argfspaths = [py.path.local(decodearg(x)[0]) for x in args] - - # config objects are usually pickled across system - # barriers but they contain filesystem paths. - # upon getstate/setstate we take care to do everything - # relative to "topdir". 
- def __getstate__(self): - l = [] - for path in self.args: - path = py.path.local(path) - l.append(path.relto(self.topdir)) - return l, self.option.__dict__ - - def __setstate__(self, repr): - # we have to set py.test.config because loading - # of conftest files may use it (deprecated) - # mainly by py.test.config.addoptions() - global config_per_process - py.test.config = config_per_process = self - args, cmdlineopts = repr - cmdlineopts = CmdOptions(**cmdlineopts) - # next line will registers default plugins - self.__init__(topdir=py.path.local(), option=cmdlineopts) - self._rootcol = RootCollector(config=self) - args = [str(self.topdir.join(x)) for x in args] - self._preparse(args) - self._setargs(args) - - def ensuretemp(self, string, dir=True): - return self.getbasetemp().ensure(string, dir=dir) - - def getbasetemp(self): - if self.basetemp is None: - basetemp = self.option.basetemp - if basetemp: - basetemp = py.path.local(basetemp) - if not basetemp.check(dir=1): - basetemp.mkdir() - else: - basetemp = py.path.local.make_numbered_dir(prefix='pytest-') - self.basetemp = basetemp - return self.basetemp - - def mktemp(self, basename, numbered=False): - basetemp = self.getbasetemp() - if not numbered: - return basetemp.mkdir(basename) - else: - return py.path.local.make_numbered_dir(prefix=basename, - keep=0, rootdir=basetemp, lock_timeout=None) - - def getinitialnodes(self): - return [self.getnode(arg) for arg in self.args] - - def getnode(self, arg): - parts = decodearg(arg) - path = py.path.local(parts.pop(0)) - if not path.check(): - raise self.Error("file not found: %s" %(path,)) - topdir = self.topdir - if path != topdir and not path.relto(topdir): - raise self.Error("path %r is not relative to %r" % - (str(path), str(topdir))) - # assumtion: pytest's fs-collector tree follows the filesystem tree - names = list(filter(None, path.relto(topdir).split(path.sep))) - names += parts - try: - return self._rootcol.getbynames(names) - except ValueError: - e = 
py.std.sys.exc_info()[1] - raise self.Error("can't collect: %s\n%s" % (arg, e.args[0])) - - def _getcollectclass(self, name, path): - try: - cls = self._conftest.rget(name, path) - except KeyError: - return getattr(py.test.collect, name) - else: - py.log._apiwarn(">1.1", "%r was found in a conftest.py file, " - "use pytest_collect hooks instead." % (cls,)) - return cls - - def getconftest_pathlist(self, name, path=None): - """ return a matching value, which needs to be sequence - of filenames that will be returned as a list of Path - objects (they can be relative to the location - where they were found). - """ - try: - mod, relroots = self._conftest.rget_with_confmod(name, path) - except KeyError: - return None - modpath = py.path.local(mod.__file__).dirpath() - l = [] - for relroot in relroots: - if not isinstance(relroot, py.path.local): - relroot = relroot.replace("/", py.path.local.sep) - relroot = modpath.join(relroot, abs=True) - l.append(relroot) - return l - - def addoptions(self, groupname, *specs): - """ add a named group of options to the current testing session. - This function gets invoked during testing session initialization. - """ - py.log._apiwarn("1.0", "define pytest_addoptions(parser) to add options", stacklevel=2) - group = self._parser.getgroup(groupname) - for opt in specs: - group._addoption_instance(opt) - return self.option - - def addoption(self, *optnames, **attrs): - return self._parser.addoption(*optnames, **attrs) - - def getvalueorskip(self, name, path=None): - """ return getvalue() or call py.test.skip if no value exists. """ - try: - val = self.getvalue(name, path) - if val is None: - raise KeyError(name) - return val - except KeyError: - py.test.skip("no %r value found" %(name,)) - - def getvalue(self, name, path=None): - """ return 'name' value looked up from the 'options' - and then from the first conftest file found up - the path (including the path itself). 
- if path is None, lookup the value in the initial - conftest modules found during command line parsing. - """ - try: - return getattr(self.option, name) - except AttributeError: - return self._conftest.rget(name, path) - - def setsessionclass(self, cls): - if self._sessionclass is not None: - raise ValueError("sessionclass already set to: %r" %( - self._sessionclass)) - self._sessionclass = cls - - def initsession(self): - """ return an initialized session object. """ - cls = self._sessionclass - if cls is None: - from py._test.session import Session - cls = Session - session = cls(self) - self.trace("instantiated session %r" % session) - return session - -# -# helpers -# - -def gettopdir(args): - """ return the top directory for the given paths. - if the common base dir resides in a python package - parent directory of the root package is returned. - """ - fsargs = [py.path.local(decodearg(arg)[0]) for arg in args] - p = fsargs and fsargs[0] or None - for x in fsargs[1:]: - p = p.common(x) - assert p, "cannot determine common basedir of %s" %(fsargs,) - pkgdir = p.pypkgpath() - if pkgdir is None: - if p.check(file=1): - p = p.dirpath() - return p - else: - return pkgdir.dirpath() - -def decodearg(arg): - arg = str(arg) - return arg.split("::") - -def onpytestaccess(): - # it's enough to have our containing module loaded as - # it initializes a per-process config instance - # which loads default plugins which add to py.test.* - pass - -# a default per-process instance of py.test configuration -config_per_process = Config() diff --git a/pypy/rpython/lltypesystem/test/test_rffi.py b/pypy/rpython/lltypesystem/test/test_rffi.py --- a/pypy/rpython/lltypesystem/test/test_rffi.py +++ b/pypy/rpython/lltypesystem/test/test_rffi.py @@ -769,6 +769,9 @@ def test_ptradd_interpret(): interpret(test_ptradd, []) +def test_voidptr(): + assert repr(VOIDP) == "<* Array of void >" + class TestCRffi(BaseTestRffi): def compile(self, func, args, **kwds): return compile_c(func, args, 
**kwds) diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.backendopt.constfold.txt b/pypy/doc/config/translation.backendopt.constfold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.constfold.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do constant folding of operations and constant propagation on flowgraphs. 
diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -155,16 +155,24 @@ self.emit_operation(op) def optimize_CALL_PURE(self, op): + arg_consts = [] for i in range(op.numargs()): arg = op.getarg(i) - if self.get_constant_box(arg) is None: + const = self.get_constant_box(arg) + if const is None: break + arg_consts.append(const) else: - # all constant arguments: constant-fold away - self.make_constant(op.result, op.getarg(0)) - return + # all constant arguments: check if we already know the reslut + try: + result = self.optimizer.call_pure_results[arg_consts] + except KeyError: + pass + else: + self.make_constant(op.result, result) + return # replace CALL_PURE with just CALL - args = op.getarglist()[1:] + args = op.getarglist() self.emit_operation(ResOperation(rop.CALL, args, op.result, op.getdescr())) diff --git a/py/_plugin/__init__.py b/py/_plugin/__init__.py deleted file mode 100644 --- a/py/_plugin/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/py/_test/conftesthandle.py b/py/_test/conftesthandle.py deleted file mode 100644 --- a/py/_test/conftesthandle.py +++ /dev/null @@ -1,113 +0,0 @@ -import py - -class Conftest(object): - """ the single place for accessing values and interacting - towards conftest modules from py.test objects. - - (deprecated) - Note that triggering Conftest instances to import - conftest.py files may result in added cmdline options. - """ - def __init__(self, onimport=None, confcutdir=None): - self._path2confmods = {} - self._onimport = onimport - self._conftestpath2mod = {} - self._confcutdir = confcutdir - - def setinitial(self, args): - """ try to find a first anchor path for looking up global values - from conftests. This function is usually called _before_ - argument parsing. 
conftest files may add command line options - and we thus have no completely safe way of determining - which parts of the arguments are actually related to options - and which are file system paths. We just try here to get - bootstrapped ... - """ - current = py.path.local() - opt = '--confcutdir' - for i in range(len(args)): - opt1 = str(args[i]) - if opt1.startswith(opt): - if opt1 == opt: - if len(args) > i: - p = current.join(args[i+1], abs=True) - elif opt1.startswith(opt + "="): - p = current.join(opt1[len(opt)+1:], abs=1) - self._confcutdir = p - break - for arg in args + [current]: - anchor = current.join(arg, abs=1) - if anchor.check(): # we found some file object - self._path2confmods[None] = self.getconftestmodules(anchor) - # let's also consider test* dirs - if anchor.check(dir=1): - for x in anchor.listdir(lambda x: x.check(dir=1, dotfile=0)): - self.getconftestmodules(x) - break - else: - assert 0, "no root of filesystem?" - - def getconftestmodules(self, path): - """ return a list of imported conftest modules for the given path. 
""" - try: - clist = self._path2confmods[path] - except KeyError: - if path is None: - raise ValueError("missing default confest.") - dp = path.dirpath() - if dp == path: - clist = [] - else: - cutdir = self._confcutdir - clist = self.getconftestmodules(dp) - if cutdir and path != cutdir and not path.relto(cutdir): - pass - else: - conftestpath = path.join("conftest.py") - if conftestpath.check(file=1): - clist.append(self.importconftest(conftestpath)) - self._path2confmods[path] = clist - # be defensive: avoid changes from caller side to - # affect us by always returning a copy of the actual list - return clist[:] - - def rget(self, name, path=None): - mod, value = self.rget_with_confmod(name, path) - return value - - def rget_with_confmod(self, name, path=None): - modules = self.getconftestmodules(path) - modules.reverse() - for mod in modules: - try: - return mod, getattr(mod, name) - except AttributeError: - continue - raise KeyError(name) - - def importconftest(self, conftestpath): - assert conftestpath.check(), conftestpath - try: - return self._conftestpath2mod[conftestpath] - except KeyError: - if not conftestpath.dirpath('__init__.py').check(file=1): - # HACK: we don't want any "globally" imported conftest.py, - # prone to conflicts and subtle problems - modname = str(conftestpath).replace('.', conftestpath.sep) - mod = conftestpath.pyimport(modname=modname) - else: - mod = conftestpath.pyimport() - self._conftestpath2mod[conftestpath] = mod - dirpath = conftestpath.dirpath() - if dirpath in self._path2confmods: - for path, mods in self._path2confmods.items(): - if path and path.relto(dirpath) or path == dirpath: - assert mod not in mods - mods.append(mod) - self._postimport(mod) - return mod - - def _postimport(self, mod): - if self._onimport: - self._onimport(mod) - return mod diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ 
-1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -32,7 +32,8 @@ "crypt", "signal", "_rawffi", "termios", "zlib", "bz2", "struct", "_hashlib", "_md5", "_sha", "_minimal_curses", "cStringIO", "thread", "itertools", "pyexpat", "_ssl", "cpyext", "array", - "_bisect", "binascii", "_multiprocessing", '_warnings'] + "_bisect", "binascii", "_multiprocessing", '_warnings', + "_collections"] )) translation_modules = default_modules.copy() @@ -79,8 +80,7 @@ "_rawffi": [("objspace.usemodules.struct", True)], "cpyext": [("translation.secondaryentrypoints", "cpyext"), ("translation.shared", sys.platform == "win32")], - "_ffi": [("translation.jit_ffi", True)], - } +} module_import_dependencies = { # no _rawffi if importing pypy.rlib.clibffi raises ImportError @@ -351,7 +351,7 @@ config.objspace.std.suggest(builtinshortcut=True) config.objspace.std.suggest(optimized_list_getitem=True) config.objspace.std.suggest(getattributeshortcut=True) - config.objspace.std.suggest(newshortcut=True) + config.objspace.std.suggest(newshortcut=True) if not IS_64_BITS: config.objspace.std.suggest(withsmalllong=True) diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. 
diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. From commits-noreply at bitbucket.org Thu Mar 24 09:07:26 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 09:07:26 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: Fix test_mapdict. Message-ID: <20110324080726.17E4B2A202E@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42888:6dfb0dd2e503 Date: 2011-03-22 17:32 +0100 http://bitbucket.org/pypy/pypy/changeset/6dfb0dd2e503/ Log: Fix test_mapdict. 
diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -1,6 +1,7 @@ import weakref from pypy.rlib import jit, objectmodel, debug from pypy.rlib.rarithmetic import intmask, r_uint +from pypy.rlib import rerased from pypy.interpreter.baseobjspace import W_Root from pypy.objspace.std.dictmultiobject import W_DictMultiObject @@ -515,11 +516,9 @@ nmin1 = n - 1 rangenmin1 = unroll.unrolling_iterable(range(nmin1)) if use_erased: - from pypy.rlib import rerased - erase = rerased.erase - unerase = rerased.unerase + erase = erase_item + unerase = unerase_item else: - rerased = None # don't use in that case erase = lambda x: x unerase = lambda x, t: x # @@ -548,12 +547,11 @@ if index < nmin1: for i in rangenmin1: if index == i: - erased = getattr(self, "_value%s" % i) - return unerase_item(erased) + return getattr(self, "_value%s" % i) if self._has_storage_list(): return self._mapdict_get_storage_list()[index - nmin1] erased = getattr(self, "_value%s" % nmin1) - return unerase_item(erased) + return unerase(erased) def _mapdict_write_storage(self, index, value): for i in rangenmin1: @@ -563,7 +561,7 @@ if self._has_storage_list(): self._mapdict_get_storage_list()[index - nmin1] = value return - erased = erase_item(value) + erased = erase(value) setattr(self, "_value%s" % nmin1, erased) def _mapdict_storage_length(self): @@ -583,14 +581,14 @@ has_storage_list = self._has_storage_list() if len_storage < n: assert not has_storage_list - erased = erase_item(None) + erased = erase(None) elif len_storage == n: assert not has_storage_list - erased = erase_item(storage[nmin1]) + erased = erase(storage[nmin1]) elif not has_storage_list: # storage is longer than self.map.length() only due to # overallocation - erased = erase_item(storage[nmin1]) + erased = erase(storage[nmin1]) # in theory, we should be ultra-paranoid and check all entries, # but checking just one should catch most problems 
anyway: assert storage[n] is None From commits-noreply at bitbucket.org Thu Mar 24 09:07:38 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 09:07:38 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: hg merge default Message-ID: <20110324080738.BCA1C2A202F@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42889:cb157cdd6e25 Date: 2011-03-24 09:07 +0100 http://bitbucket.org/pypy/pypy/changeset/cb157cdd6e25/ Log: hg merge default diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,2 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,2 +0,0 @@ -80037 greenlet -80409 lib_pypy/pyrepl From commits-noreply at bitbucket.org Thu Mar 24 09:13:39 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 09:13:39 +0100 (CET) Subject: [pypy-svn] pypy default: Fix. Message-ID: <20110324081339.1F1C936C204@codespeak.net> Author: Armin Rigo Branch: Changeset: r42890:284827ae5942 Date: 2011-03-24 09:11 +0100 http://bitbucket.org/pypy/pypy/changeset/284827ae5942/ Log: Fix. diff --git a/pypy/translator/goal/targetrpystonedalone.py b/pypy/translator/goal/targetrpystonedalone.py --- a/pypy/translator/goal/targetrpystonedalone.py +++ b/pypy/translator/goal/targetrpystonedalone.py @@ -2,11 +2,11 @@ from pypy.translator.test import rpystone from pypy.translator.goal import richards import pypy.interpreter.gateway # needed before sys, order of imports !!! 
-from pypy.module.sys.version import svn_revision +from pypy.tool.version import get_repo_version_info # __________ Entry point __________ -VERSION = svn_revision() +VERSION = get_repo_version_info()[2] # note that we have %f but no length specifiers in RPython From commits-noreply at bitbucket.org Thu Mar 24 09:13:39 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 09:13:39 +0100 (CET) Subject: [pypy-svn] pypy 32ptr-on-64bit: hg merge default Message-ID: <20110324081339.6974736C20A@codespeak.net> Author: Armin Rigo Branch: 32ptr-on-64bit Changeset: r42891:1a9ae3b6bd36 Date: 2011-03-24 09:12 +0100 http://bitbucket.org/pypy/pypy/changeset/1a9ae3b6bd36/ Log: hg merge default From commits-noreply at bitbucket.org Thu Mar 24 10:10:00 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 10:10:00 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Fix test. Message-ID: <20110324091000.559152A202E@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42892:56ee0c690ecd Date: 2011-03-24 09:52 +0100 http://bitbucket.org/pypy/pypy/changeset/56ee0c690ecd/ Log: Fix test. 
diff --git a/pypy/jit/metainterp/test/test_compile.py b/pypy/jit/metainterp/test/test_compile.py --- a/pypy/jit/metainterp/test/test_compile.py +++ b/pypy/jit/metainterp/test/test_compile.py @@ -41,7 +41,6 @@ return repr(op) class FakeState(object): - optimize_loop = staticmethod(nounroll_optimize.optimize_loop) enable_opts = ALL_OPTS_DICT.copy() enable_opts.pop('unroll') From commits-noreply at bitbucket.org Thu Mar 24 10:10:02 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 10:10:02 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Instead of storing the memo on the single global instance of Logger, Message-ID: <20110324091002.471482A2031@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42893:68c27948ff6f Date: 2011-03-24 09:56 +0100 http://bitbucket.org/pypy/pypy/changeset/68c27948ff6f/ Log: Instead of storing the memo on the single global instance of Logger, create a new instance of LogOperations for each loop that we want to log, and attach it to the logged loop. This allows us to print later an operation from the loop, without the need for the global 'memo' field which breaks test_free_object. 
diff --git a/pypy/jit/metainterp/optimizeopt/fficall.py b/pypy/jit/metainterp/optimizeopt/fficall.py --- a/pypy/jit/metainterp/optimizeopt/fficall.py +++ b/pypy/jit/metainterp/optimizeopt/fficall.py @@ -73,7 +73,10 @@ def setup(self): self.funcinfo = None - self.logger = self.optimizer.metainterp_sd.logger_ops + if self.optimizer.loop is not None: + self.logops = self.optimizer.loop.logops + else: + self.logops = None def propagate_begin_forward(self): debug_start('jit-log-ffiopt') @@ -100,8 +103,8 @@ # # we immediately set funcinfo to None to prevent recursion when # calling emit_op - if have_debug_prints(): - debug_print('rollback: ' + msg + ': ', self.logger.repr_of_op(op)) + if self.logops is not None: + debug_print('rollback: ' + msg + ': ', self.logops.repr_of_op(op)) funcinfo = self.funcinfo self.funcinfo = None self.emit_operation(funcinfo.prepare_op) @@ -198,8 +201,8 @@ return ops def propagate_forward(self, op): - if have_debug_prints(): - debug_print(self.logger.repr_of_op(op)) + if self.logops is not None: + debug_print(self.logops.repr_of_op(op)) opnum = op.getopnum() for value, func in optimize_ops: if opnum == value: diff --git a/pypy/jit/metainterp/logger.py b/pypy/jit/metainterp/logger.py --- a/pypy/jit/metainterp/logger.py +++ b/pypy/jit/metainterp/logger.py @@ -10,45 +10,59 @@ class Logger(object): def __init__(self, metainterp_sd, guard_number=False): - """ - resoperation logger. 
Note that you should call repr_of_op only - *after* the corresponding loop has been fully logged, else you might - get different results (in particular, variable numbers could be - different) - """ self.metainterp_sd = metainterp_sd - self.ts = metainterp_sd.cpu.ts self.guard_number = guard_number - self.memo = {} def log_loop(self, inputargs, operations, number=0, type=None): if type is None: debug_start("jit-log-noopt-loop") - self._log_operations(inputargs, operations) + logops = self._log_operations(inputargs, operations) debug_stop("jit-log-noopt-loop") else: debug_start("jit-log-opt-loop") debug_print("# Loop", number, ":", type, "with", len(operations), "ops") - self._log_operations(inputargs, operations) + logops = self._log_operations(inputargs, operations) debug_stop("jit-log-opt-loop") + return logops def log_bridge(self, inputargs, operations, number=-1): if number == -1: debug_start("jit-log-noopt-bridge") - self._log_operations(inputargs, operations) + logops = self._log_operations(inputargs, operations) debug_stop("jit-log-noopt-bridge") else: debug_start("jit-log-opt-bridge") debug_print("# bridge out of Guard", number, "with", len(operations), "ops") - self._log_operations(inputargs, operations) + logops = self._log_operations(inputargs, operations) debug_stop("jit-log-opt-bridge") + return logops def log_short_preamble(self, inputargs, operations): debug_start("jit-log-short-preamble") - self._log_operations(inputargs, operations) - debug_stop("jit-log-short-preamble") + logops = self._log_operations(inputargs, operations) + debug_stop("jit-log-short-preamble") + return logops + + def _log_operations(self, inputargs, operations): + if not have_debug_prints(): + return None + logops = LogOperations(self.metainterp_sd, self.guard_number) + logops.log_operations(inputargs, operations) + return logops + + +class LogOperations(object): + """ + ResOperation logger. 
Each instance contains a memo giving numbers + to boxes, and is typically used to log a single loop. + """ + def __init__(self, metainterp_sd, guard_number): + self.metainterp_sd = metainterp_sd + self.ts = metainterp_sd.cpu.ts + self.guard_number = guard_number + self.memo = {} def repr_of_descr(self, descr): return descr.repr_of_descr() @@ -104,10 +118,7 @@ fail_args = '' return res + op.getopname() + '(' + args + ')' + fail_args - def _log_operations(self, inputargs, operations): - self.memo = {} - if not have_debug_prints(): - return + def log_operations(self, inputargs, operations): if inputargs is not None: args = ", ".join([self.repr_of_arg(arg) for arg in inputargs]) debug_print('[' + args + ']') diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py --- a/pypy/jit/metainterp/history.py +++ b/pypy/jit/metainterp/history.py @@ -792,6 +792,7 @@ operations = None token = None call_pure_results = None + logops = None def __init__(self, name): self.name = name diff --git a/pypy/jit/metainterp/optimize.py b/pypy/jit/metainterp/optimize.py --- a/pypy/jit/metainterp/optimize.py +++ b/pypy/jit/metainterp/optimize.py @@ -14,7 +14,8 @@ def _optimize_loop(metainterp_sd, old_loop_tokens, loop, enable_opts): cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) + loop.logops = metainterp_sd.logger_noopt.log_loop(loop.inputargs, + loop.operations) # XXX do we really still need a list? 
if old_loop_tokens: return old_loop_tokens[0] @@ -36,7 +37,8 @@ def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, enable_opts, inline_short_preamble, retraced=False): cpu = metainterp_sd.cpu - metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) + bridge.logops = metainterp_sd.logger_noopt.log_loop(bridge.inputargs, + bridge.operations) if old_loop_tokens: old_loop_token = old_loop_tokens[0] bridge.operations[-1].setdescr(old_loop_token) # patch jump target From commits-noreply at bitbucket.org Thu Mar 24 10:10:04 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 10:10:04 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Fix test_logger.py. Message-ID: <20110324091004.A82E82A2031@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42894:e6cce505ab63 Date: 2011-03-24 10:05 +0100 http://bitbucket.org/pypy/pypy/changeset/e6cce505ab63/ Log: Fix test_logger.py. diff --git a/pypy/jit/metainterp/logger.py b/pypy/jit/metainterp/logger.py --- a/pypy/jit/metainterp/logger.py +++ b/pypy/jit/metainterp/logger.py @@ -48,10 +48,14 @@ def _log_operations(self, inputargs, operations): if not have_debug_prints(): return None - logops = LogOperations(self.metainterp_sd, self.guard_number) + logops = self._make_log_operations() logops.log_operations(inputargs, operations) return logops + def _make_log_operations(self): + # hook for tests + return LogOperations(self.metainterp_sd, self.guard_number) + class LogOperations(object): """ diff --git a/pypy/jit/metainterp/test/test_logger.py b/pypy/jit/metainterp/test/test_logger.py --- a/pypy/jit/metainterp/test/test_logger.py +++ b/pypy/jit/metainterp/test/test_logger.py @@ -36,11 +36,16 @@ return capturing(logger.Logger.log_loop, self, loop.inputargs, loop.operations) - def repr_of_descr(self, descr): - for k, v in self.namespace.items(): - if v == descr: - return k - return descr.repr_of_descr() + def _make_log_operations(self1): + class LogOperations(logger.LogOperations): 
+ def repr_of_descr(self, descr): + for k, v in self1.namespace.items(): + if v == descr: + return k + return descr.repr_of_descr() + logops = LogOperations(self1.metainterp_sd, self1.guard_number) + self1.logops = logops + return logops class TestLogger(object): ts = llhelper @@ -188,4 +193,4 @@ ''' logger, loop, _ = self.reparse(inp) op = loop.operations[1] - assert logger.repr_of_op(op) == "i8 = int_add(i6, 3)" + assert logger.logops.repr_of_op(op) == "i8 = int_add(i6, 3)" From commits-noreply at bitbucket.org Thu Mar 24 10:33:13 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 10:33:13 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Imports clean-up. Message-ID: <20110324093313.754252A202E@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42895:7a0045afcd5d Date: 2011-03-24 10:32 +0100 http://bitbucket.org/pypy/pypy/changeset/7a0045afcd5d/ Log: Imports clean-up. diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -5,21 +5,16 @@ soon as possible (at least in a simple case). 
""" -import weakref, random +import weakref import py from pypy.annotation import policy as annpolicy from pypy.rlib import rgc from pypy.rpython.lltypesystem import lltype, llmemory, rffi -from pypy.rpython.lltypesystem.lloperation import llop from pypy.rlib.jit import JitDriver, dont_look_inside from pypy.rlib.jit import purefunction, unroll_safe -from pypy.jit.backend.x86.runner import CPU386 -from pypy.jit.backend.llsupport.gc import GcRefList, GcRootMap_asmgcc from pypy.jit.backend.llsupport.gc import GcLLDescr_framework from pypy.tool.udir import udir -from pypy.jit.backend.x86.arch import IS_X86_64 from pypy.config.translationoption import DEFL_GC -import py.test class X(object): def __init__(self, x=0): From commits-noreply at bitbucket.org Thu Mar 24 11:29:07 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 11:29:07 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: On x86-64, put the correct dfi assembler macros to allow gdb Message-ID: <20110324102907.76CBA2A202E@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42896:f5f58e61db15 Date: 2011-03-24 11:28 +0100 http://bitbucket.org/pypy/pypy/changeset/f5f58e61db15/ Log: On x86-64, put the correct dfi assembler macros to allow gdb to debug the function (and walk past it in the backtrace). 
diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py --- a/pypy/translator/c/gcc/trackgcroot.py +++ b/pypy/translator/c/gcc/trackgcroot.py @@ -1647,6 +1647,7 @@ print >> output, """\ /* See description in asmgcroot.py */ + .cfi_startproc movq\t%rdi, %rdx\t/* 1st argument, which is the callback */ movq\t%rsi, %rcx\t/* 2nd argument, which is gcrootanchor */ movq\t%rsp, %rax\t/* my frame top address */ @@ -1666,6 +1667,7 @@ pushq\t%rcx\t\t\t/* self->prev = gcrootanchor */ movq\t%rsp, 8(%rcx)\t/* gcrootanchor->next = self */ movq\t%rsp, 0(%rax)\t\t\t/* next->prev = self */ + .cfi_def_cfa_offset 80\t/* 9 pushes + the retaddr = 80 bytes */ /* note: the Mac OS X 16 bytes aligment must be respected. */ call\t*%rdx\t\t/* invoke the callback */ @@ -1687,6 +1689,7 @@ /* the return value is the one of the 'call' above, */ /* because %rax (and possibly %rdx) are unmodified */ ret + .cfi_endproc """ _variant(elf64='.size pypy_asm_stackwalk, .-pypy_asm_stackwalk', darwin64='') From commits-noreply at bitbucket.org Thu Mar 24 12:22:51 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 24 Mar 2011 12:22:51 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: introduce 'typed pointers', which carry info on which type they are pointing to; also, pass the argtype to the _as_ffi_pointer_ method; this should allow _ctypes to do type check on pointers when converting arguments Message-ID: <20110324112251.B7C5F2A202E@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42897:888a6561b67a Date: 2011-03-24 12:22 +0100 http://bitbucket.org/pypy/pypy/changeset/888a6561b67a/ Log: introduce 'typed pointers', which carry info on which type they are pointing to; also, pass the argtype to the _as_ffi_pointer_ method; this should allow _ctypes to do type check on pointers when converting arguments diff --git a/pypy/module/_ffi/test/test__ffi.py b/pypy/module/_ffi/test/test__ffi.py --- a/pypy/module/_ffi/test/test__ffi.py +++ 
b/pypy/module/_ffi/test/test__ffi.py @@ -169,7 +169,8 @@ class MyPointerWrapper(object): def __init__(self, value): self.value = value - def _as_ffi_pointer_(self): + def _as_ffi_pointer_(self, ffitype): + assert ffitype is types.pointer return self.value libfoo = CDLL(self.libfoo_name) @@ -186,6 +187,31 @@ assert get_dummy() == 123 set_val_to_ptr(ptr2, 0) + def test_typed_pointer(self): + from _ffi import types + intptr = types.Pointer(types.sint) # create a typed pointer to sint + assert intptr.deref_pointer() is types.sint + assert str(intptr) == '' + assert types.sint.deref_pointer() is None + + def test_typed_pointer_args(self): + """ + extern int dummy; // defined in test_void_result + DLLEXPORT int* get_dummy_ptr(); // defined in test_pointer_args + DLLEXPORT void set_val_to_ptr(int* ptr, int val); // ditto + """ + from _ffi import CDLL, types + + libfoo = CDLL(self.libfoo_name) + intptr = types.Pointer(types.sint) + get_dummy = libfoo.getfunc('get_dummy', [], types.sint) + get_dummy_ptr = libfoo.getfunc('get_dummy_ptr', [], intptr) + set_val_to_ptr = libfoo.getfunc('set_val_to_ptr', [intptr, types.sint], types.void) + assert get_dummy() == 0 + ptr = get_dummy_ptr() + set_val_to_ptr(ptr, 123) + assert get_dummy() == 123 + set_val_to_ptr(ptr, 0) def test_huge_pointer_args(self): """ diff --git a/pypy/module/_ffi/interp_ffi.py b/pypy/module/_ffi/interp_ffi.py --- a/pypy/module/_ffi/interp_ffi.py +++ b/pypy/module/_ffi/interp_ffi.py @@ -14,17 +14,23 @@ from pypy.rlib.rarithmetic import intmask, r_uint class W_FFIType(Wrappable): - def __init__(self, name, ffitype, w_datashape=None): + def __init__(self, name, ffitype, w_datashape=None, w_pointer_to=None): self.name = name self.ffitype = ffitype self.w_datashape = w_datashape + self.w_pointer_to = w_pointer_to if self.is_struct(): assert w_datashape is not None - def str(self, space): + def descr_deref_pointer(self, space): + if self.w_pointer_to is None: + return space.w_None + return self.w_pointer_to + + def 
repr(self, space): return space.wrap(self.__str__()) - def __str__(self): + def __repr__(self): return "" % self.name def is_signed(self): @@ -42,7 +48,7 @@ self is app_types.ulonglong) def is_pointer(self): - return self is app_types.pointer + return self.ffitype is libffi.types.pointer def is_char(self): return self is app_types.char @@ -68,7 +74,8 @@ W_FFIType.typedef = TypeDef( 'FFIType', - __str__ = interp2app(W_FFIType.str), + __repr__ = interp2app(W_FFIType.repr), + deref_pointer = interp2app(W_FFIType.descr_deref_pointer), ) @@ -113,10 +120,15 @@ pass app_types.__dict__ = build_ffi_types() +def descr_new_pointer(space, w_cls, w_pointer_to): + name = '(pointer to %s)' % w_pointer_to.name + return W_FFIType(name, libffi.types.pointer, w_pointer_to = w_pointer_to) + class W_types(Wrappable): pass W_types.typedef = TypeDef( 'types', + Pointer = interp2app(descr_new_pointer, as_classmethod=True), **app_types.__dict__) @@ -164,7 +176,7 @@ elif w_argtype.is_signed(): argchain.arg(space.int_w(w_arg)) elif w_argtype.is_pointer(): - w_arg = self.convert_pointer_arg_maybe(space, w_arg) + w_arg = self.convert_pointer_arg_maybe(space, w_arg, w_argtype) argchain.arg(intmask(space.uint_w(w_arg))) elif w_argtype.is_unsigned(): argchain.arg(intmask(space.uint_w(w_arg))) @@ -187,13 +199,13 @@ assert False, "Argument shape '%s' not supported" % w_argtype return argchain - def convert_pointer_arg_maybe(self, space, w_arg): + def convert_pointer_arg_maybe(self, space, w_arg, w_argtype): """ Try to convert the argument by calling _as_ffi_pointer_() """ meth = space.lookup(w_arg, '_as_ffi_pointer_') # this also promotes the type if meth: - return space.call_function(meth, w_arg) + return space.call_function(meth, w_arg, w_argtype) else: return w_arg From commits-noreply at bitbucket.org Thu Mar 24 12:22:51 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 24 Mar 2011 12:22:51 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: merge heads Message-ID: 
<20110324112251.F19C32A202F@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42898:e4b684baf9e4 Date: 2011-03-24 12:22 +0100 http://bitbucket.org/pypy/pypy/changeset/e4b684baf9e4/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 24 13:45:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 13:45:32 +0100 (CET) Subject: [pypy-svn] pypy default: Bug found with targetbf.py: the second setfield_gc() is missing. Message-ID: <20110324124532.9138E2A202F@codespeak.net> Author: Armin Rigo Branch: Changeset: r42899:ebcb1b1a59e2 Date: 2011-03-24 13:45 +0100 http://bitbucket.org/pypy/pypy/changeset/ebcb1b1a59e2/ Log: Bug found with targetbf.py: the second setfield_gc() is missing. diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -2356,6 +2356,33 @@ """ self.optimize_loop(ops, expected, preamble) + def test_bug_5(self): + ops = """ + [p0] + i0 = escape() + i2 = getfield_gc(p0, descr=valuedescr) + i4 = int_add(i2, 1) + setfield_gc(p0, i4, descr=valuedescr) + guard_true(i0) [] + i6 = getfield_gc(p0, descr=valuedescr) + i8 = int_sub(i6, 1) + setfield_gc(p0, i8, descr=valuedescr) + escape() + jump(p0) + """ + expected = """ + [p0] + i0 = escape() + i2 = getfield_gc(p0, descr=valuedescr) + i4 = int_add(i2, 1) + setfield_gc(p0, i4, descr=valuedescr) + guard_true(i0) [] + setfield_gc(p0, i2, descr=valuedescr) + escape() + jump(p0) + """ + self.optimize_loop(ops, expected) + def test_invalid_loop_1(self): ops = """ [p1] From commits-noreply at bitbucket.org Thu Mar 24 14:55:22 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 24 Mar 2011 14:55:22 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix translation Message-ID: <20110324135522.D451F2A2030@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42900:54d5bca7974e Date: 2011-03-24 12:36 +0100 
http://bitbucket.org/pypy/pypy/changeset/54d5bca7974e/ Log: fix translation diff --git a/pypy/module/_ffi/test/test__ffi.py b/pypy/module/_ffi/test/test__ffi.py --- a/pypy/module/_ffi/test/test__ffi.py +++ b/pypy/module/_ffi/test/test__ffi.py @@ -193,6 +193,7 @@ assert intptr.deref_pointer() is types.sint assert str(intptr) == '' assert types.sint.deref_pointer() is None + raises(TypeError, "types.Pointer(42)") def test_typed_pointer_args(self): """ diff --git a/pypy/module/_ffi/interp_ffi.py b/pypy/module/_ffi/interp_ffi.py --- a/pypy/module/_ffi/interp_ffi.py +++ b/pypy/module/_ffi/interp_ffi.py @@ -121,6 +121,7 @@ app_types.__dict__ = build_ffi_types() def descr_new_pointer(space, w_cls, w_pointer_to): + w_pointer_to = space.interp_w(W_FFIType, w_pointer_to) name = '(pointer to %s)' % w_pointer_to.name return W_FFIType(name, libffi.types.pointer, w_pointer_to = w_pointer_to) From commits-noreply at bitbucket.org Thu Mar 24 14:55:23 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 24 Mar 2011 14:55:23 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: typo Message-ID: <20110324135523.66A3A2A2030@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42901:51e0b1c5e3aa Date: 2011-03-24 14:55 +0100 http://bitbucket.org/pypy/pypy/changeset/51e0b1c5e3aa/ Log: typo diff --git a/pypy/module/_ffi/interp_ffi.py b/pypy/module/_ffi/interp_ffi.py --- a/pypy/module/_ffi/interp_ffi.py +++ b/pypy/module/_ffi/interp_ffi.py @@ -28,7 +28,7 @@ return self.w_pointer_to def repr(self, space): - return space.wrap(self.__str__()) + return space.wrap(self.__repr__()) def __repr__(self): return "" % self.name From commits-noreply at bitbucket.org Thu Mar 24 15:19:38 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 15:19:38 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: - improve fixed-width font Message-ID: <20110324141938.4D78F2A2030@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3393:6b0d386fb06e Date: 
2011-03-24 14:55 +0100 http://bitbucket.org/pypy/extradoc/changeset/6b0d386fb06e/ Log: - improve fixed-width font - turn some of the code into figures diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -6,91 +6,10 @@ \usepackage{color} \usepackage{ulem} \usepackage{xspace} +\usepackage[scaled=0.8]{beramono} \usepackage[utf8]{inputenc} -\makeatletter -\def\PY at reset{\let\PY at it=\relax \let\PY at bf=\relax% - \let\PY at ul=\relax \let\PY at tc=\relax% - \let\PY at bc=\relax \let\PY at ff=\relax} -\def\PY at tok#1{\csname PY at tok@#1\endcsname} -\def\PY at toks#1+{\ifx\relax#1\empty\else% - \PY at tok{#1}\expandafter\PY at toks\fi} -\def\PY at do#1{\PY at bc{\PY at tc{\PY at ul{% - \PY at it{\PY at bf{\PY at ff{#1}}}}}}} -\def\PY#1#2{\PY at reset\PY at toks#1+\relax+\PY at do{#2}} - -\def\PY at tok@gd{\def\PY at bc##1{\fcolorbox[rgb]{0.80,0.00,0.00}{1.00,0.80,0.80}{##1}}} -\def\PY at tok@gu{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}} -\def\PY at tok@gt{\def\PY at tc##1{\textcolor[rgb]{0.60,0.80,0.40}{##1}}} -\def\PY at tok@gs{\let\PY at bf=\textbf} -\def\PY at tok@gr{\def\PY at tc##1{\textcolor[rgb]{1.00,0.00,0.00}{##1}}} -\def\PY at tok@cm{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} -\def\PY at tok@vg{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} -\def\PY at tok@m{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@mh{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@cs{\let\PY at bf=\textbf\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} -\def\PY at tok@ge{\let\PY at it=\textit} -\def\PY at tok@vc{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} -\def\PY at tok@il{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@go{\def\PY at tc##1{\textcolor[rgb]{0.67,0.67,0.67}{##1}}} -\def\PY at tok@cp{\def\PY at 
tc##1{\textcolor[rgb]{0.00,0.60,0.60}{##1}}} -\def\PY at tok@gi{\def\PY at bc##1{\fcolorbox[rgb]{0.00,0.80,0.00}{0.80,1.00,0.80}{##1}}} -\def\PY at tok@gh{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.00}{##1}}} -\def\PY at tok@ni{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,0.60}{##1}}} -\def\PY at tok@nl{\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}} -\def\PY at tok@nn{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.80,1.00}{##1}}} -\def\PY at tok@no{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.00}{##1}}} -\def\PY at tok@na{\def\PY at tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}} -\def\PY at tok@nb{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}} -\def\PY at tok@nc{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.67,0.53}{##1}}} -\def\PY at tok@nd{\def\PY at tc##1{\textcolor[rgb]{0.60,0.60,1.00}{##1}}} -\def\PY at tok@ne{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.80,0.00,0.00}{##1}}} -\def\PY at tok@nf{\def\PY at tc##1{\textcolor[rgb]{0.80,0.00,1.00}{##1}}} -\def\PY at tok@si{\def\PY at tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}} -\def\PY at tok@s2{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@vi{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} -\def\PY at tok@nt{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.20,0.00,0.60}{##1}}} -\def\PY at tok@nv{\def\PY at tc##1{\textcolor[rgb]{0.00,0.20,0.20}{##1}}} -\def\PY at tok@s1{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@gp{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.00,0.60}{##1}}} -\def\PY at tok@sh{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@ow{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.00,0.00}{##1}}} -\def\PY at tok@sx{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@bp{\def\PY at tc##1{\textcolor[rgb]{0.20,0.40,0.40}{##1}}} -\def\PY at tok@c1{\let\PY at 
it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} -\def\PY at tok@kc{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@c{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.00,0.60,1.00}{##1}}} -\def\PY at tok@mf{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@err{\def\PY at tc##1{\textcolor[rgb]{0.67,0.00,0.00}{##1}}\def\PY at bc##1{\colorbox[rgb]{1.00,0.67,0.67}{##1}}} -\def\PY at tok@kd{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@ss{\def\PY at tc##1{\textcolor[rgb]{1.00,0.80,0.20}{##1}}} -\def\PY at tok@sr{\def\PY at tc##1{\textcolor[rgb]{0.20,0.67,0.67}{##1}}} -\def\PY at tok@mo{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@mi{\def\PY at tc##1{\textcolor[rgb]{1.00,0.40,0.00}{##1}}} -\def\PY at tok@kn{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@o{\def\PY at tc##1{\textcolor[rgb]{0.33,0.33,0.33}{##1}}} -\def\PY at tok@kr{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@s{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@kp{\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@w{\def\PY at tc##1{\textcolor[rgb]{0.73,0.73,0.73}{##1}}} -\def\PY at tok@kt{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.47,0.53}{##1}}} -\def\PY at tok@sc{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@sb{\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@k{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.00,0.40,0.60}{##1}}} -\def\PY at tok@se{\let\PY at bf=\textbf\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} -\def\PY at tok@sd{\let\PY at it=\textit\def\PY at tc##1{\textcolor[rgb]{0.80,0.20,0.00}{##1}}} - -\def\PYZbs{\char`\\} -\def\PYZus{\char`\_} -\def\PYZob{\char`\{} -\def\PYZcb{\char`\}} -\def\PYZca{\char`\^} -% for 
compatibility with earlier versions -\def\PYZat{@} -\def\PYZlb{[} -\def\PYZrb{]} -\makeatother - +\input{code/style.tex} \ifthenelse{\isundefined{\hypersetup}}{ \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref} @@ -100,7 +19,7 @@ } \newboolean{showcomments} -\setboolean{showcomments}{false} +\setboolean{showcomments}{true} \ifthenelse{\boolean{showcomments}} {\newcommand{\nb}[2]{ \fbox{\bfseries\sffamily\scriptsize#1} @@ -167,31 +86,32 @@ \end{abstract} +%___________________________________________________________________________ \section{Introduction} +%___________________________________________________________________________ \section{The PyPy Project} \label{sect:pypy} XXX +\cite{armin_rigo_pypys_2006} +%___________________________________________________________________________ \section{Tracing JIT Compilers} \label{sect:tracing} XXX +%___________________________________________________________________________ \section{Controlling The Extent of Tracing} -The question I was asked most often during my recent \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{US trip} was how exactly +XXX how exactly the hints work that interpreter authors can use to improve the execution speed -of the programs running on their interpreters. Since those hints are not really -documented all that well, I decided to write blog posts about them. This is the -first one. +of the programs running on their interpreters? -%___________________________________________________________________________ - \subsection{Background} First, let's recap some basics: PyPy's approach to implementing dynamic @@ -201,7 +121,7 @@ large number of generated C functions and some data. Similarly, the user program consists of functions in the language the interpreter executes. 
-As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a +XXX As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a meta-tracer. Since we want to re-use our tracer for a variety of languages, we don't trace the execution of the user program, but instead trace the execution of the \emph{interpreter} that is running the program. This means that the traces @@ -230,8 +150,6 @@ in the user program. -%___________________________________________________________________________ - \subsection{How Far Should Tracing Go} When the tracer encounters a function call at the interpreter level, e.g. the @@ -282,8 +200,6 @@ meaning that the trace with unrolling is not run to completion in most cases. -%___________________________________________________________________________ - \subsection{Influencing the Default Behaviour} Sometimes the default behaviour is not actually what is wanted. This is @@ -304,12 +220,10 @@ If the interpreter author finds false negatives or false positives, she can fix that by applying a hint to the tracer. These hints take the form of function -decorators (which both live in the \texttt{pypy.rlib.jit} module). In the next two -subsections I will describe these two function decorators and their use. +decorators (which both live in the \Verb|pypy.rlib.jit| module). In the next two +subsections we describe these two function decorators and their use. 
-%___________________________________________________________________________ - \subsubsection{Unrolling Functions With Loops} The first decorator, used to fix false negatives, is the \texttt{unroll\_safe} @@ -345,15 +259,13 @@ with the \texttt{unroll\_safe} decorator. -%___________________________________________________________________________ - \subsubsection{Preventing the Tracing of Functions} The second decorator \texttt{dont\_look\_inside} is used to fix false positives. It tells the JIT to never trace into the decorated function and just always produce a residual call instead. This decorator is in many ways less important than the -unrolling one (except for a special situation that I will describe in a -follow-up post). It is used if tracing into a function is not expected to yield +unrolling one (except for a special situation that is described in +Section XXX). It is used if tracing into a function is not expected to yield any speed benefits, because the optimizer will not be able to improve it much. This is often the case if the called helper function does not contain any ``dynamic'' behaviour. In such a situation it is better to just leave the function @@ -365,33 +277,29 @@ \texttt{dont\_look\_inside}. -%___________________________________________________________________________ - \subsection{Conclusion} -In this post we discussed two hints that can be used to control precisely which +In this section we discussed two hints that can be used to control precisely which parts of the interpreter should be meta-traced. If these hints are used carefully, this can go a long way to making the interpreter produce traces that contain exactly the interesting part of the execution, and will contain calls to the functions that can not be optimized by tracing techniques. -In the next part of this series I will discuss a different set of hints that can +In the next section we discuss a different set of hints that can be used to strongly optimize traces. 
+%___________________________________________________________________________ -% Document title -\section{Controlling the Tracing of an Interpreter With Hints, Part 2: Controlling Optimization} +\section{Controlling Optimization} -This is part 2 of a series on how to speed up an interpreter written with PyPy -by adding JIT hints to the interpreter. Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control the -extent of tracing}. In this post I will describe how to add hints that +The last section described how to control the +extent of tracing. In this section we will describe how to add hints that influence the optimizer. If applied correctly these techniques can give really big speedups by pre-computing parts of what happens at runtime. On the other hand, if applied incorrectly they might lead to code bloat, thus making the resulting program actually slower. -%___________________________________________________________________________ \subsection{Background} @@ -402,37 +310,27 @@ JIT because it only has to deal with linear traces. 
Among the techniques: % \begin{itemize} - -\item \href{http://en.wikipedia.org/wiki/Constant_folding}{constant folding} - -\item \href{http://en.wikipedia.org/wiki/Common_subexpression_elimination}{common subexpression elimination} - -\item allocation removal, as described in the paper that I recently \href{http://morepypy.blogspot.com/2011/03/us-trip-report-popl-microsoft-ibm.html}{presented at -PEPM} - -\item store/load propagation - -\item \href{http://morepypy.blogspot.com/2011/01/loop-invariant-code-motion.html}{loop invariant code motion} - + \item constant folding + \item common subexpression elimination + \item allocation removal \cite{bolz_allocation_2011} + \item store/load propagation + \item loop invariant code motion \end{itemize} In some places it turns out that if the interpreter author rewrites some parts of the interpreter with these optimizations in mind the traces that are produced by the optimizer can be vastly improved. -In this post I will describe two hints that allow the interpreter author to +In this section we describe two hints that allow the interpreter author to increase the optimization opportunities for constant folding. For constant folding to work, two conditions need to be met: % \begin{itemize} - -\item the arguments of an operation actually need to all be constant, -i.e. statically known by the optimizer - -\item the operation needs to be \emph{pure}, i.e. always yield the same result given -the same arguments. - + \item the arguments of an operation actually need to all be constant, + i.e. statically known by the optimizer + \item the operation needs to be \emph{pure}, i.e. always yield the same result given + the same arguments. \end{itemize} The PyPy JIT generator automatically detects the majority of these conditions. @@ -444,8 +342,6 @@ RPython source of the interpreter. Normal Python users will never see them. 
-%___________________________________________________________________________ - \subsection{Where Do All the Constants Come From} It is worth clarifying what is a ``constant'' in this context. A variable of @@ -579,12 +475,10 @@ program. An example would be the types of variables in a user function. Even though in principle the argument to a Python function could be any Python type, in practice the argument types tend to not vary often. Therefore it is possible to -promote the types. In the next blog post I will give a complete example of how +promote the types. The next section will present a complete example of how this works. -%___________________________________________________________________________ - \subsection{Declaring New Pure Operations} In the last section we saw a way to turn arbitrary variables into constants. All @@ -683,8 +577,6 @@ annotation. -%___________________________________________________________________________ - \subsubsection{Observably Pure Functions} Why can't we simply write an analysis to find out that the \texttt{x} fields of the @@ -700,7 +592,6 @@ of this function needs to be annotated. -%___________________________________________________________________________ \subsubsection{Immutable Fields} @@ -711,23 +602,21 @@ to using getters and annotating them with \texttt{purefunction}. -%___________________________________________________________________________ \subsection{Conclusion} -In this blog post I explained two more hints that can be used in the source code +In this section we presented two more hints that can be used in the source code of the interpreter. They are used to influence what the optimizer does with the -trace. I realize the examples given here are a bit too small, in the next -installment I will give a worked-out example that puts all the pieces together. +trace. The examples given here are a bit too small, the next +section gives a worked-out example that puts all the pieces together. 
-\section{Controlling the Tracing of an Interpreter With Hints, Part 3: Putting Things Together} +%___________________________________________________________________________ -This is part 3 of the series on how to speed up an interpreter written with -PyPy by adding JIT hints to the interpreter. Part 1 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with.html}{control -the extent of tracing}. Part 2 described how to \href{http://morepypy.blogspot.com/2011/03/controlling-tracing-of-interpreter-with_15.html}{influence the optimizer with -promotion and pure functions}. In this post I describe a worked-out example of +\section{Putting Things Together} + +In this section we describe a worked-out example of a small object model for a dynamic language and how to make it efficient using -the hints described in the previous posts. +the hints described in the previous sections. %___________________________________________________________________________ @@ -739,7 +628,7 @@ dictionaries everywhere. Let's look at an example of how the JIT can be made to optimize such operations. -For the purpose of this blog post we will use a very simple and bare-bones +For the purpose of this section we will use a very simple and bare-bones object model that just supports very simple classes and instances, without any inheritance or any fancy features. The model has classes, which contain methods. Instances have a class. Instances have their own attributes. 
When looking up an @@ -748,45 +637,13 @@ To implement this object model, we could use the following RPython code as part of the interpreter source code: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{k}{def} \PY{n+nf}{instantiate}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n}{Instance}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)} +\begin{figure} +\input{code/interpreter-slow.tex} +\caption{Original Version of a Simple Object Model} +\label{fig:interpreter-slow} +\end{figure} - \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} - - -\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - - \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n}{result} 
\PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} - - \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{try}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} -\end{Verbatim} In this straightforward implementation the methods and attributes are just stored in dictionaries on the classes/instances. 
While this object model is very @@ -806,34 +663,12 @@ \end{Verbatim} The trace could look like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{c}{# inst.getattr("a")} -\PY{n}{attributes1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} -\PY{n}{result1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result1} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{c}{# inst.getattr("b")} -\PY{n}{attributes2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} -\PY{n}{v1} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{v1} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods1} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} -\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} - -\PY{c}{# inst.getattr("c")} -\PY{n}{attributes3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{attributes} -\PY{n}{v3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{attributes3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{v3} \PY{o+ow}{is} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods2} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} -\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} - -\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} -\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} 
-\end{Verbatim} +\begin{figure} +\input{code/trace1.tex} +\caption{Trace Through the Object Model} +\label{fig:trace1} +\end{figure} In this example, the attribute \texttt{a} is found on the instance, but the attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains @@ -846,7 +681,7 @@ The first step in making \texttt{getattr} faster in our object model is to optimize away the dictionary lookups on the instances. The hints we have looked at in the -two earlier blog posts don't seem to help with the current object model. There is +two previous sections don't seem to help with the current object model. There is no pure function to be seen, and the instance is not a candidate for promotion, because there tend to be many instances. @@ -859,61 +694,16 @@ Therefore it makes sense to factor the layout information out of the instance implementation into a shared object. This shared layout object is called a -\emph{map}. Maps are an old idea that comes originally from the SELF language. They are -also used by many JavaScript implementations such as V8. I've \href{http://morepypy.blogspot.com/2010/11/efficiently-implementing-python-objects.html}{written about maps -before}, so I won't explain them fully again. +\emph{map}. Maps are an old idea that comes originally from the SELF language \cite{XXX}. They are +also used by many JavaScript implementations such as V8. 
The rewritten \texttt{Instance} class using maps looks like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} - - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:} - \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} - \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)} - \PY{n}{newmap}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attribute\PYZus{}indexes}\PY{p}{)} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{newmap} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} - - -\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} - -\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} - 
\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]} - - \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value} - \PY{k}{return} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{new\PYZus{}map\PYZus{}with\PYZus{}additional\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{try}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} - \PY{k}{return} 
\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} -\end{Verbatim} +\begin{figure} +\input{code/map.tex} +\caption{Simple Object Model With Maps} +\label{fig:maps} +\end{figure} Instances no longer use dictionaries to store their fields. Instead, they have a reference to a map, which maps field names to indexes into a storage list. The @@ -1175,13 +965,11 @@ \subsection{Conclusion} -In this post I showed how to use \texttt{purefunction} and \texttt{promote} to make a +In this section we saw how to use \texttt{purefunction} and \texttt{promote} to make a small but still relevant dynamic object model no longer use any dictionary lookups after tracing. Instead a number of guards are inserted into the trace to check whether the assumptions about the objects are still true. This -makes operations on objects seriously faster. I plan to write another small post -that shows the speed benefits for PyPy's Python interpreter for exactly these -operations. +makes operations on objects seriously faster. 
\section{Evaluation} \label{sect:evaluation} diff --git a/talk/icooolps2011/Makefile b/talk/icooolps2011/Makefile --- a/talk/icooolps2011/Makefile +++ b/talk/icooolps2011/Makefile @@ -1,5 +1,5 @@ -jit-hints.pdf: paper.tex paper.bib +jit-hints.pdf: paper.tex paper.bib code/interpreter-slow.tex code/map.tex pdflatex paper bibtex paper pdflatex paper @@ -11,3 +11,6 @@ xpdf: jit-hints.pdf xpdf jit-hints.pdf & + +%.tex: %.py + pygmentize -l python -o $@ $< diff --git a/talk/icooolps2011/code/map.tex b/talk/icooolps2011/code/map.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/map.tex @@ -0,0 +1,49 @@ +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} + + \PY{n+nd}{@purefunction} + \PY{k}{def} \PY{n+nf}{add\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:} + \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} + \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)} + \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} 
\PY{n}{newmap} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} + +\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} + +\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]} + + \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value} + \PY{k}{return} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{add\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)} + + 
\PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{try}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} +\end{Verbatim} diff --git a/talk/icooolps2011/code/style.tex b/talk/icooolps2011/code/style.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/style.tex @@ -0,0 +1,57 @@ + +\makeatletter +\def\PY at reset{\let\PY at it=\relax \let\PY at bf=\relax% + \let\PY at ul=\relax \let\PY at tc=\relax% + \let\PY at bc=\relax \let\PY at ff=\relax} +\def\PY at tok#1{\csname PY at tok@#1\endcsname} +\def\PY at toks#1+{\ifx\relax#1\empty\else% + \PY at tok{#1}\expandafter\PY at toks\fi} +\def\PY at do#1{\PY at bc{\PY at tc{\PY at ul{% + \PY at it{\PY at bf{\PY at ff{#1}}}}}}} +\def\PY#1#2{\PY at reset\PY at toks#1+\relax+\PY at do{#2}} + +\def\PY at tok@gu{\let\PY at bf=\textbf} +\def\PY at tok@gs{\let\PY at bf=\textbf} +\def\PY at tok@cm{\let\PY at it=\textit} +\def\PY at tok@gp{\let\PY at bf=\textbf} +\def\PY at tok@ge{\let\PY at it=\textit} +\def\PY at tok@cs{\let\PY at it=\textit} +\def\PY at tok@gh{\let\PY at bf=\textbf} +\def\PY at tok@ni{\let\PY at bf=\textbf} +\def\PY at tok@nn{\let\PY at bf=\textbf} +\def\PY at tok@s2{\let\PY at it=\textit} +\def\PY at tok@s1{\let\PY at it=\textit} +\def\PY at tok@nc{\let\PY at bf=\textbf} +\def\PY at tok@ne{\let\PY at bf=\textbf} +\def\PY at tok@si{\let\PY at bf=\textbf\let\PY at it=\textit} +\def\PY at tok@nt{\let\PY at bf=\textbf} +\def\PY at tok@ow{\let\PY at bf=\textbf} +\def\PY at tok@c1{\let\PY at it=\textit} +\def\PY at tok@kc{\let\PY at bf=\textbf} +\def\PY at tok@c{\let\PY at it=\textit} +\def\PY at tok@sx{\let\PY at it=\textit} +\def\PY at tok@err{\def\PY at bc##1{\fcolorbox[rgb]{1.00,0.00,0.00}{1,1,1}{##1}}} +\def\PY at 
tok@kd{\let\PY at bf=\textbf} +\def\PY at tok@ss{\let\PY at it=\textit} +\def\PY at tok@sr{\let\PY at it=\textit} +\def\PY at tok@k{\let\PY at bf=\textbf} +\def\PY at tok@kn{\let\PY at bf=\textbf} +\def\PY at tok@kr{\let\PY at bf=\textbf} +\def\PY at tok@s{\let\PY at it=\textit} +\def\PY at tok@sh{\let\PY at it=\textit} +\def\PY at tok@sc{\let\PY at it=\textit} +\def\PY at tok@sb{\let\PY at it=\textit} +\def\PY at tok@se{\let\PY at bf=\textbf\let\PY at it=\textit} +\def\PY at tok@sd{\let\PY at it=\textit} + +\def\PYZbs{\char`\\} +\def\PYZus{\char`\_} +\def\PYZob{\char`\{} +\def\PYZcb{\char`\}} +\def\PYZca{\char`\^} +% for compatibility with earlier versions +\def\PYZat{@} +\def\PYZlb{[} +\def\PYZrb{]} +\makeatother + diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/paper.bib @@ -0,0 +1,261 @@ + + at inproceedings{carl_friedrich_bolz_towards_????, + series = {{LNCS} 6037 to appear}, + title = {Towards {Just-In-Time} Partial Evaluation of Prolog}, + abstract = {We introduce a just-in-time specializer for Prolog. Just-in- +time specialization attempts to unify of the concepts and benefits of +partial evaluation {(PE)} and just-in-time {(JIT)} compilation. It is a variant +of {PE} that occurs purely at runtime, which lazily generates residual code +and is constantly driven by runtime feedback. +Our prototype is an on-line just-in-time partial evaluator. A major fo- +cus of our work is to remove the overhead incurred when executing an +interpreter written in Prolog. It improves over classical offline {PE} by re- +quiring almost no heuristics nor hints from the author of the interpreter; +it also avoids most termination issues due to interleaving execution and +specialization. 
We evaluate the performance of our prototype on a small +number of benchmarks.}, + booktitle = {Logic-based Program Synthesis and Transformation {(LOPSTR'2009)}}, + publisher = {{Springer-Verlag}}, + author = {Carl Friedrich Bolz and Michael Leuschel and Armin Rigo} +}, + + at phdthesis{cuni_high_2010, + title = {High performance implementation of Python for {CLI/.NET} with {JIT} compiler generation for dynamic languages.}, + school = {Dipartimento di Informatica e Scienze {dell'Informazione,} University of Genova}, + author = {Antonio Cuni}, + year = {2010}, + note = {Technical Report {DISI-TH-2010-05}} +}, + + at inproceedings{carl_friedrich_bolz_towards_2010, + address = {Hagenberg, Austria}, + title = {Towards a Jitting {VM} for Prolog execution}, + isbn = {978-1-4503-0132-9}, + url = {http://portal.acm.org/citation.cfm?id=1836102}, + doi = {10.1145/1836089.1836102}, + abstract = {Most Prolog implementations are implemented in low-level languages such as C and are based on a variation of the {WAM} instruction set, which enhances their performance but makes them hard to write. In addition, many of the more dynamic features of Prolog (like assert), despite their popularity, are not well supported. We present a high-level continuation-based Prolog interpreter based on the {PyPy} project. The {PyPy} project makes it possible to easily and efficiently implement dynamic languages. It provides tools that automatically generate a just-in-time compiler for a given interpreter of the target language, by using partial evaluation techniques. The resulting Prolog implementation is surprisingly efficient: it clearly outperforms existing interpreters of Prolog in high-level languages such as Java. Moreover, on some benchmarks, our system outperforms state-of-the-art {WAM-based} Prolog implementations. 
Our paper aims to show that declarative languages such as Prolog can indeed benefit from having a just-in-time compiler and that {PyPy} can form the basis for implementing programming languages other than Python.}, + booktitle = {Proceedings of the 12th international {ACM} {SIGPLAN} symposium on Principles and practice of declarative programming}, + publisher = {{ACM}}, + author = {Carl Friedrich Bolz and Michael Leuschel and David Schneider}, + year = {2010}, + keywords = {interpreters, jit, logic programming, partial evaluation}, + pages = {99--108} +}, + + at inproceedings{garg_compiling_2010, + address = {Pittsburgh, Pennsylvania}, + title = {Compiling Python to a hybrid execution environment}, + isbn = {978-1-60558-935-0}, + url = {http://portal.acm.org/citation.cfm?id=1735695&dl=GUIDE&coll=GUIDE&CFID=108695705&CFTOKEN=81778166}, + doi = {10.1145/1735688.1735695}, + abstract = {A new compilation framework enables the execution of numerical-intensive applications, written in Python, on a hybrid execution environment formed by a {CPU} and a {GPU.} This compiler automatically computes the set of memory locations that need to be transferred to the {GPU,} and produces the correct mapping between the {CPU} and the {GPU} address spaces. Thus, the programming model implements a virtual shared address space. This framework is implemented as a combination of {unPython,} an ahead-of-time compiler from {Python/NumPy} to the C programming language, and {jit4GPU,} a just-in-time compiler from C to the {AMD} {CAL} interface. Experimental evaluation demonstrates that for some benchmarks the generated {GPU} code is 50 times faster than generated {OpenMP} code. 
The {GPU} performance also compares favorably with optimized {CPU} {BLAS} code for single-precision computations in most cases.}, + booktitle = {Proceedings of the 3rd Workshop on {General-Purpose} Computation on Graphics Processing Units}, + publisher = {{ACM}}, + author = {Rahul Garg and Jos\'{e} Nelson Amaral}, + year = {2010}, + pages = {19--30} +}, + + at inproceedings{bebenita_spur:_2010, + address = {{Reno/Tahoe,} Nevada, {USA}}, + title = {{SPUR:} a trace-based {JIT} compiler for {CIL}}, + isbn = {978-1-4503-0203-6}, + shorttitle = {{SPUR}}, + url = {http://portal.acm.org/citation.cfm?id=1869459.1869517&coll=GUIDE&dl=GUIDE&type=series&idx=SERIES318&part=series&WantType=Proceedings&title=OOPSLA%2FSPLASH&CFID=106280261&CFTOKEN=29377718}, + doi = {10.1145/1869459.1869517}, + abstract = {Tracing just-in-time compilers {(TJITs)} determine frequently executed traces (hot paths and loops) in running programs and focus their optimization effort by emitting optimized machine code specialized to these traces. 
Prior work has established this strategy to be especially beneficial for dynamic languages such as {JavaScript,} where the {TJIT} interfaces with the interpreter and produces machine code from the {JavaScript} trace.}, + booktitle = {Proceedings of the {ACM} international conference on Object oriented programming systems languages and applications}, + publisher = {{ACM}}, + author = {Michael Bebenita and Florian Brandner and Manuel Fahndrich and Francesco Logozzo and Wolfram Schulte and Nikolai Tillmann and Herman Venter}, + year = {2010}, + keywords = {cil, dynamic compilation, javascript, just-in-time, tracing}, + pages = {708--725} +}, + + at article{bolz_allocation_2011, + series = {{PEPM} '11}, + title = {Allocation removal by partial evaluation in a tracing {JIT}}, + location = {Austin, Texas, {USA}}, + doi = {10.1145/1929501.1929508}, + abstract = {The performance of many dynamic language implementations suffers from high allocation rates and runtime type checks. This makes dynamic languages less applicable to purely algorithmic problems, despite their growing popularity. 
In this paper we present a simple compiler optimization based on online partial evaluation to remove object allocations and runtime type checks in the context of a tracing {JIT.} We evaluate the optimization using a Python {VM} and find that it gives good results for all our (real-life) benchmarks.}, + journal = {Proceedings of the 20th {ACM} {SIGPLAN} workshop on Partial evaluation and program manipulation}, + author = {Carl Friedrich Bolz and Antonio Cuni and Maciej {Fija\l{}kowski} and Michael Leuschel and Samuele Pedroni and Armin Rigo}, + year = {2011}, + note = {{ACM} {ID:} 1929508}, + keywords = {code generation, experimentation, interpreters, languages, optimization, partial evaluation, performance, run-time environments, tracing jit}, + pages = {43{\textendash}52} +}, + + at inproceedings{chang_tracing_2009, + address = {Washington, {DC,} {USA}}, + title = {Tracing for Web 3.0: Trace Compilation for the Next Generation Web Applications}, + isbn = {978-1-60558-375-4}, + shorttitle = {Tracing for web 3.0}, + url = {http://portal.acm.org/citation.cfm?id=1508293.1508304}, + doi = {10.1145/1508293.1508304}, + abstract = {Today's web applications are pushing the limits of modern web browsers. The emergence of the browser as the platform of choice for rich client-side applications has shifted the use of in-browser {JavaScript} from small scripting programs to large computationally intensive application logic. For many web applications, {JavaScript} performance has become one of the bottlenecks preventing the development of even more interactive client side applications. While traditional just-in-time compilation is successful for statically typed virtual machine based languages like Java, compiling {JavaScript} turns out to be a challenging task. Many {JavaScript} programs and scripts are short-lived, and users expect a responsive browser during page loading. 
This leaves little time for compilation of {JavaScript} to generate machine code.}, + booktitle = {Proceedings of the 2009 {ACM} {SIGPLAN/SIGOPS} International Conference on Virtual Execution Environments}, + publisher = {{ACM}}, + author = {Mason Chang and Edwin Smith and Rick Reitmaier and Michael Bebenita and Andreas Gal and Christian Wimmer and Brendan Eich and Michael Franz}, + year = {2009}, + keywords = {dynamically typed languages, forth, tamarin, trace trees, tracing, type specialization}, + pages = {71--80} +}, + + at phdthesis{carl_friedrich_bolz_automatic_2008, + type = {Master Thesis}, + title = {Automatic {JIT} Compiler Generation with Runtime Partial Evaluation}, + school = {{Heinrich-Heine-Universit\"{a}t} D\"{u}sseldorf}, + author = {Carl Friedrich Bolz}, + year = {2008} +}, + + at inproceedings{davide_ancona_rpython:_2007, + address = {Montreal, Quebec, Canada}, + title = {{RPython:} a step towards reconciling dynamically and statically typed {OO} languages}, + isbn = {978-1-59593-868-8}, + shorttitle = {{RPython}}, + url = {http://portal.acm.org/citation.cfm?id=1297091}, + doi = {10.1145/1297081.1297091}, + abstract = {Although the C-based interpreter of Python is reasonably fast, implementations on the {CLI} or the {JVM} platforms offers some advantages in terms of robustness and interoperability. Unfortunately, because the {CLI} and {JVM} are primarily designed to execute statically typed, object-oriented languages, most dynamic language implementations cannot use the native bytecodes for common operations like method calls and exception handling; as a result, they are not able to take full advantage of the power offered by the {CLI} and {JVM.}}, + booktitle = {Proceedings of the 2007 symposium on Dynamic languages}, + publisher = {{ACM}}, + author = {Davide Ancona and Massimo Ancona and Antonio Cuni and Nicholas D. 
Matsakis}, + year = {2007}, + keywords = {{JVM,} .net, Python}, + pages = {53--64} +}, + + at inproceedings{armin_rigo_pypys_2006, + address = {Portland, Oregon, {USA}}, + title = {{PyPy's} approach to virtual machine construction}, + isbn = {{1-59593-491-X}}, + url = {http://portal.acm.org/citation.cfm?id=1176753}, + doi = {10.1145/1176617.1176753}, + abstract = {The {PyPy} project seeks to prove both on a research and a practical level the feasibility of constructing a virtual machine {(VM)} for a dynamic language in a dynamic language - in this case, Python. The aim is to translate (i.e. compile) the {VM} to arbitrary target environments, ranging in level from {C/Posix} to {Smalltalk/Squeak} via Java and {CLI/.NET,} while still being of reasonable efficiency within these environments. A key tool to achieve this goal is the systematic reuse of the Python language as a system programming language at various levels of our architecture and translation process. For each level, we design a corresponding type system and apply a generic type inference engine - for example, the garbage collector is written in a style that manipulates simulated pointer and address objects, and when translated to C these operations become C-level pointer and address instructions.}, + booktitle = {Companion to the 21st {ACM} {SIGPLAN} conference on Object-oriented programming systems, languages, and applications}, + publisher = {{ACM}}, + author = {Armin Rigo and Samuele Pedroni}, + year = {2006}, + keywords = {metacircularity, Python, retargettable code generation, type inference, {VM}}, + pages = {944--953} +}, + + at article{georges_statistically_2007, + title = {Statistically rigorous java performance evaluation}, + volume = {42}, + url = {http://portal.acm.org/citation.cfm?id=1297105.1297033}, + doi = {10.1145/1297105.1297033}, + abstract = {Java performance is far from being trivial to benchmark because it is affected by various factors such as the Java application, its input, the 
virtual machine, the garbage collector, the heap size, etc. In addition, non-determinism at run-time causes the execution time of a Java program to differ from run to run. There are a number of sources of non-determinism such as {Just-In-Time} {(JIT)} compilation and optimization in the virtual machine {(VM)} driven by timer-based method sampling, thread scheduling, garbage collection, and various.}, + number = {10}, + journal = {{SIGPLAN} Not.}, + author = {Andy Georges and Dries Buytaert and Lieven Eeckhout}, + year = {2007}, + keywords = {benchmarking, data analysis, methodology, statistics}, + pages = {57--76}, + annote = {{{\textless}p{\textgreater}The} paper evaluates the various ways in which a number of Java papers do their Java benchmarks. It then proposes a statistically correct way to do this and compares common approaches against the statistically correct way. Especially if the results of two alternatives are very close together, many common approaches can lead to systematic errors.{\textless}/p{\textgreater}} +}, + + at inproceedings{andreas_gal_trace-based_2009, + title = {Trace-based {Just-in-Time} Type Specialization for Dynamic Languages}, + booktitle = {{PLDI}}, + author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and Blake Kaplan and Graydon Hoare and David Mandelin and Boris Zbarsky and Jason Orendorff and Michael Bebenita and Mason Chang and Michael Franz and Edwin Smith and Rick Reitmaier and Mohammad Haghighat}, + year = {2009}, + keywords = {toappear} +}, + + at inproceedings{bolz_tracing_2009, + address = {Genova, Italy}, + title = {Tracing the meta-level: {PyPy's} tracing {JIT} compiler}, + isbn = {978-1-60558-541-3}, + shorttitle = {Tracing the meta-level}, + url = {http://portal.acm.org/citation.cfm?id=1565827}, + doi = {10.1145/1565824.1565827}, + abstract = {We attempt to apply the technique of Tracing {JIT} Compilers in the context of the {PyPy} project, i.e., to programs that are interpreters for some dynamic 
languages, including Python. Tracing {JIT} compilers can greatly speed up programs that spend most of their time in loops in which they take similar code paths. However, applying an unmodified tracing {JIT} to a program that is itself a bytecode interpreter results in very limited or no speedup. In this paper we show how to guide tracing {JIT} compilers to greatly improve the speed of bytecode interpreters. One crucial point is to unroll the bytecode dispatch loop, based on two kinds of hints provided by the implementer of the bytecode interpreter. We evaluate our technique by applying it to two {PyPy} interpreters: one is a small example, and the other one is the full Python interpreter.}, + booktitle = {Proceedings of the 4th workshop on the Implementation, Compilation, Optimization of {Object-Oriented} Languages and Programming Systems}, + publisher = {{ACM}}, + author = {Carl Friedrich Bolz and Antonio Cuni and Maciej Fija\l{}kowski and Armin Rigo}, + year = {2009}, + pages = {18--25} +}, + + at techreport{armin_rigo_jit_2007, + title = {{JIT} Compiler Architecture}, + url = {http://codespeak.net/pypy/dist/pypy/doc/index-report.html}, + abstract = {{PyPy{\textquoteright}s} translation tool-chain {\textendash} from the interpreter written in {RPython} to generated {VMs} for low-level platforms {\textendash} is now able to extend those {VMs} with an automatically generated dynamic compiler, derived from the interpreter. This is achieved by a pragmatic application of partial evaluation techniques guided by a few hints added to the source of the interpreter. Crucial for the effectiveness of dynamic compilation is the use of run-time information to improve compilation results: in our approach, a novel powerful primitive called {\textquotedblleft}promotion{\textquotedblright} that {\textquotedblleft}promotes{\textquotedblright} run-time values to compile-time is used to that effect. 
In this report, we describe it along with other novel techniques that allow the approach to scale to something as large as {PyPy{\textquoteright}s} Python interpreter.}, + number = {D08.2}, + institution = {{PyPy}}, + author = {Armin Rigo and Samuele Pedroni}, + month = may, + year = {2007} +}, + + at article{bala_dynamo:_2000, + title = {Dynamo: a transparent dynamic optimization system}, + volume = {35}, + shorttitle = {Dynamo}, + url = {http://citeseer.ist.psu.edu/bala00dynamo.html}, + number = {5}, + journal = {{ACM} {SIGPLAN} Notices}, + author = {Vasanth Bala and Evelyn Duesterwald and Sanjeev Banerjia}, + year = {2000}, + keywords = {toread}, + pages = {1--12} +}, + + at inproceedings{gal_hotpathvm:_2006, + address = {Ottawa, Ontario, Canada}, + title = {{HotpathVM:} an effective {JIT} compiler for resource-constrained devices}, + isbn = {1-59593-332-6}, + shorttitle = {{HotpathVM}}, + url = {http://portal.acm.org/citation.cfm?doid=1134760.1134780}, + doi = {10.1145/1134760.1134780}, + abstract = {We present a just-in-time compiler for a Java {VM} that is small enough to fit on resource-constrained devices, yet is surprisingly effective. Our system dynamically identifies traces of frequently executed bytecode instructions (which may span several basic blocks across several methods) and compiles them via Static Single Assignment {(SSA)} construction. Our novel use of {SSA} form in this context allows to hoist instructions across trace side-exits without necessitating expensive compensation code in off-trace paths. The overall memory consumption (code and data) of our system is only 150 {kBytes,} yet benchmarks show a speedup that in some cases rivals heavy-weight just-in-time compilers.}, + booktitle = {Proceedings of the 2nd international conference on Virtual execution environments}, + publisher = {{ACM}}, + author = {Andreas Gal and Christian W. 
Probst and Michael Franz}, + year = {2006}, + keywords = {dynamic compilation, embedded, software trace scheduling, {SSA,} {VM}}, + pages = {144--153} +}, + + at inproceedings{carl_friedrich_bolz_how_2007, + title = {How to not write a Virtual Machine}, + abstract = {Typical modern dynamic languages have a growing number of implementations. We explore the reasons for this situation, and the limitations it imposes on open source or academic communities that lack the resources to fine-tune and maintain them all. It is sometimes proposed that implementing dynamic languages on top of a standardized general-purpose object-oriented virtual machine (like Java or {.NET)} would help reduce this burden. We propose a complementary alternative to writing custom virtual machine {(VMs)} by hand, validated by the {PyPy} project: flexibly generating {VMs} from a high-level "specification", +inserting features and low-level details automatically {\textendash} including good just-in-time compilers tuned to the dynamic language at hand. +We believe this to be ultimately a better investment of efforts than the development of more and more advanced general-purpose object +oriented {VMs.} In this paper we compare these two approaches in detail.}, + booktitle = {Proceedings of the 3rd Workshop on Dynamic Languages and Applications {(DYLA} 2007)}, + author = {Carl Friedrich Bolz and Armin Rigo}, + year = {2007} +}, + + at inproceedings{rigo_representation-based_2004, + address = {Verona, Italy}, + title = {Representation-based just-in-time specialization and the Psyco prototype for Python}, + isbn = {1-58113-835-0}, + url = {http://portal.acm.org/citation.cfm?id=1014010}, + doi = {10.1145/1014007.1014010}, + abstract = {A powerful application of specialization is to remove interpretative overhead: a language can be implemented with an interpreter, whose performance is then improved by specializing it for a given program source. 
This approach is only moderately successful with very high level languages, where the operation of each single step can be highly dependent on run-time data and context. In the present paper, the Psyco prototype for the Python language is presented. It introduces two novel techniques. The first is just-in-time specialization, or specialization by need, which introduces the "unlifting" ability for a value to be promoted from run-time to compile-time during specialization -- the inverse of the lift operator of partial evaluation. Its presence gives an unusual and powerful perspective on the specialization process. The second technique is representations, a theory of data-oriented specialization generalizing the traditional specialization domains (i.e. the compile-time/run-time dichotomy).}, + booktitle = {Proceedings of the 2004 {ACM} {SIGPLAN} symposium on Partial evaluation and semantics-based program manipulation}, + publisher = {{ACM}}, + author = {Armin Rigo}, + year = {2004}, + keywords = {{JIT,} Python}, + pages = {15--26} +}, + + at incollection{carl_friedrich_bolz_back_2008, + title = {Back to the Future in One Week {\textemdash} Implementing a Smalltalk {VM} in {PyPy}}, + url = {http://dx.doi.org/10.1007/978-3-540-89275-5_7}, + abstract = {We report on our experiences with the Spy project, including implementation details and benchmark results. Spy is a re-implementation of the Squeak (i.e. Smalltalk-80) {VM} using the {PyPy} toolchain. The {PyPy} project allows code written in {RPython,} a subset of Python, to be translated +to a multitude of different backends and architectures. During the translation, many aspects of the implementation can be +independently tuned, such as the garbage collection algorithm or threading implementation. In this way, a whole host of interpreters +can be derived from one abstract interpreter definition. Spy aims to bring these benefits to Squeak, allowing for greater portability and, eventually, improved performance. 
The current +Spy codebase is able to run a small set of benchmarks that demonstrate performance superior to many similar Smalltalk {VMs,} but +which still run slower than in Squeak itself. Spy was built from scratch over the course of a week during a joint {Squeak-PyPy} Sprint in Bern last autumn.}, + booktitle = {{Self-Sustaining} Systems}, + author = {Carl Friedrich Bolz and Adrian Kuhn and Adrian Lienhard and Nicholas Matsakis and Oscar Nierstrasz and Lukas Renggli and Armin Rigo and Toon Verwaest}, + year = {2008}, + pages = {123--139} +} \ No newline at end of file diff --git a/talk/icooolps2011/code/map.py b/talk/icooolps2011/code/map.py new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/map.py @@ -0,0 +1,47 @@ +class Map(object): + def __init__(self): + self.indexes = {} + self.other_maps = {} + + @purefunction + def getindex(self, name): + return self.indexes.get(name, -1) + + @purefunction + def add_attribute(self, name): + if name not in self.other_maps: + newmap = Map() + newmap.indexes.update(self.indexes) + newmap.indexes[name] = len(self.indexes) + self.other_maps[name] = newmap + return self.other_maps[name] + +EMPTY_MAP = Map() + +class Instance(object): + def __init__(self, cls): + self.cls = cls + self.map = EMPTY_MAP + self.storage = [] + + def getfield(self, name): + map = hint(self.map, promote=True) + index = map.getindex(name) + if index != -1: + return self.storage[index] + raise AttributeError(name) + + def write_attribute(self, name, value): + map = hint(self.map, promote=True) + index = map.getindex(name) + if index != -1: + self.storage[index] = value + return + self.map = map.add_attribute(name) + self.storage.append(value) + + def getattr(self, name): + try: + return self.getfield(name) + except AttributeError: + return self.cls.find_method(name) diff --git a/talk/icooolps2011/code/interpreter-slow.py b/talk/icooolps2011/code/interpreter-slow.py new file mode 100644 --- /dev/null +++ 
b/talk/icooolps2011/code/interpreter-slow.py @@ -0,0 +1,37 @@ +class Class(object): + def __init__(self, name): + self.name = name + self.methods = {} + + def instantiate(self): + return Instance(self) + + def find_method(self, name): + result = self.methods.get(name) + if result is not None: + return result + raise AttributeError(name) + + def change_method(self, name, value): + self.methods[name] = value + + +class Instance(object): + def __init__(self, cls): + self.cls = cls + self.attributes = {} + + def getfield(self, name): + result = self.attributes.get(name) + if result is not None: + return result + raise AttributeError(name) + + def write_attribute(self, name, value): + self.attributes[name] = value + + def getattr(self, name): + try: + return self.getfield(name) + except AttributeError: + return self.cls.find_method(name) diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/trace1.tex @@ -0,0 +1,28 @@ +\begin{Verbatim} +# inst.getattr("a") +attributes1 = inst.attributes +result1 = dict.get(attributes1, "a") +guard(result1 is not None) + +# inst.getattr("b") +attributes2 = inst.attributes +v1 = dict.get(attributes2, "b") +guard(v1 is None) +cls1 = inst.cls +methods1 = cls.methods +result2 = dict.get(methods1, "b") +guard(result2 is not None) +v2 = result1 + result2 + +# inst.getattr("c") +attributes3 = inst.attributes +v3 = dict.get(attributes3, "c") +guard(v3 is None) +cls1 = inst.cls +methods2 = cls.methods +result3 = dict.get(methods2, "c") +guard(result3 is not None) + +v4 = v2 + result3 +return(v4) +\end{Verbatim} From commits-noreply at bitbucket.org Thu Mar 24 15:19:40 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 15:19:40 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: more figureyfication Message-ID: <20110324141940.CF2322A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3394:901a8645f1ba Date: 
2011-03-24 15:19 +0100 http://bitbucket.org/pypy/extradoc/changeset/901a8645f1ba/ Log: more figureyfication diff --git a/talk/icooolps2011/code/trace2.tex b/talk/icooolps2011/code/trace2.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/trace2.tex @@ -0,0 +1,33 @@ +\begin{Verbatim} +# inst.getattr("a") +map1 = inst.map +guard(map1 == 0xb74af4a8) +index1 = Map.getindex(map1, "a") +guard(index1 != -1) +storage1 = inst.storage +result1 = storage1[index1] + +# inst.getattr("b") +map2 = inst.map +guard(map2 == 0xb74af4a8) +index2 = Map.getindex(map2, "b") +guard(index2 == -1) +cls1 = inst.cls +methods1 = cls.methods +result2 = dict.get(methods1, "b") +guard(result2 is not None) +v2 = result1 + result2 + +# inst.getattr("c") +map3 = inst.map +guard(map3 == 0xb74af4a8) +index3 = Map.getindex(map3, "c") +guard(index3 == -1) +cls1 = inst.cls +methods2 = cls.methods +result3 = dict.get(methods2, "c") +guard(result3 is not None) + +v4 = v2 + result3 +return(v4) +\end{Verbatim} diff --git a/talk/icooolps2011/code/trace5.tex b/talk/icooolps2011/code/trace5.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/trace5.tex @@ -0,0 +1,18 @@ +\begin{Verbatim} +# inst.getattr("a") +map1 = inst.map +guard(map1 == 0xb74af4a8) +storage1 = inst.storage +result1 = storage1[0] + +# inst.getattr("b") +cls1 = inst.cls +guard(cls1 == 0xb7aaaaf8) +version1 = cls1.version +guard(version1 == 0xb7bbbb18) +v2 = result1 + 41 + +# inst.getattr("c") +v4 = v2 + 17 +return(v4) +\end{Verbatim} diff --git a/talk/icooolps2011/code/interpreter-slow.tex b/talk/icooolps2011/code/interpreter-slow.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/interpreter-slow.tex @@ -0,0 +1,39 @@ +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + 
\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{k}{def} \PY{n+nf}{instantiate}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n}{Instance}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + + +\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + + \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + + 
\PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{k}{try}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} + \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} +\end{Verbatim} diff --git a/talk/icooolps2011/code/version.tex b/talk/icooolps2011/code/version.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/version.tex @@ -0,0 +1,26 @@ +\begin{Verbatim}[commandchars=\\\{\}] +\PY{k}{class} \PY{n+nc}{VersionTag}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{pass} + +\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} + \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{version} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} + \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)} + \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} + \PY{k}{return} \PY{n}{result} + \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{n+nd}{@purefunction} + \PY{k}{def} 
\PY{n+nf}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)}\PY{p}{:} + \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + + \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} +\end{Verbatim} diff --git a/talk/icooolps2011/code/trace3.tex b/talk/icooolps2011/code/trace3.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/trace3.tex @@ -0,0 +1,23 @@ +\begin{Verbatim} +# inst.getattr("a") +map1 = inst.map +guard(map1 == 0xb74af4a8) +storage1 = inst.storage +result1 = storage1[0] + +# inst.getattr("b") +cls1 = inst.cls +methods1 = cls1.methods +result2 = dict.get(methods1, "b") +guard(result2 is not None) +v2 = result1 + result2 + +# inst.getattr("c") +cls2 = inst.cls +methods2 = cls2.methods +result3 = dict.get(methods2, "c") +guard(result3 is not None) + +v4 = v2 + result3 +return(v4) +\end{Verbatim} diff --git a/talk/icooolps2011/Makefile b/talk/icooolps2011/Makefile --- a/talk/icooolps2011/Makefile +++ b/talk/icooolps2011/Makefile @@ -1,5 +1,5 @@ -jit-hints.pdf: paper.tex paper.bib code/interpreter-slow.tex code/map.tex +jit-hints.pdf: paper.tex paper.bib code/interpreter-slow.tex code/map.tex code/version.tex pdflatex paper bibtex paper pdflatex paper diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -89,6 +89,10 @@ %___________________________________________________________________________ \section{Introduction} +XXX how exactly +the hints work that interpreter authors can use to improve the execution speed +of the programs running on their interpreters? 
+ %___________________________________________________________________________ \section{The PyPy Project} @@ -107,10 +111,6 @@ %___________________________________________________________________________ \section{Controlling The Extent of Tracing} -XXX how exactly -the hints work that interpreter authors can use to improve the execution speed -of the programs running on their interpreters? - \subsection{Background} @@ -133,15 +133,14 @@ the loop in the user function that is being considered. At this point, it can have traced many iterations of the interpreter main loop. -Here's a diagram of this process: - \begin{figure*} \includegraphics[scale=0.5]{figures/trace-levels} \caption{The levels involved in tracing} \label{fig:trace-levels} \end{figure*} -On the left you see the levels of execution. The CPU executes the binary of +Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left you +see the levels of execution. The CPU executes the binary of PyPy's Python interpreter, which consists of RPython functions that have been compiled first to C, then to machine code. Some of these functions contain loops, others don't. The interpreter runs a Python program written by a @@ -658,19 +657,19 @@ find a way to get rid of these dictionary lookups somehow. 
Let's assume we trace through code that sums three attributes, such as: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} \PY{o}{+} \PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} \PY{o}{+} \PY{n}{inst}\PY{o}{.}\PY{n}{getattr}\PY{p}{(}\PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} + +\begin{Verbatim} +inst.getattr("a") + inst.getattr("b") + inst.getattr("c") \end{Verbatim} -The trace could look like this: - \begin{figure} \input{code/trace1.tex} \caption{Trace Through the Object Model} \label{fig:trace1} \end{figure} -In this example, the attribute \texttt{a} is found on the instance, but the +The trace would look like in Figure~\ref{fig:trace1}. In this example, the +attribute \texttt{a} is found on the instance, but the attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains five calls to \texttt{dict.get}, which is slow. @@ -717,69 +716,25 @@ With this changed instance implementation, the trace we had above changes to the following, where \texttt{0xb74af4a8} is the memory address of the Map instance that -has been promoted: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{c}{# inst.getattr("a")} -\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index1} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index1} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} -\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{n}{index1}\PY{p}{]} +has been promoted, see Figure~\ref{fig:trace2}. 
-\PY{c}{# inst.getattr("b")} -\PY{n}{map2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map2} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index2} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index2} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods1} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} -\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} - -\PY{c}{# inst.getattr("c")} -\PY{n}{map3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map3} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index3} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index3} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods2} \PY{o}{=} \PY{n}{cls}\PY{o}{.}\PY{n}{methods} -\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} - -\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} -\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} -\end{Verbatim} +\begin{figure} +\input{code/trace2.tex} +\caption{Unoptimized Trace After the Introduction of Maps} +\label{fig:trace2} +\end{figure} The calls to \texttt{Map.getindex} can be optimized away, because they are calls to a pure function and they have constant arguments. 
That means that \texttt{index1/2/3} are constant and the guards on them can be removed. All but the first guard on the map will be optimized away too, because the map cannot have changed in -between. The optimized trace looks like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{c}{# inst.getattr("a")} -\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} -\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{l+m+mi}{0}\PY{p}{]} +between. The optimized trace looks can be seen in Figure~\ref{fig:trace3} -\PY{c}{# inst.getattr("b")} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{methods} -\PY{n}{result2} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} - -\PY{c}{# inst.getattr("c")} -\PY{n}{cls2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{methods2} \PY{o}{=} \PY{n}{cls2}\PY{o}{.}\PY{n}{methods} -\PY{n}{result3} \PY{o}{=} \PY{n+nb}{dict}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{methods2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} - -\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} -\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} -\end{Verbatim} +\begin{figure} +\input{code/trace3.tex} +\caption{Optimized Trace After the Introduction of Maps} +\label{fig:trace3} +\end{figure} The index \texttt{0} that is used to read out of the \texttt{storage} array is the result of the constant-folded \texttt{getindex} call. This trace is already much better than @@ -807,99 +762,36 @@ version)} pair will always be the same, i.e. 
it is a pure operation. To help the JIT to detect this case, we factor it out in a helper method which is explicitly marked as \texttt{@purefunction}. The refactored \texttt{Class} looks like -this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{VersionTag}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{pass} +in Figure~\ref{fig:version} -\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{version} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} - - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} - - \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - 
\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} -\end{Verbatim} +\begin{figure} +\input{code/version.tex} +\caption{Versioning of Classes} +\label{fig:version} +\end{figure} What is interesting here is that \texttt{\_find\_method} takes the \texttt{version} argument but it does not use it at all. Its only purpose is to make the call pure (because when the version number changes, the result of the call might be different than the previous one). -The trace with this new class implementation looks like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{c}{# inst.getattr("a")} -\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index1} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map1}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{a}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index1} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} -\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{n}{index1}\PY{p}{]} +\begin{figure} +\input{code/trace4.tex} +\caption{Unoptimized Trace After Introduction of Versioned Classes} +\label{fig:trace4} +\end{figure} -\PY{c}{# inst.getattr("b")} -\PY{n}{map2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map2} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index2} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map2}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index2} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{guard}\PY{p}{(}\PY{n}{cls1} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} -\PY{n}{version1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{version} -\PY{n}{guard}\PY{p}{(}\PY{n}{version1} \PY{o}{==} 
\PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} -\PY{n}{result2} \PY{o}{=} \PY{n}{Class}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{cls}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{b}\PY{l+s}{"}\PY{p}{,} \PY{n}{version1}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result2} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} -\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{n}{result2} - -\PY{c}{# inst.getattr("c")} -\PY{n}{map3} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map3} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{index3} \PY{o}{=} \PY{n}{Map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{map3}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{index3} \PY{o}{==} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} -\PY{n}{cls2} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{guard}\PY{p}{(}\PY{n}{cls2} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} -\PY{n}{version2} \PY{o}{=} \PY{n}{cls2}\PY{o}{.}\PY{n}{version} -\PY{n}{guard}\PY{p}{(}\PY{n}{version2} \PY{o}{==} \PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} -\PY{n}{result3} \PY{o}{=} \PY{n}{Class}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{cls}\PY{p}{,} \PY{l+s}{"}\PY{l+s}{c}\PY{l+s}{"}\PY{p}{,} \PY{n}{version2}\PY{p}{)} -\PY{n}{guard}\PY{p}{(}\PY{n}{result3} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{)} - -\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{n}{result3} -\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} -\end{Verbatim} - +The trace with this new class implementation can be seen in +Figure~\ref{fig:trace4}. The calls to \texttt{Class.\_find\_method} can now be optimized away, also the promotion of the class and the version, except for the first one. 
The final -optimized trace looks like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{c}{# inst.getattr("a")} -\PY{n}{map1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{map} -\PY{n}{guard}\PY{p}{(}\PY{n}{map1} \PY{o}{==} \PY{l+m+mh}{0xb74af4a8}\PY{p}{)} -\PY{n}{storage1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{storage} -\PY{n}{result1} \PY{o}{=} \PY{n}{storage1}\PY{p}{[}\PY{l+m+mi}{0}\PY{p}{]} +optimized trace can be seen in Figure~\ref{fig:trace5}. -\PY{c}{# inst.getattr("b")} -\PY{n}{cls1} \PY{o}{=} \PY{n}{inst}\PY{o}{.}\PY{n}{cls} -\PY{n}{guard}\PY{p}{(}\PY{n}{cls1} \PY{o}{==} \PY{l+m+mh}{0xb7aaaaf8}\PY{p}{)} -\PY{n}{version1} \PY{o}{=} \PY{n}{cls1}\PY{o}{.}\PY{n}{version} -\PY{n}{guard}\PY{p}{(}\PY{n}{version1} \PY{o}{==} \PY{l+m+mh}{0xb7bbbb18}\PY{p}{)} -\PY{n}{v2} \PY{o}{=} \PY{n}{result1} \PY{o}{+} \PY{l+m+mi}{41} - -\PY{c}{# inst.getattr("c")} -\PY{n}{v4} \PY{o}{=} \PY{n}{v2} \PY{o}{+} \PY{l+m+mi}{17} -\PY{k}{return}\PY{p}{(}\PY{n}{v4}\PY{p}{)} -\end{Verbatim} +\begin{figure} +\input{code/trace5.tex} +\caption{Optimized Trace After Introduction of Versioned Classes} +\label{fig:trace5} +\end{figure} The constants \texttt{41} and \texttt{17} are the results of the folding of the \texttt{\_find\_method`} calls. This final trace is now very good. 
It no longer performs any diff --git a/talk/icooolps2011/code/trace4.tex b/talk/icooolps2011/code/trace4.tex new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/trace4.tex @@ -0,0 +1,37 @@ +\begin{Verbatim} +# inst.getattr("a") +map1 = inst.map +guard(map1 == 0xb74af4a8) +index1 = Map.getindex(map1, "a") +guard(index1 != -1) +storage1 = inst.storage +result1 = storage1[index1] + +# inst.getattr("b") +map2 = inst.map +guard(map2 == 0xb74af4a8) +index2 = Map.getindex(map2, "b") +guard(index2 == -1) +cls1 = inst.cls +guard(cls1 == 0xb7aaaaf8) +version1 = cls1.version +guard(version1 == 0xb7bbbb18) +result2 = Class._find_method(cls, "b", version1) +guard(result2 is not None) +v2 = result1 + result2 + +# inst.getattr("c") +map3 = inst.map +guard(map3 == 0xb74af4a8) +index3 = Map.getindex(map3, "c") +guard(index3 == -1) +cls2 = inst.cls +guard(cls2 == 0xb7aaaaf8) +version2 = cls2.version +guard(version2 == 0xb7bbbb18) +result3 = Class._find_method(cls, "c", version2) +guard(result3 is not None) + +v4 = v2 + result3 +return(v4) +\end{Verbatim} diff --git a/talk/icooolps2011/code/version.py b/talk/icooolps2011/code/version.py new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/code/version.py @@ -0,0 +1,24 @@ +class VersionTag(object): + pass + +class Class(object): + def __init__(self, name): + self.name = name + self.methods = {} + self.version = VersionTag() + + def find_method(self, name): + self = hint(self, promote=True) + version = hint(self.version, promote=True) + result = self._find_method(name, version) + if result is not None: + return result + raise AttributeError(name) + + @purefunction + def _find_method(self, name, version): + return self.methods.get(name) + + def change_method(self, name, value): + self.methods[name] = value + self.version = VersionTag() From commits-noreply at bitbucket.org Thu Mar 24 15:21:41 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 15:21:41 +0100 (CET) Subject: [pypy-svn] extradoc 
extradoc: kill part about unroll_safe and dont_look_inside. We have a space-problem and Message-ID: <20110324142141.81D372A2030@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3395:9088f6507a98 Date: 2011-03-24 15:21 +0100 http://bitbucket.org/pypy/extradoc/changeset/9088f6507a98/ Log: kill part about unroll_safe and dont_look_inside. We have a space- problem and this material is not that important. diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -149,145 +149,6 @@ in the user program. -\subsection{How Far Should Tracing Go} - -When the tracer encounters a function call at the interpreter level, e.g. the -interpreter main loop calling a helper function, it can do one of two things: - -\begin{enumerate} -\item it can trace into the helper function, effectively inlining it into the trace. - -\item it can not trace into the function and instead record a call to that function -as an operation in the trace. Such a call operation in the trace is sometimes -called \emph{residual call}. -\end{enumerate} - -As a default, the tracer will try to trace into the helper because that will -give more information to the optimizer, allowing it to do a better job. This is -particularly important for the allocation removal optimization, because if a -freshly allocated object is passed as an argument to a residual call, its -allocation cannot be optimized away. - -There is a problem however if the helper function itself contains a loop. The -tracer records the linear sequence of operations that are being executed. Thus -when it encounters a loop on the interpreter level it records all the -operations of every iteration of the loop itself, with the net effect of -unrolling it. The only places where the tracer stops and tries to close the -trace is in the main loop of the interpreter. 
When the tracer encounters the -main loop, it also checks whether the original user loop has been closed, and -thus whether it can stop tracing. - -For most helper functions in the interpreter that contain loops, fully -unrolling does not make sense. If a loop is unrolled, the trace is specific to -the number of iteration that was seen during tracing. If the trace is later -executed with a different number of iterations, the trace will be left via a -guard failure, which is inefficient. Therefore the default behaviour of the -tracer is to never trace into a function on the interpreter level that contains -a loop, but to trace into all non-looping helper functions. - -This default behaviour is essentially a heuristic, but one that usually makes -sense. We want to produce just enough traces to make the resulting code -efficient, but not more. Therefore we trace as much as possible (everything by -default) except the functions which loops where tracing would produce code that -is less general than it could be. - -As an example for a helper with a loop, take string concatenation. It loops over -the characters of both arguments and copies them over into the result string. It -does not make sense to unroll the loops in this function. If we do that, -the resulting trace can only be used for strings of the length that was seen -during tracing. In practise, the string lengths are usually different each run, -meaning that the trace with unrolling is not run to completion in most cases. - - -\subsection{Influencing the Default Behaviour} - -Sometimes the default behaviour is not actually what is wanted. This is -something the interpreter author has to decide, usually by looking at the traces -that are produced and deciding that they should be improved. There are two ways -in which the default is wrong: -% -\begin{itemize} - -\item \textbf{false negatives:} if a helper function that \textbf{does} contain a loop should -be traced into, unrolling the loop. 
- -\item \textbf{false positives:} if a helper function that \textbf{does not} contain a loop is -inlined into the trace, but the interpreter author decides that this is not -helpful. - -\end{itemize} - -If the interpreter author finds false negatives or false positives, she can fix -that by applying a hint to the tracer. These hints take the form of function -decorators (which both live in the \Verb|pypy.rlib.jit| module). In the next two -subsections we describe these two function decorators and their use. - - -\subsubsection{Unrolling Functions With Loops} - -The first decorator, used to fix false negatives, is the \texttt{unroll\_safe} -decorator. It is used to tell the tracer to always trace into a function that -has a loop, effectively unrolling the loop. This decorator should be used only -if the loop in the helper function is expected to always run for the same number -of iterations. This sounds like a strong restriction, in practise this is less -severe: The number of iterations needs to only be the same \emph{in the context where -the helper functions is traced from}. - -It is easiest to understand this condition via an example. Let's look at the -\texttt{BUILD\_TUPLE} bytecode in Python. It takes one argument, the length \texttt{n} of -the tuple being built. The bytecode pops \texttt{n} arguments from the stack, turns -them into a tuple and pushes that tuple on the stack. Thus the function that -implements \texttt{BUILD\_TUPLE} in PyPy's Python interpreter calls a helper -\texttt{popvalues} which pops \texttt{n} values from the stack and returns them in a list. -This helper is implemented with a loop and would thus not be traced into by -default. The loop in the helper can run for very different numbers of -iterations, because it is used in a variety of places. However, for every -concrete \texttt{BUILD\_TUPLE} bytecode, the argument will be constant. 
Therefore it -is safe (and even necessary) to annotate \texttt{popvalues} with the \texttt{unroll\_safe} -decorator. - -A different example is the implementation of the \texttt{isinstance} builtin. It is -used to check whether an object \texttt{a} is an instance of a class \texttt{B} like -this: \texttt{isinstance(a, B)}. The second argument of the function can also be a -tuple of classes to check whether an object is an instance of one of a number of -classes: \texttt{isinstance(a, (A, B, C, D))}. To implement this second case, the -implementation of \texttt{isinstance} contains a loop iterating over the elements of -the tuple. The number of loop iterations can vary, but is usually fixed for each -individual call site which typically just lists a few classes in the source -code. Therefore it is also safe to annotate the implementation of \texttt{isinstance} -with the \texttt{unroll\_safe} decorator. - - -\subsubsection{Preventing the Tracing of Functions} - -The second decorator \texttt{dont\_look\_inside} is used to fix false positives. It -tells the JIT to never trace into the decorated function and just always produce -a residual call instead. This decorator is in many ways less important than the -unrolling one (except for a special situation that is described in -Section XXX). It is used if tracing into a function is not expected to yield -any speed benefits, because the optimizer will not be able to improve it much. -This is often the case if the called helper function does not contain any -``dynamic'' behaviour. In such a situation it is better to just leave the function -call in the trace, because that produces less code. - -An example would be the import mechanism in Python. It's very unlikely that any -performance improvement can be had by turning part of it into assembler. -Therefore we hide it from the tracer by annotating them with -\texttt{dont\_look\_inside}. 
- - -\subsection{Conclusion} - -In this section we discussed two hints that can be used to control precisely which -parts of the interpreter should be meta-traced. If these hints are used -carefully, this can go a long way to making the interpreter produce traces that -contain exactly the interesting part of the execution, and will contain calls to -the functions that can not be optimized by tracing techniques. - -In the next section we discuss a different set of hints that can -be used to strongly optimize traces. - -%___________________________________________________________________________ \section{Controlling Optimization} From commits-noreply at bitbucket.org Thu Mar 24 16:50:19 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 24 Mar 2011 16:50:19 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Revert 2656b457b868 Message-ID: <20110324155019.419902A2031@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42902:212607b9319d Date: 2011-03-24 09:49 -0600 http://bitbucket.org/pypy/pypy/changeset/212607b9319d/ Log: Revert 2656b457b868 diff --git a/pypy/objspace/std/mapdict.py b/pypy/objspace/std/mapdict.py --- a/pypy/objspace/std/mapdict.py +++ b/pypy/objspace/std/mapdict.py @@ -636,7 +636,6 @@ def _clear_fields(self): self.w_obj = None - @jit.dont_look_inside def _as_rdict(self): self.initialize_as_rdict() space = self.space From commits-noreply at bitbucket.org Thu Mar 24 16:50:19 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 24 Mar 2011 16:50:19 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Provide a missing oopspec (for jit inlining reasons) Message-ID: <20110324155019.D5FB92A2031@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42903:804d53b007fa Date: 2011-03-24 09:49 -0600 http://bitbucket.org/pypy/pypy/changeset/804d53b007fa/ Log: Provide a missing oopspec (for jit inlining reasons) diff --git a/pypy/rpython/lltypesystem/rdict.py b/pypy/rpython/lltypesystem/rdict.py 
--- a/pypy/rpython/lltypesystem/rdict.py +++ b/pypy/rpython/lltypesystem/rdict.py @@ -512,6 +512,7 @@ ll_dict_insertclean(d, entry.key, entry.value, hash) i += 1 old_entries.delete() +ll_dict_resize.oopspec = 'dict.resize(d)' # ------- a port of CPython's dictobject.c's lookdict implementation ------- PERTURB_SHIFT = 5 From commits-noreply at bitbucket.org Thu Mar 24 17:56:05 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 17:56:05 +0100 (CET) Subject: [pypy-svn] pypy default: Refactor heap.py, encapsulating in a new class CachedField the Message-ID: <20110324165605.3D60C2A2033@codespeak.net> Author: Armin Rigo Branch: Changeset: r42904:4116744473d4 Date: 2011-03-24 17:55 +0100 http://bitbucket.org/pypy/pypy/changeset/4116744473d4/ Log: Refactor heap.py, encapsulating in a new class CachedField the exact states it can be in, and writing a long comment about it. This refactoring is actually giving benefits, by removing some extra setfields. To avoid breaking the original purpose of some of the tests which fail because they don't see the setfield any more, "fix" them by adding an escape() operation. 
diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -1791,7 +1791,7 @@ """ self.optimize_loop(ops, ops) - def test_duplicate_setfield_1(self): + def test_duplicate_setfield_0(self): ops = """ [p1, i1, i2] setfield_gc(p1, i1, descr=valuedescr) @@ -1800,8 +1800,27 @@ """ expected = """ [p1, i1, i2] + jump(p1, i1, i2) + """ + # in this case, all setfields are removed, because we can prove + # that in the loop it will always have the same value + self.optimize_loop(ops, expected) + + def test_duplicate_setfield_1(self): + ops = """ + [p1] + i1 = escape() + i2 = escape() + setfield_gc(p1, i1, descr=valuedescr) setfield_gc(p1, i2, descr=valuedescr) - jump(p1, i1, i2) + jump(p1) + """ + expected = """ + [p1] + i1 = escape() + i2 = escape() + setfield_gc(p1, i2, descr=valuedescr) + jump(p1) """ self.optimize_loop(ops, expected) @@ -1848,6 +1867,7 @@ setfield_gc(p1, i4, descr=nextdescr) # setfield_gc(p1, i2, descr=valuedescr) + escape() jump(p1, i1, i2, p3) """ preamble = """ @@ -1860,6 +1880,7 @@ # setfield_gc(p1, i2, descr=valuedescr) setfield_gc(p1, i4, descr=nextdescr) + escape() jump(p1, i1, i2, p3, i3) """ expected = """ @@ -1871,6 +1892,7 @@ # setfield_gc(p1, i2, descr=valuedescr) setfield_gc(p1, i4, descr=nextdescr) + escape() jump(p1, i1, i2, p3, i3) """ self.optimize_loop(ops, expected, preamble) @@ -1943,6 +1965,7 @@ guard_true(i3) [] i4 = int_neg(i2) setfield_gc(p1, NULL, descr=nextdescr) + escape() jump(p1, i2, i4) """ preamble = """ @@ -1950,12 +1973,14 @@ guard_true(i3) [p1] i4 = int_neg(i2) setfield_gc(p1, NULL, descr=nextdescr) + escape() jump(p1, i2, i4) """ expected = """ [p1, i2, i4] guard_true(i4) [p1] setfield_gc(p1, NULL, descr=nextdescr) + escape() jump(p1, i2, 1) """ self.optimize_loop(ops, expected, preamble) @@ -1969,6 +1994,7 @@ guard_true(i3) [] i4 = int_neg(i2) setfield_gc(p1, NULL, 
descr=nextdescr) + escape() jump(p1, i2, i4) """ preamble = """ @@ -1976,12 +2002,14 @@ guard_true(i3) [i2, p1] i4 = int_neg(i2) setfield_gc(p1, NULL, descr=nextdescr) + escape() jump(p1, i2, i4) """ expected = """ [p1, i2, i4] guard_true(i4) [i2, p1] setfield_gc(p1, NULL, descr=nextdescr) + escape() jump(p1, i2, 1) """ self.optimize_loop(ops, expected) @@ -2027,11 +2055,13 @@ guard_value(p1, ConstPtr(myptr)) [] setfield_gc(p1, i1, descr=valuedescr) setfield_gc(ConstPtr(myptr), i2, descr=valuedescr) + escape() jump(p1, i1, i2) """ expected = """ [i1, i2] setfield_gc(ConstPtr(myptr), i2, descr=valuedescr) + escape() jump(i1, i2) """ self.optimize_loop(ops, expected) @@ -3130,6 +3160,7 @@ guard_no_exception(descr=fdescr) [p2, p1] virtual_ref_finish(p2, p1) setfield_gc(p0, NULL, descr=refdescr) + escape() jump(p0, i1) """ preamble = """ @@ -3138,6 +3169,7 @@ call(i1, descr=nonwritedescr) guard_no_exception(descr=fdescr) [i3, i1, p0] setfield_gc(p0, NULL, descr=refdescr) + escape() jump(p0, i1) """ expected = """ @@ -3146,6 +3178,7 @@ call(i1, descr=nonwritedescr) guard_no_exception(descr=fdescr2) [i3, i1, p0] setfield_gc(p0, NULL, descr=refdescr) + escape() jump(p0, i1) """ self.optimize_loop(ops, expected, preamble) diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -3,8 +3,102 @@ from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.rlib.objectmodel import we_are_translated from pypy.jit.metainterp.jitexc import JitException +from pypy.jit.metainterp.optimizeopt.optimizer import Optimization -from pypy.jit.metainterp.optimizeopt.optimizer import Optimization + +class CachedField(object): + def __init__(self): + # Cache information for a field descr. It can be in one + # of two states: + # + # 1. 'cached_fields' is a dict mapping OptValues of structs + # to OptValues of fields. 
All fields on-heap are + # synchronized with the values stored in the cache. + # + # 2. we just did one setfield, which is delayed (and thus + # not synchronized). 'lazy_setfield' is the delayed + # ResOperation. In this state, 'cached_fields' contains + # out-of-date information. More precisely, the field + # value pending in the ResOperation is *not* visible in + # 'cached_fields'. + # + self._cached_fields = {} + self._lazy_setfield = None + self._lazy_setfield_registered = False + + def do_setfield(self, optheap, op): + # Update the state with the SETFIELD_GC operation 'op'. + structvalue = optheap.getvalue(op.getarg(0)) + fieldvalue = optheap.getvalue(op.getarg(1)) + if self.possible_aliasing(optheap, structvalue): + self.force_lazy_setfield(optheap) + assert not self.possible_aliasing(optheap, structvalue) + cached_fieldvalue = self._cached_fields.get(structvalue, None) + if cached_fieldvalue is not fieldvalue: + # common case: store the 'op' as lazy_setfield, and register + # myself in the optheap's _lazy_setfields list + self._lazy_setfield = op + if not self._lazy_setfield_registered: + optheap._lazy_setfields.append(self) + self._lazy_setfield_registered = True + else: + # this is the case where the pending setfield ends up + # storing precisely the value that is already there, + # as proved by 'cached_fields'. In this case, we don't + # need any _lazy_setfield: the heap value is already right. + # Note that this may reset to None a non-None lazy_setfield, + # cancelling its previous effects with no side effect. + self._lazy_setfield = None + + def possible_aliasing(self, optheap, structvalue): + # If lazy_setfield is set and contains a setfield on a different + # structvalue, then we are annoyed, because it may point to either + # the same or a different structure at runtime. 
+ return (self._lazy_setfield is not None + and (optheap.getvalue(self._lazy_setfield.getarg(0)) + is not structvalue)) + + def getfield_from_cache(self, optheap, structvalue): + # Returns the up-to-date field's value, or None if not cached. + if self.possible_aliasing(optheap, structvalue): + self.force_lazy_setfield(optheap) + if self._lazy_setfield is not None: + op = self._lazy_setfield + assert optheap.getvalue(op.getarg(0)) is structvalue + return optheap.getvalue(op.getarg(1)) + else: + return self._cached_fields.get(structvalue, None) + + def remember_field_value(self, structvalue, fieldvalue): + assert self._lazy_setfield is None + self._cached_fields[structvalue] = fieldvalue + + def force_lazy_setfield(self, optheap): + op = self._lazy_setfield + if op is not None: + # This is the way _lazy_setfield is usually reset to None. + # Now we clear _cached_fields, because actually doing the + # setfield might impact any of the stored result (because of + # possible aliasing). + self._cached_fields.clear() + self._lazy_setfield = None + optheap.next_optimization.propagate_forward(op) + # Once it is done, we can put at least one piece of information + # back in the cache: the value of this particular structure's + # field. 
+ structvalue = optheap.getvalue(op.getarg(0)) + fieldvalue = optheap.getvalue(op.getarg(1)) + self.remember_field_value(structvalue, fieldvalue) + + def get_reconstructed(self, optimizer, valuemap): + assert self._lazy_setfield is None + cf = CachedField() + for structvalue, fieldvalue in self._cached_fields.iteritems(): + structvalue2 = structvalue.get_reconstructed(optimizer, valuemap) + fieldvalue2 = fieldvalue .get_reconstructed(optimizer, valuemap) + cf._cached_fields[structvalue2] = fieldvalue2 + return cf + class CachedArrayItems(object): def __init__(self): @@ -20,40 +114,23 @@ """Cache repeated heap accesses""" def __init__(self): - # cached fields: {descr: {OptValue_instance: OptValue_fieldvalue}} + # cached fields: {descr: CachedField} self.cached_fields = {} - self.known_heap_fields = {} + self._lazy_setfields = [] # cached array items: {descr: CachedArrayItems} self.cached_arrayitems = {} - # lazily written setfields (at most one per descr): {descr: op} - self.lazy_setfields = {} - self.lazy_setfields_descrs = [] # keys (at least) of previous dict def reconstruct_for_next_iteration(self, optimizer, valuemap): new = OptHeap() if True: self.force_all_lazy_setfields() - assert not self.lazy_setfields_descrs - assert not self.lazy_setfields else: - new.lazy_setfields_descrs = self.lazy_setfields_descrs - new.lazy_setfields = self.lazy_setfields + assert 0 # was: new.lazy_setfields = self.lazy_setfields for descr, d in self.cached_fields.items(): - newd = {} - new.cached_fields[descr] = newd - for value, fieldvalue in d.items(): - newd[value.get_reconstructed(optimizer, valuemap)] = \ - fieldvalue.get_reconstructed(optimizer, valuemap) - - for descr, d in self.known_heap_fields.items(): - newd = {} - new.known_heap_fields[descr] = newd - for value, fieldvalue in d.items(): - newd[value.get_reconstructed(optimizer, valuemap)] = \ - fieldvalue.get_reconstructed(optimizer, valuemap) - + new.cached_fields[descr] = d.get_reconstructed(optimizer, valuemap) + 
new.cached_arrayitems = {} for descr, d in self.cached_arrayitems.items(): newd = {} @@ -74,30 +151,16 @@ return new def clean_caches(self): + del self._lazy_setfields[:] self.cached_fields.clear() - self.known_heap_fields.clear() self.cached_arrayitems.clear() - def cache_field_value(self, descr, value, fieldvalue, write=False): - if write: - # when seeing a setfield, we have to clear the cache for the same - # field on any other structure, just in case they are aliasing - # each other - d = self.cached_fields[descr] = {} - else: - d = self.cached_fields.setdefault(descr, {}) - d[value] = fieldvalue - - def read_cached_field(self, descr, value): - # XXX self.cached_fields and self.lazy_setfields should probably - # be merged somehow - d = self.cached_fields.get(descr, None) - if d is None: - op = self.lazy_setfields.get(descr, None) - if op is None: - return None - return self.getvalue(op.getarg(1)) - return d.get(value, None) + def field_cache(self, descr): + try: + cf = self.cached_fields[descr] + except KeyError: + cf = self.cached_fields[descr] = CachedField() + return cf def cache_arrayitem_value(self, descr, value, indexvalue, fieldvalue, write=False): d = self.cached_arrayitems.get(descr, None) @@ -179,8 +242,8 @@ for fielddescr in effectinfo.write_descrs_fields: self.force_lazy_setfield(fielddescr) try: - del self.cached_fields[fielddescr] - del self.known_heap_fields[fielddescr] + cf = self.cached_fields[fielddescr] + cf._cached_fields.clear() except KeyError: pass for arraydescr in effectinfo.write_descrs_arrays: @@ -205,58 +268,55 @@ assert value.is_constant() newvalue = self.getvalue(value.box) if value is not newvalue: - for d in self.cached_fields.values(): - if value in d: - d[newvalue] = d[value] - # FIXME: Update the other caches too? 
- - - def force_lazy_setfield(self, descr, before_guard=False): + for cf in self.cached_fields.itervalues(): + if value in cf._cached_fields: + cf._cached_fields[newvalue] = cf._cached_fields[value] + + def force_lazy_setfield(self, descr): try: - op = self.lazy_setfields[descr] + cf = self.cached_fields[descr] except KeyError: return - del self.lazy_setfields[descr] - value = self.getvalue(op.getarg(0)) - fieldvalue = self.getvalue(op.getarg(1)) - try: - heapvalue = self.known_heap_fields[op.getdescr()][value] - if fieldvalue is heapvalue: - return - except KeyError: - pass - self.next_optimization.propagate_forward(op) + cf.force_lazy_setfield(self) + def fixup_guard_situation(self): # hackish: reverse the order of the last two operations if it makes # sense to avoid a situation like "int_eq/setfield_gc/guard_true", # which the backend (at least the x86 backend) does not handle well. newoperations = self.optimizer.newoperations - if before_guard and len(newoperations) >= 2: - lastop = newoperations[-1] - prevop = newoperations[-2] - # - is_comparison() for cases like "int_eq/setfield_gc/guard_true" - # - CALL_MAY_FORCE: "call_may_force/setfield_gc/guard_not_forced" - # - is_ovf(): "int_add_ovf/setfield_gc/guard_no_overflow" - opnum = prevop.getopnum() - lastop_args = lastop.getarglist() - if ((prevop.is_comparison() or opnum == rop.CALL_MAY_FORCE - or prevop.is_ovf()) - and prevop.result not in lastop_args): - newoperations[-2] = lastop - newoperations[-1] = prevop + if len(newoperations) < 2: + return + lastop = newoperations[-1] + if lastop.getopnum() != rop.SETFIELD_GC: + return + # - is_comparison() for cases like "int_eq/setfield_gc/guard_true" + # - CALL_MAY_FORCE: "call_may_force/setfield_gc/guard_not_forced" + # - is_ovf(): "int_add_ovf/setfield_gc/guard_no_overflow" + prevop = newoperations[-2] + opnum = prevop.getopnum() + if not (prevop.is_comparison() or opnum == rop.CALL_MAY_FORCE + or prevop.is_ovf()): + return + if prevop.result in 
lastop.getarglist(): + return + newoperations[-2] = lastop + newoperations[-1] = prevop def force_all_lazy_setfields(self): - if len(self.lazy_setfields_descrs) > 0: - for descr in self.lazy_setfields_descrs: - self.force_lazy_setfield(descr) - del self.lazy_setfields_descrs[:] + if len(self._lazy_setfields) > 0: + for cf in self._lazy_setfields: + if not we_are_translated(): + assert cf in self.cached_fields.values() + cf.force_lazy_setfield(self) + del self._lazy_setfields[:] def force_lazy_setfields_for_guard(self): pendingfields = [] - for descr in self.lazy_setfields_descrs: - try: - op = self.lazy_setfields[descr] - except KeyError: + for cf in self._lazy_setfields: + if not we_are_translated(): + assert cf in self.cached_fields.values() + op = cf._lazy_setfield + if op is None: continue # the only really interesting case that we need to handle in the # guards' resume data is that of a virtual object that is stored @@ -266,41 +326,27 @@ fieldvalue = self.getvalue(op.getarg(1)) if fieldvalue.is_virtual(): # this is the case that we leave to resume.py - pendingfields.append((descr, value.box, + pendingfields.append((op.getdescr(), value.box, fieldvalue.get_key_box())) else: - self.force_lazy_setfield(descr, before_guard=True) + cf.force_lazy_setfield(self) + self.fixup_guard_situation() return pendingfields - def force_lazy_setfield_if_necessary(self, op, value, write=False): - try: - op1 = self.lazy_setfields[op.getdescr()] - except KeyError: - if write: - self.lazy_setfields_descrs.append(op.getdescr()) - else: - if self.getvalue(op1.getarg(0)) is not value: - self.force_lazy_setfield(op.getdescr()) - def optimize_GETFIELD_GC(self, op): - value = self.getvalue(op.getarg(0)) - self.force_lazy_setfield_if_necessary(op, value) - # check if the field was read from another getfield_gc just before - # or has been written to recently - fieldvalue = self.read_cached_field(op.getdescr(), value) + structvalue = self.getvalue(op.getarg(0)) + cf = 
self.field_cache(op.getdescr()) + fieldvalue = cf.getfield_from_cache(self, structvalue) if fieldvalue is not None: self.make_equal_to(op.result, fieldvalue) return # default case: produce the operation - value.ensure_nonnull() + structvalue.ensure_nonnull() ###self.optimizer.optimize_default(op) self.emit_operation(op) # then remember the result of reading the field fieldvalue = self.getvalue(op.result) - self.cache_field_value(op.getdescr(), value, fieldvalue) - # keep track of what's on the heap - d = self.known_heap_fields.setdefault(op.getdescr(), {}) - d[value] = fieldvalue + cf.remember_field_value(structvalue, fieldvalue) def optimize_SETFIELD_GC(self, op): if self.has_pure_result(rop.GETFIELD_GC_PURE, [op.getarg(0)], @@ -309,14 +355,8 @@ (op.getdescr().repr_of_descr())) raise BogusPureField # - value = self.getvalue(op.getarg(0)) - fieldvalue = self.getvalue(op.getarg(1)) - cached_fieldvalue = self.read_cached_field(op.getdescr(), value) - if fieldvalue is not cached_fieldvalue: - self.force_lazy_setfield_if_necessary(op, value, write=True) - self.lazy_setfields[op.getdescr()] = op - # remember the result of future reads of the field - self.cache_field_value(op.getdescr(), value, fieldvalue, write=True) + cf = self.field_cache(op.getdescr()) + cf.do_setfield(self, op) def optimize_GETARRAYITEM_GC(self, op): value = self.getvalue(op.getarg(0)) From commits-noreply at bitbucket.org Thu Mar 24 18:09:24 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 24 Mar 2011 18:09:24 +0100 (CET) Subject: [pypy-svn] pypy default: Added --ext support for third-party modules. Message-ID: <20110324170924.B92872A2033@codespeak.net> Author: tav Branch: Changeset: r42905:e6ff7b6a2fe1 Date: 2011-03-24 17:09 +0000 http://bitbucket.org/pypy/pypy/changeset/e6ff7b6a2fe1/ Log: Added --ext support for third-party modules. 
diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -365,7 +365,11 @@ def setbuiltinmodule(self, importname): """NOT_RPYTHON. load a lazy pypy/module and put it into sys.modules""" - fullname = "pypy.module.%s" % importname + if '.' in importname: + fullname = importname + importname = fullname.rsplit('.', 1)[1] + else: + fullname = "pypy.module.%s" % importname Module = __import__(fullname, None, None, ["Module"]).Module @@ -428,6 +432,11 @@ if value and name not in modules: modules.append(name) + if self.config.objspace.extmodules: + for name in self.config.objspace.extmodules.split(','): + if name not in modules: + modules.append(name) + # a bit of custom logic: time2 or rctime take precedence over time # XXX this could probably be done as a "requires" in the config if ('time2' in modules or 'rctime' in modules) and 'time' in modules: diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -159,6 +159,11 @@ cmdline="--allworkingmodules", negation=True), + StrOption("extmodules", + "Comma-separated list of third-party extension modules", + cmdline="--ext", + default=None), + BoolOption("translationmodules", "use only those modules that are needed to run translate.py on pypy", default=False, From commits-noreply at bitbucket.org Thu Mar 24 18:32:28 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 24 Mar 2011 18:32:28 +0100 (CET) Subject: [pypy-svn] pypy default: Added documentation on the new extmodules config option. Message-ID: <20110324173228.089FA2A2033@codespeak.net> Author: tav Branch: Changeset: r42906:331f1f55cf74 Date: 2011-03-24 17:32 +0000 http://bitbucket.org/pypy/pypy/changeset/331f1f55cf74/ Log: Added documentation on the new extmodules config option. 
diff --git a/pypy/doc/config/objspace.extmodules.rst b/pypy/doc/config/objspace.extmodules.rst new file mode 100644 --- /dev/null +++ b/pypy/doc/config/objspace.extmodules.rst @@ -0,0 +1,12 @@ +You can pass a comma-separated list of third-party builtin modules +which should be translated along with the standard modules within +``pypy.module``. + +The module names need to be fully qualified (i.e. have a ``.`` in them), +be on the ``$PYTHONPATH`` and not conflict with any existing ones, e.g. +``mypkg.somemod``. + +Once translated, the module will be accessible with a simple:: + + import somemod + diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -160,7 +160,7 @@ negation=True), StrOption("extmodules", - "Comma-separated list of third-party extension modules", + "Comma-separated list of third-party builtin modules", cmdline="--ext", default=None), From commits-noreply at bitbucket.org Thu Mar 24 18:44:25 2011 From: commits-noreply at bitbucket.org (fijal) Date: Thu, 24 Mar 2011 18:44:25 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Aha! This is the offender. Don't do it at this level Message-ID: <20110324174425.A90232A2033@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42907:28e13536d138 Date: 2011-03-24 11:44 -0600 http://bitbucket.org/pypy/pypy/changeset/28e13536d138/ Log: Aha! This is the offender. 
Don't do it at this level diff --git a/pypy/jit/tl/pypyjit.py b/pypy/jit/tl/pypyjit.py --- a/pypy/jit/tl/pypyjit.py +++ b/pypy/jit/tl/pypyjit.py @@ -101,7 +101,7 @@ # first annotate, rtype, and backendoptimize PyPy try: - interp, graph = get_interpreter(entry_point, [], backendopt=True, + interp, graph = get_interpreter(entry_point, [], backendopt=False, config=config, type_system=config.translation.type_system, policy=PyPyAnnotatorPolicy(space)) From commits-noreply at bitbucket.org Thu Mar 24 19:34:31 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 19:34:31 +0100 (CET) Subject: [pypy-svn] pypy default: Test and improvement. Also a fix for another case, but it's hard to test. Message-ID: <20110324183431.D888E2A2033@codespeak.net> Author: Armin Rigo Branch: Changeset: r42908:8caf8ba85fc3 Date: 2011-03-24 18:47 +0100 http://bitbucket.org/pypy/pypy/changeset/8caf8ba85fc3/ Log: Test and improvement. Also a fix for another case, but it's hard to test. diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -2066,6 +2066,23 @@ """ self.optimize_loop(ops, expected) + def test_dont_force_setfield_around_copystrcontent(self): + ops = """ + [p0, i0, p1, i1, i2] + setfield_gc(p0, i1, descr=valuedescr) + copystrcontent(p0, i0, p1, i1, i2) + escape() + jump(p0, i0, p1, i1, i2) + """ + expected = """ + [p0, i0, p1, i1, i2] + copystrcontent(p0, i0, p1, i1, i2) + setfield_gc(p0, i1, descr=valuedescr) + escape() + jump(p0, i0, p1, i1, i2) + """ + self.optimize_loop(ops, expected) + def test_duplicate_getarrayitem_1(self): ops = """ [p1] diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -220,11 +220,15 @@ self.optimizer.pendingfields = 
self.force_lazy_setfields_for_guard() return opnum = op.getopnum() - if (opnum == rop.SETFIELD_GC or - opnum == rop.SETFIELD_RAW or - opnum == rop.SETARRAYITEM_GC or - opnum == rop.SETARRAYITEM_RAW or - opnum == rop.DEBUG_MERGE_POINT): + if (opnum == rop.SETFIELD_GC or # handled specially + opnum == rop.SETFIELD_RAW or # no effect on GC struct/array + opnum == rop.SETARRAYITEM_GC or # handled specially + opnum == rop.SETARRAYITEM_RAW or # no effect on GC struct + opnum == rop.STRSETITEM or # no effect on GC struct/array + opnum == rop.UNICODESETITEM or # no effect on GC struct/array + opnum == rop.DEBUG_MERGE_POINT or # no effect whatsoever + opnum == rop.COPYSTRCONTENT or # no effect on GC struct/array + opnum == rop.COPYUNICODECONTENT): # no effect on GC struct/array return assert opnum != rop.CALL_PURE if (opnum == rop.CALL or @@ -257,10 +261,7 @@ # ^^^ we only need to force this field; the other fields # of virtualref_info and virtualizable_info are not gcptrs. return - self.force_all_lazy_setfields() - elif op.is_final() or (not we_are_translated() and - op.getopnum() < 0): # escape() operations - self.force_all_lazy_setfields() + self.force_all_lazy_setfields() self.clean_caches() @@ -303,12 +304,10 @@ newoperations[-1] = prevop def force_all_lazy_setfields(self): - if len(self._lazy_setfields) > 0: - for cf in self._lazy_setfields: - if not we_are_translated(): - assert cf in self.cached_fields.values() - cf.force_lazy_setfield(self) - del self._lazy_setfields[:] + for cf in self._lazy_setfields: + if not we_are_translated(): + assert cf in self.cached_fields.values() + cf.force_lazy_setfield(self) def force_lazy_setfields_for_guard(self): pendingfields = [] From commits-noreply at bitbucket.org Thu Mar 24 19:34:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 19:34:32 +0100 (CET) Subject: [pypy-svn] pypy default: Systematically list both SETFIELD and SETARRAYITEM. 
Message-ID: <20110324183432.6C6692A2033@codespeak.net> Author: Armin Rigo Branch: Changeset: r42909:e6acea94037b Date: 2011-03-24 19:33 +0100 http://bitbucket.org/pypy/pypy/changeset/e6acea94037b/ Log: Systematically list both SETFIELD and SETARRAYITEM. diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -288,7 +288,8 @@ if len(newoperations) < 2: return lastop = newoperations[-1] - if lastop.getopnum() != rop.SETFIELD_GC: + if (lastop.getopnum() != rop.SETFIELD_GC and + lastop.getopnum() != rop.SETARRAYITEM_GC): return # - is_comparison() for cases like "int_eq/setfield_gc/guard_true" # - CALL_MAY_FORCE: "call_may_force/setfield_gc/guard_not_forced" From commits-noreply at bitbucket.org Thu Mar 24 19:34:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 19:34:32 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110324183432.B12EC2A2035@codespeak.net> Author: Armin Rigo Branch: Changeset: r42910:4e16d7f5af44 Date: 2011-03-24 19:34 +0100 http://bitbucket.org/pypy/pypy/changeset/4e16d7f5af44/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 24 20:04:32 2011 From: commits-noreply at bitbucket.org (lac) Date: Thu, 24 Mar 2011 20:04:32 +0100 (CET) Subject: [pypy-svn] pypy default: change these files to look for .rst . The real fix will remove them, though, Message-ID: <20110324190432.48EA12A2033@codespeak.net> Author: Laura Creighton Branch: Changeset: r42911:893582ee0333 Date: 2011-03-24 20:02 +0100 http://bitbucket.org/pypy/pypy/changeset/893582ee0333/ Log: change these files to look for .rst . The real fix will remove them, though, I think. 
diff --git a/pypy/doc/config/confrest.py b/pypy/doc/config/confrest.py --- a/pypy/doc/config/confrest.py +++ b/pypy/doc/config/confrest.py @@ -7,7 +7,6 @@ all_optiondescrs = [pypyoption.pypy_optiondescription, translationoption.translation_optiondescription, ] - start_to_descr = dict([(descr._name, descr) for descr in all_optiondescrs]) class PyPyPage(PyPyPage): @@ -29,7 +28,7 @@ Page = PyPyPage def get_content(self, txtpath, encoding): - if txtpath.basename == "commandline.txt": + if txtpath.basename == "commandline.rst": result = [] for line in txtpath.read().splitlines(): if line.startswith('.. GENERATE:'): diff --git a/pypy/doc/config/generate.py b/pypy/doc/config/generate.py --- a/pypy/doc/config/generate.py +++ b/pypy/doc/config/generate.py @@ -8,8 +8,8 @@ for descr in all_optiondescrs: prefix = descr._name c = config.Config(descr) - thisdir.join(prefix + ".txt").ensure() + thisdir.join(prefix + ".rst").ensure() for p in c.getpaths(include_groups=True): - basename = prefix + "." + p + ".txt" + basename = prefix + "." + p + ".rst" f = thisdir.join(basename) f.ensure() From commits-noreply at bitbucket.org Thu Mar 24 20:04:32 2011 From: commits-noreply at bitbucket.org (lac) Date: Thu, 24 Mar 2011 20:04:32 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110324190432.9773E2A2034@codespeak.net> Author: Laura Creighton Branch: Changeset: r42912:af69a87ee4a6 Date: 2011-03-24 20:04 +0100 http://bitbucket.org/pypy/pypy/changeset/af69a87ee4a6/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 24 21:15:07 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 24 Mar 2011 21:15:07 +0100 (CET) Subject: [pypy-svn] pypy default: Added tests for the new extmodules config option. Message-ID: <20110324201507.7883E282BA1@codespeak.net> Author: tav Branch: Changeset: r42913:87c9a7050e13 Date: 2011-03-24 20:12 +0000 http://bitbucket.org/pypy/pypy/changeset/87c9a7050e13/ Log: Added tests for the new extmodules config option. 
diff --git a/pypy/interpreter/test/test_extmodules.py b/pypy/interpreter/test/test_extmodules.py new file mode 100644 --- /dev/null +++ b/pypy/interpreter/test/test_extmodules.py @@ -0,0 +1,66 @@ +import sys + +from pypy.config.pypyoption import get_pypy_config +from pypy.objspace.std import StdObjSpace +from pypy.tool.udir import udir + +mod_init = """ +from pypy.interpreter.mixedmodule import MixedModule + +import time + +class Module(MixedModule): + + appleveldefs = {} + + interpleveldefs = { + 'clock' : 'interp_time.clock', + 'time' : 'interp_time.time_', + 'sleep' : 'interp_time.sleep', + } +""" + +mod_interp = """ +import time + +from pypy.interpreter.gateway import unwrap_spec + +def clock(space): + return space.wrap(time.clock()) + +def time_(space): + return space.wrap(time.time()) + + at unwrap_spec(seconds=float) +def sleep(space, seconds): + time.sleep(seconds) +""" + +old_sys_path = [] + +def init_extmodule_code(): + pkg = udir.join("testext") + pkg.ensure(dir=True) + pkg.join("__init__.py").write("# package") + mod = pkg.join("extmod") + mod.ensure(dir=True) + mod.join("__init__.py").write(mod_init) + mod.join("interp_time.py").write(mod_interp) + +class AppTestExtModules(object): + def setup_class(cls): + init_extmodule_code() + conf = get_pypy_config() + conf.objspace.extmodules = 'testext.extmod' + old_sys_path[:] = sys.path[:] + sys.path.insert(0, str(udir)) + space = StdObjSpace(conf) + cls.space = space + + def teardown_class(cls): + sys.path[:] = old_sys_path + + def test_import(self): + import extmod + assert extmod.__file__.endswith('extmod') + assert type(extmod.time()) is float From commits-noreply at bitbucket.org Thu Mar 24 21:15:10 2011 From: commits-noreply at bitbucket.org (tav) Date: Thu, 24 Mar 2011 21:15:10 +0100 (CET) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110324201510.C34E5282BAA@codespeak.net> Author: tav Branch: Changeset: r42914:16df517b12a5 Date: 2011-03-24 20:13 +0000 
http://bitbucket.org/pypy/pypy/changeset/16df517b12a5/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 24 23:04:04 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:04 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: use Verbatim environments Message-ID: <20110324220404.DEAA0282BA1@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3396:c2a1785e515c Date: 2011-03-24 16:45 +0100 http://bitbucket.org/pypy/extradoc/changeset/c2a1785e515c/ Log: use Verbatim environments diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -223,11 +223,10 @@ If the fragment is traced with \texttt{x} being \texttt{4}, the following trace is produced: % -\begin{quote}{\ttfamily \raggedright \noindent -guard(x~==~4)\\ -y~=~y~+~x -} -\end{quote} +\begin{Verbatim} +guard(x == 4) +y = y + x +\end{Verbatim} In the trace above, the value of \texttt{x} is statically known thanks to the guard. Remember that a guard is a runtime check. The above trace will run to @@ -255,14 +254,13 @@ \end{Verbatim} We get a trace that looks like this: -% -\begin{quote}{\ttfamily \raggedright \noindent -v1~=~x~*~2\\ -z~=~v1~+~1\\ -v2~=~z~+~y\\ + +\begin{Verbatim} +v1 = x * 2 +z = v1 + 1 +v2 = z + y return(v2) -} -\end{quote} +\end{Verbatim} Observe how the first two operations could be constant-folded if the value of \texttt{x} were known. Let's assume that the value of \texttt{x} can vary, but does so @@ -282,15 +280,14 @@ is done. Let's assume that this changed function is traced with the arguments \texttt{4} and \texttt{8}. 
The trace will be the same, except for one operation at the beginning: -% -\begin{quote}{\ttfamily \raggedright \noindent -guard(x~==~4)\\ -v1~=~x~*~2\\ -z~=~v1~+~1\\ -v2~=~z~+~y\\ + +\begin{Verbatim} +guard(x == 4) +v1 = x * 2 +z = v1 + 1 +v2 = z + y return(v2) -} -\end{quote} +\end{Verbatim} The promotion is turned into a \texttt{guard} operation in the trace. The guard captures the value of \texttt{x} as it was at runtime. From the point of view of the @@ -298,13 +295,12 @@ statement in the example above. After the guard, the rest of the trace can assume that \texttt{x} is equal to \texttt{4}, meaning that the optimizer will turn this trace into: -% -\begin{quote}{\ttfamily \raggedright \noindent -guard(x~==~4)\\ -v2~=~9~+~y\\ + +\begin{Verbatim} +guard(x == 4) +v2 = 9 + y return(v2) -} -\end{quote} +\end{Verbatim} Notice how the first two arithmetic operations were constant folded. The hope is that the guard is executed quicker than the multiplication and the addition that @@ -315,13 +311,12 @@ enough, a new trace will be started from the guard. This other trace will capture a different value of \texttt{x}. If it is e.g. \texttt{2}, then the optimized trace looks like this: -% -\begin{quote}{\ttfamily \raggedright \noindent -guard(x~==~2)\\ -v2~=~5~+~y\\ + +\begin{Verbatim} +guard(x == 2) +v2 = 5 + y return(v2) -} -\end{quote} +\end{Verbatim} This new trace will be attached to the guard instruction of the first trace. 
If \texttt{x} takes on even more values, a new trace will eventually be made for all of them, @@ -369,14 +364,13 @@ Tracing the call \texttt{a.f(10)} of some instance of \texttt{A} yields the following trace (note how the call to \texttt{compute} is inlined): % -\begin{quote}{\ttfamily \raggedright \noindent -x~=~a.x\\ -v1~=~x~*~2\\ -v2~=~v1~+~1\\ -v3~=~v2~+~val\\ -a.y~=~v3 -} -\end{quote} +\begin{Verbatim} +x = a.x +v1 = x * 2 +v2 = v1 + 1 +v3 = v2 + val +a.y = v3 +\end{Verbatim} In this case, adding a promote of \texttt{self} in the \texttt{f} method to get rid of the computation of the first few operations does not help. Even if \texttt{a} is a @@ -405,13 +399,12 @@ Now the trace will look like this: % -\begin{quote}{\ttfamily \raggedright \noindent -guard(a~==~0xb73984a8)\\ -v1~=~compute(a)\\ -v2~=~v1~+~val\\ -a.y~=~v2 -} -\end{quote} +\begin{Verbatim} +guard(a == 0xb73984a8) +v1 = compute(a) +v2 = v1 + val +a.y = v2 +\end{Verbatim} Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used during tracing. The call to \texttt{compute} is not inlined, so that the optimizer @@ -420,12 +413,11 @@ is a constant reference, the call will be removed by the optimizer. The final trace looks like this: % -\begin{quote}{\ttfamily \raggedright \noindent -guard(a~==~0xb73984a8)\\ -v2~=~9~+~val\\ -a.y~=~v2 -} -\end{quote} +\begin{Verbatim} +guard(a == 0xb73984a8) +v2 = 9 + val +a.y = v2 +\end{Verbatim} (assuming that the \texttt{x} field's value is \texttt{4}). 
From commits-noreply at bitbucket.org Thu Mar 24 23:04:05 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:05 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: reshuffle some stuff, kill one of the traces Message-ID: <20110324220405.A7607282BA1@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3397:5f81dd1eaa24 Date: 2011-03-24 16:46 +0100 http://bitbucket.org/pypy/extradoc/changeset/5f81dd1eaa24/ Log: reshuffle some stuff, kill one of the traces diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -55,6 +55,7 @@ \setlength{\topsep} {0 pt} }}% the end stuff {\end{list}} +\definecolor{gray}{rgb}{0.5,0.5,0.5} \begin{document} @@ -94,35 +95,29 @@ of the programs running on their interpreters? -%___________________________________________________________________________ -\section{The PyPy Project} +\section{Background} +\label{sec:Background} + +\subsection{The PyPy Project} \label{sect:pypy} -XXX -\cite{armin_rigo_pypys_2006} - - -%___________________________________________________________________________ -\section{Tracing JIT Compilers} -\label{sect:tracing} - -XXX - -%___________________________________________________________________________ -\section{Controlling The Extent of Tracing} - - -\subsection{Background} - -First, let's recap some basics: PyPy's approach to implementing dynamic +PyPy's approach to implementing dynamic languages is to write an interpreter for the language in RPython. This interpreter can be translated to C and then further to machine code. The interpreter consists of code in the form of a large number of generated C functions and some data. Similarly, the user program consists of functions in the language the interpreter executes. 
-XXX As was explained in a \href{http://morepypy.blogspot.com/2009/03/applying-tracing-jit-to-interpreter.html}{blog post} and a \href{http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf}{paper} two years ago, PyPy's JIT is a -meta-tracer. Since we want to re-use our tracer for a variety of languages, we +XXX \cite{armin_rigo_pypys_2006} + + +%___________________________________________________________________________ +\subsection{PyPy's Meta-Tracing JIT Compilers} +\label{sect:tracing} + + +PyPy's JIT is a meta-tracer \cite{bolz_tracing_2009}. Since we want to re-use +our tracer for a variety of languages, we don't trace the execution of the user program, but instead trace the execution of the \emph{interpreter} that is running the program. This means that the traces don't contain the bytecodes of the language in question, but RPython-level @@ -148,25 +143,13 @@ of the interpreter. However, the extent of the trace is determined by the loops in the user program. - - -\section{Controlling Optimization} - -The last section described how to control the -extent of tracing. In this section we will describe how to add hints that -influence the optimizer. If applied correctly these techniques can give -really big speedups by pre-computing parts of what happens at runtime. On the other -hand, if applied incorrectly they might lead to code bloat, thus making the -resulting program actually slower. - - - -\subsection{Background} +\subsection{Optimizing Traces} +\label{sub:optimizing} Before sending the trace to the backend to produce actual machine code, it is optimized. 
The optimizer applies a number of techniques to remove or reduce -the number of operations: most of these are well known \href{http://en.wikipedia.org/wiki/Compiler_optimization\#Optimization_techniques}{compiler optimization -techniques}, with the difference that it is easier to apply them in a tracing +the number of operations: most of these are well known compiler optimization +techniques, with the difference that it is easier to apply them in a tracing JIT because it only has to deal with linear traces. Among the techniques: % \begin{itemize} @@ -181,11 +164,22 @@ of the interpreter with these optimizations in mind the traces that are produced by the optimizer can be vastly improved. -In this section we describe two hints that allow the interpreter author to -increase the optimization opportunities for constant folding. For constant -folding to work, two conditions need -to be met: -% + +% section Background (end) +%___________________________________________________________________________ + + +\section{Controlling Optimization} + +In this section we will describe how to add two hints that allow the +interpreter author to increase the optimization opportunities for constant +folding. If applied correctly these techniques can give really big speedups by +pre-computing parts of what happens at runtime. On the other +hand, if applied incorrectly they might lead to code bloat, thus making the +resulting program actually slower. + +For constant folding to work, two conditions need to be met: + \begin{itemize} \item the arguments of an operation actually need to all be constant, i.e. statically known by the optimizer @@ -198,9 +192,6 @@ interpreter author can apply \textbf{hints} to improve the optimization opportunities. There is one kind of hint for both of the conditions above. -\textbf{Note}: These hints are written by an interpreter developer and applied to the -RPython source of the interpreter. Normal Python users will never see them. 
- \subsection{Where Do All the Constants Come From} @@ -235,10 +226,10 @@ There are cases in which it is useful to turn an arbitrary variable into a constant value. This process is called \emph{promotion} and it is an old idea -in partial evaluation (it's called ``the trick'' there). Promotion is also heavily -used by \href{http://psyco.sourceforge.net/}{Psyco} and by all older versions of PyPy's JIT. Promotion is a technique -that only works well in JIT compilers, in -static compilers it is significantly less applicable. +in partial evaluation (it's called ``the trick'' \cite{XXX} there). Promotion is also heavily +used by Psyco \cite{rigo_representation-based_2004} and by all older versions +of PyPy's JIT. Promotion is a technique that only works well in JIT compilers, +in static compilers it is significantly less applicable. Promotion is essentially a tool for trace specialization. In some places in the interpreter it would be very useful if a variable were constant, even though it @@ -569,7 +560,15 @@ With this changed instance implementation, the trace we had above changes to the following, where \texttt{0xb74af4a8} is the memory address of the Map instance that -has been promoted, see Figure~\ref{fig:trace2}. +has been promoted, see Figure~\ref{fig:trace2}. Operations that can be +optimized away are grayed out. + +The calls to \texttt{Map.getindex} can be optimized away, because they are calls to +a pure function and they have constant arguments. That means that \texttt{index1/2/3} +are constant and the guards on them can be removed. All but the first guard on +the map will be optimized away too, because the map cannot have changed in +between. This trace is already much better than +the original one. Now we are down from five dictionary lookups to just two. 
\begin{figure} \input{code/trace2.tex} @@ -577,21 +576,7 @@ \label{fig:trace2} \end{figure} -The calls to \texttt{Map.getindex} can be optimized away, because they are calls to -a pure function and they have constant arguments. That means that \texttt{index1/2/3} -are constant and the guards on them can be removed. All but the first guard on -the map will be optimized away too, because the map cannot have changed in -between. The optimized trace looks can be seen in Figure~\ref{fig:trace3} -\begin{figure} -\input{code/trace3.tex} -\caption{Optimized Trace After the Introduction of Maps} -\label{fig:trace3} -\end{figure} - -The index \texttt{0} that is used to read out of the \texttt{storage} array is the result -of the constant-folded \texttt{getindex} call. This trace is already much better than -the original one. Now we are down from five dictionary lookups to just two. %___________________________________________________________________________ @@ -646,6 +631,8 @@ \label{fig:trace5} \end{figure} +The index \texttt{0} that is used to read out of the \texttt{storage} array is the result +of the constant-folded \texttt{getindex} call. The constants \texttt{41} and \texttt{17} are the results of the folding of the \texttt{\_find\_method`} calls. This final trace is now very good. It no longer performs any dictionary lookups. Instead it contains several guards. 
The first guard diff --git a/talk/icooolps2011/code/trace2.tex b/talk/icooolps2011/code/trace2.tex --- a/talk/icooolps2011/code/trace2.tex +++ b/talk/icooolps2011/code/trace2.tex @@ -1,17 +1,17 @@ -\begin{Verbatim} +\begin{Verbatim}[commandchars=\\\{\}] # inst.getattr("a") map1 = inst.map guard(map1 == 0xb74af4a8) -index1 = Map.getindex(map1, "a") -guard(index1 != -1) +{\color{gray}index1 = Map.getindex(map1, "a")} +{\color{gray}guard(index1 != -1)} storage1 = inst.storage result1 = storage1[index1] # inst.getattr("b") -map2 = inst.map -guard(map2 == 0xb74af4a8) -index2 = Map.getindex(map2, "b") -guard(index2 == -1) +{\color{gray}map2 = inst.map} +{\color{gray}guard(map2 == 0xb74af4a8)} +{\color{gray}index2 = Map.getindex(map2, "b")} +{\color{gray}guard(index2 == -1)} cls1 = inst.cls methods1 = cls.methods result2 = dict.get(methods1, "b") @@ -19,10 +19,10 @@ v2 = result1 + result2 # inst.getattr("c") -map3 = inst.map -guard(map3 == 0xb74af4a8) -index3 = Map.getindex(map3, "c") -guard(index3 == -1) +{\color{gray}map3 = inst.map} +{\color{gray}guard(map3 == 0xb74af4a8)} +{\color{gray}index3 = Map.getindex(map3, "c")} +{\color{gray}guard(index3 == -1)} cls1 = inst.cls methods2 = cls.methods result3 = dict.get(methods2, "c") From commits-noreply at bitbucket.org Thu Mar 24 23:04:06 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:06 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: more graying out Message-ID: <20110324220406.2C918282BAA@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3398:cc036fd72baf Date: 2011-03-24 16:51 +0100 http://bitbucket.org/pypy/extradoc/changeset/cc036fd72baf/ Log: more graying out diff --git a/talk/icooolps2011/code/trace4.tex b/talk/icooolps2011/code/trace4.tex --- a/talk/icooolps2011/code/trace4.tex +++ b/talk/icooolps2011/code/trace4.tex @@ -1,36 +1,36 @@ -\begin{Verbatim} +\begin{Verbatim}[commandchars=\\\{\}] # inst.getattr("a") map1 = inst.map guard(map1 == 0xb74af4a8) 
-index1 = Map.getindex(map1, "a") -guard(index1 != -1) +{\color{gray}index1 = Map.getindex(map1, "a")} +{\color{gray}guard(index1 != -1)} storage1 = inst.storage result1 = storage1[index1] # inst.getattr("b") -map2 = inst.map -guard(map2 == 0xb74af4a8) -index2 = Map.getindex(map2, "b") -guard(index2 == -1) +{\color{gray}map2 = inst.map} +{\color{gray}guard(map2 == 0xb74af4a8)} +{\color{gray}index2 = Map.getindex(map2, "b")} +{\color{gray}guard(index2 == -1)} cls1 = inst.cls guard(cls1 == 0xb7aaaaf8) version1 = cls1.version guard(version1 == 0xb7bbbb18) -result2 = Class._find_method(cls, "b", version1) -guard(result2 is not None) +{\color{gray}result2 = Class._find_method(cls, "b", version1)} +{\color{gray}guard(result2 is not None)} v2 = result1 + result2 # inst.getattr("c") -map3 = inst.map -guard(map3 == 0xb74af4a8) -index3 = Map.getindex(map3, "c") -guard(index3 == -1) -cls2 = inst.cls -guard(cls2 == 0xb7aaaaf8) -version2 = cls2.version -guard(version2 == 0xb7bbbb18) -result3 = Class._find_method(cls, "c", version2) -guard(result3 is not None) +{\color{gray}map3 = inst.map} +{\color{gray}guard(map3 == 0xb74af4a8)} +{\color{gray}index3 = Map.getindex(map3, "c")} +{\color{gray}guard(index3 == -1)} +{\color{gray}cls2 = inst.cls} +{\color{gray}guard(cls2 == 0xb7aaaaf8)} +{\color{gray}version2 = cls2.version} +{\color{gray}guard(version2 == 0xb7bbbb18)} +{\color{gray}result3 = Class._find_method(cls, "c", version2)} +{\color{gray}guard(result3 is not None)} v4 = v2 + result3 return(v4) From commits-noreply at bitbucket.org Thu Mar 24 23:04:07 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:07 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: reorder and generally improve things. 
regenerate the figure Message-ID: <20110324220407.5917B282BAA@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3399:213c2b87da34 Date: 2011-03-24 20:38 +0100 http://bitbucket.org/pypy/extradoc/changeset/213c2b87da34/ Log: reorder and generally improve things. regenerate the figure diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -11,13 +11,6 @@ \input{code/style.tex} -\ifthenelse{\isundefined{\hypersetup}}{ - \usepackage[colorlinks=true,linkcolor=blue,urlcolor=blue]{hyperref} -}{} -\hypersetup{ - pdftitle={Controlling the Tracing of an Interpreter With Hints, Part 1: Controlling the Extent of Tracing}, -} - \newboolean{showcomments} \setboolean{showcomments}{true} \ifthenelse{\boolean{showcomments}} @@ -137,8 +130,7 @@ Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left you see the levels of execution. The CPU executes the binary of PyPy's Python interpreter, which consists of RPython functions that have been -compiled first to C, then to machine code. Some of these functions contain -loops, others don't. The interpreter runs a Python program written by a +compiled first to C, then to machine code. The interpreter runs a Python program written by a programmer (the user). If the tracer is used, it traces operations on the level of the interpreter. However, the extent of the trace is determined by the loops in the user program. @@ -147,8 +139,8 @@ \label{sub:optimizing} Before sending the trace to the backend to produce actual machine code, it is -optimized. The optimizer applies a number of techniques to remove or reduce -the number of operations: most of these are well known compiler optimization +optimized. The optimizer applies a number of techniques to remove or simplify +the operations in the trace. 
Most of these are well known compiler optimization techniques, with the difference that it is easier to apply them in a tracing JIT because it only has to deal with linear traces. Among the techniques: % @@ -164,6 +156,59 @@ of the interpreter with these optimizations in mind the traces that are produced by the optimizer can be vastly improved. +\subsection{Running Example} +\label{sub:running} + +As the running example of this paper we will use a very simple and bare-bones +object model that just supports classes and instances, without any +inheritance or other fancy features. The model has classes, which contain methods. +Instances have a class. Instances have their own attributes (or fields). When looking up an +attribute on an instance, the instances attributes are searched. If the +attribute is not found there, the class' methods are searched. + +\begin{figure} +\input{code/interpreter-slow.tex} +\caption{Original Version of a Simple Object Model} +\label{fig:interpreter-slow} +\end{figure} + + +To implement this object model, we could use the RPython code in +Figure~\ref{fig:interpreter-slow} as part of the interpreter source code. +In this straightforward implementation the methods and attributes are just +stored in dictionaries (hash maps) on the classes and instances, respectively. +While this object model is very +simple it already contains all the hard parts of Python's object model. Both +instances and classes can have arbitrary fields, and they are changeable at +any time. Moreover, instances can change their class after they have been +created. + +When using this object model in +an interpreter, a huge amount of time will be spent doing lookups in these +dictionaries. 
+Let's assume we trace through code that sums three attributes, such as: + +\begin{Verbatim} +inst.getattr("a") + inst.getattr("b") + inst.getattr("c") +\end{Verbatim} + +\begin{figure} +\input{code/trace1.tex} +\caption{Trace Through the Object Model} +\label{fig:trace1} +\end{figure} + +The trace would look like in Figure~\ref{fig:trace1}. In this example, the +attribute \texttt{a} is found on the instance, but the +attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains +five calls to \texttt{dict.get}, which is slow. To make the language efficient +using a tracing JIT, we need to find a way to get rid of these dictionary +lookups somehow. How to achieve this will be topic of +Section~\ref{sec:putting}. + + + +% subsection Running Example (end) % section Background (end) %___________________________________________________________________________ @@ -455,68 +500,13 @@ %___________________________________________________________________________ -\section{Putting Things Together} +\section{Putting It All Together} -In this section we describe a worked-out example of -a small object model for a dynamic language and how to make it efficient using -the hints described in the previous sections. - - -%___________________________________________________________________________ - -\subsection{A Simple Object Model} - -To implement a dynamic language efficiently, the operations on its objects need -to be fast. Most dynamic languages have object models that are made by using -dictionaries everywhere. Let's look at an example of how the JIT can be made to -optimize such operations. - -For the purpose of this section we will use a very simple and bare-bones -object model that just supports very simple classes and instances, without any -inheritance or any fancy features. The model has classes, which contain methods. -Instances have a class. Instances have their own attributes. 
When looking up an -attribute on an instance, the instances attributes are searched. If the -attribute is not found there, the class' attributes are searched. - -To implement this object model, we could use the following RPython code as part -of the interpreter source code: - -\begin{figure} -\input{code/interpreter-slow.tex} -\caption{Original Version of a Simple Object Model} -\label{fig:interpreter-slow} -\end{figure} - - -In this straightforward implementation the methods and attributes are just -stored in dictionaries on the classes/instances. While this object model is very -simple it already contains all the hard parts of Python's object model. Both -instances and classes can have arbitrary fields, and they are changeable at -any time. Moreover, instances can change their class after they have been -created. - -When using this object model in -an interpreter, a huge amount of time will be spent doing lookups in these -dictionaries. To make the language efficient using a tracing JIT, we need to -find a way to get rid of these dictionary lookups somehow. - -Let's assume we trace through code that sums three attributes, such as: - -\begin{Verbatim} -inst.getattr("a") + inst.getattr("b") + inst.getattr("c") -\end{Verbatim} - -\begin{figure} -\input{code/trace1.tex} -\caption{Trace Through the Object Model} -\label{fig:trace1} -\end{figure} - -The trace would look like in Figure~\ref{fig:trace1}. In this example, the -attribute \texttt{a} is found on the instance, but the -attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains -five calls to \texttt{dict.get}, which is slow. - +In this section we describe how the simple object model from +Section~\ref{sub:running} can be made efficient using the hints described in the +previous the section. The object model there is typical for many current +dynamic languages (such as Python, Ruby and JavaScript) as it relies heavily on +hash-maps to implement its objects. 
%___________________________________________________________________________ @@ -536,11 +526,11 @@ their set of keys) with many other instances. Therefore it makes sense to factor the layout information out of the instance -implementation into a shared object. This shared layout object is called a -\emph{map}. Maps are an old idea that comes originally from the SELF language \cite{XXX}. They are -also used by many JavaScript implementations such as V8. - -The rewritten \texttt{Instance} class using maps looks like this: +implementation into a shared object, called the \emph{map}. Maps are a well-known +technique to efficiently implement instances and come from the SELF project +\cite{XXX}. They are also used by many JavaScript implementations such as V8. +The rewritten \texttt{Instance} class using maps can be seen in +Figure~\ref{fig:maps}. \begin{figure} \input{code/map.tex} @@ -548,20 +538,20 @@ \label{fig:maps} \end{figure} -Instances no longer use dictionaries to store their fields. Instead, they have a +In this implementation instances no longer use dictionaries to store their fields. Instead, they have a reference to a map, which maps field names to indexes into a storage list. The storage list contains the actual field values. The maps are shared between objects with the same layout. Therefore they have to be immutable, which means that their \texttt{getindex} method is a pure function. When a new attribute is added to an instance, a new map needs to be chosen, which is done with the -\texttt{new\_map\_with\_additional\_attribute} method on the previous map. Now that we have +\texttt{add\_attribute} method on the previous map (which is also pure). Now that we have introduced maps, it is safe to promote the map everywhere, because we assume that the number of different instance layouts is small. 
With this changed instance implementation, the trace we had above changes to the -following, where \texttt{0xb74af4a8} is the memory address of the Map instance that -has been promoted, see Figure~\ref{fig:trace2}. Operations that can be -optimized away are grayed out. +following that of see Figure~\ref{fig:trace2}. There \texttt{0xb74af4a8} is the +memory address of the \texttt{Map} instance that has been promoted. Operations +that can be optimized away are grayed out. The calls to \texttt{Map.getindex} can be optimized away, because they are calls to a pure function and they have constant arguments. That means that \texttt{index1/2/3} @@ -584,7 +574,7 @@ \subsection{Versioning of Classes} Instances were optimized making the assumption that the total number of -Instance layouts is small compared to the number of instances. For classes we +different instance layouts is small compared to the number of instances. For classes we will make an even stronger assumption. We simply assume that it is rare for classes to change at all. This is not totally reasonable (sometimes classes contain counters or similar things) but for this simple example it is good enough. @@ -594,13 +584,13 @@ Every time the class changes, \texttt{find\_method} can potentially return a new value. -Therefore, we give every class a version number, which is increased every time a +Therefore, we give every class a version number, which is changed every time a class gets changed (i.e., the content of the \texttt{methods} dictionary changes). This means that the result of \texttt{methods.get()} for a given \texttt{(name, version)} pair will always be the same, i.e. it is a pure operation. To help the JIT to detect this case, we factor it out in a helper method which is -explicitly marked as \texttt{@purefunction}. The refactored \texttt{Class} looks like -in Figure~\ref{fig:version} +explicitly marked as \texttt{@purefunction}. 
The refactored \texttt{Class} can +be seen in Figure~\ref{fig:version} \begin{figure} \input{code/version.tex} @@ -610,8 +600,8 @@ What is interesting here is that \texttt{\_find\_method} takes the \texttt{version} argument but it does not use it at all. Its only purpose is to make the call -pure (because when the version number changes, the result of the call might be -different than the previous one). +pure, because when the version number changes, the result of the call might be +different than the previous one. \begin{figure} \input{code/trace4.tex} @@ -638,7 +628,7 @@ dictionary lookups. Instead it contains several guards. The first guard checks that the map is still the same. This guard will fail if the same code is executed with an instance that has another layout. The second guard -checks that the class of \texttt{inst} is still the same. It will fail if trace is +checks that the class of \texttt{inst} is still the same. It will fail if the trace is executed with an instance of another class. The third guard checks that the class did not change since the trace was produced. It will fail if somebody calls the \texttt{change\_method} method on the class. @@ -653,7 +643,8 @@ considerably more complex, some additional work needs to be done. The first problem that needs to be solved is that Python supports (multiple) -inheritance. Therefore looking up a method in a class needs to consider the +inheritance. Therefore looking up a method in a class needs to consider all the +classes in the whole method resolution order. This makes the versioning of classes more complex. If a class is changed its version changes. At the same time, the versions of all the classes inheriting from it need to be changed as well, @@ -670,7 +661,8 @@ with its class. In our code above, we allow both to vary independently. In PyPy's Python interpreter we act somewhat more cleverly. The class of an instance is not stored on the instance itself, but on the map. 
This means -that we get one fewer promotion (and thus one fewer guard) in the trace, because the class doesn't need to +that we get one fewer promotion (and thus one fewer guard) in the trace, +because the class doesn't need to be promoted after the map has been. diff --git a/talk/icooolps2011/figures/trace-levels.pdf b/talk/icooolps2011/figures/trace-levels.pdf index ac54d57f68c799ef0594e46997952e78d0221a8a..38df074baba966497d08f34a239a3aa2808f4e18 GIT binary patch [cut] From commits-noreply at bitbucket.org Thu Mar 24 23:04:09 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:09 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: write background and start intro Message-ID: <20110324220409.372D9282BAA@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3400:e8428297679f Date: 2011-03-24 21:58 +0100 http://bitbucket.org/pypy/extradoc/changeset/e8428297679f/ Log: write background and start intro diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -83,9 +83,25 @@ %___________________________________________________________________________ \section{Introduction} -XXX how exactly -the hints work that interpreter authors can use to improve the execution speed -of the programs running on their interpreters? +One of the hardest parts of implementing a dynamic language efficiently is to +optimize its object model. This is made harder by the fact that many recent +languages such as Python, JavaScript or Ruby have rather complex core object +semantics. For them, implementing just an interpreter is already an arduous +task. Implementing them efficiently with a just-in-time compiler is +nigh-impossible, because or their many corner-cases. 
+ +long dream of PE to not have to implement a compiler +new life by meta-tracers, spur, pypy, other things +trace execution of object model and get semantics right, not have to compile +object model + +in spur and pypy, this can be improved by the interpreter author by adding +hints, which influence tracer and optimizer + +pypy's hints go further than spurs, in this paper we present two very important +ones and show how classical implementation techniques of dynlangs can be +expressed by them + \section{Background} @@ -94,20 +110,45 @@ \subsection{The PyPy Project} \label{sect:pypy} -PyPy's approach to implementing dynamic -languages is to write an interpreter for -the language in RPython. This interpreter can be translated to C and then -further to machine code. The interpreter consists of code in the form of a -large number of generated C functions and some data. Similarly, the user -program consists of functions in the language the interpreter executes. +The PyPy project \cite{armin_rigo_pypys_2006} strives to be an environment where +complex dynamic languages can be efficiently implemented. The approach taken +when implement a language with PyPy is to write an interpreter for the language +in \emph{RPython}. RPython is a restricted subset of Python chosen in such a way +that it is possible to perform type inference on it. The interpreters in RPython +can therefore be translated to efficient C code. -XXX \cite{armin_rigo_pypys_2006} +A number of languages have been implemented with PyPy, most importantly a full +Python implementation, but also a Prolog interpreter \cite{XXX} and a Smalltalk +VM \cite{XXX}. +This translation to C code adds a number of implementation details into the +final executable that are not present in the interpreter implementation, such as +a garbage collector. The interpreter can therefore be kept free from low-level +implementation details. 
Another aspect of the final VM that is added +semi-automatically to the generated VM is a tracing JIT compiler. + +We call the code that runs on top of an interpreter implemented with PyPy the +\emph{user code} or \emph{user program}. %___________________________________________________________________________ \subsection{PyPy's Meta-Tracing JIT Compilers} \label{sect:tracing} +XXX citations +A recently popular approach to JIT compilers is that of tracing JITs. Tracing +JITs record traces of concrete execution paths through the program. Those traces +are therefore linear list of operations, which are optimized and then get turned +into machine code. To be able to do this recording, VMs with a tracing JIT +typically also contain an interpreter. After a user program is started the +interpreter is used until the most important paths through the user program are +turned into machine code. + +Because the traces always correspond to a concrete execution they cannot contain +any control flow splits. Therefore they encode the control flow decisions needed +to stay on the trace with the help of \emph{guards}. Those are operations that +check that the assumptions are still true when the trace is later executed with different values. + +One disadvantage of tracing JITs which makes them not directly applicable to Pypy, PyPy's JIT is a meta-tracer \cite{bolz_tracing_2009}. Since we want to re-use our tracer for a variety of languages, we @@ -135,6 +176,9 @@ of the interpreter. However, the extent of the trace is determined by the loops in the user program. 
+XXX trace makes the object model operations explicit and transparent to the +optimizer + \subsection{Optimizing Traces} \label{sub:optimizing} From commits-noreply at bitbucket.org Thu Mar 24 23:04:11 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:04:11 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: expand intro Message-ID: <20110324220411.03D61282BAA@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3401:ab30e4f67d6a Date: 2011-03-24 23:03 +0100 http://bitbucket.org/pypy/extradoc/changeset/ab30e4f67d6a/ Log: expand intro diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -90,17 +90,38 @@ task. Implementing them efficiently with a just-in-time compiler is nigh-impossible, because or their many corner-cases. -long dream of PE to not have to implement a compiler -new life by meta-tracers, spur, pypy, other things -trace execution of object model and get semantics right, not have to compile -object model +It has long been an objective of the partial evaluation community to +automatically produce compilers from interpreters. There has been a recent +renaissance of this idea using the different technique of tracing just-in-time +compilers. A number of projects have attempted this approach. SPUR \cite{XXX} +is a tracing JIT for .NET together with a JavaScript implementation in C\#. XXX -in spur and pypy, this can be improved by the interpreter author by adding -hints, which influence tracer and optimizer +All these projects have in common that they trace an implementation language +which is then used to implement an object model of a dynamic language. The +tracer then traces through this object model, which makes the object model +transparent to the tracer and its optimizations. Therefore the semantics of the +dynamic language does not have to be replicated in the JIT. We call this +approach \emph{meta-tracing}. 
We will give an introduction to the PyPy project +and to meta-tracing in Section~\ref{sec:Background}. -pypy's hints go further than spurs, in this paper we present two very important -ones and show how classical implementation techniques of dynlangs can be -expressed by them +Another commonality of all these approaches is that they require some +annotations (or hints) in the dynamic language implementation to guide the +meta-tracer. SPUR and PyPy both provide the interpreter author with more +elaborate hints to influence the meta-tracer and its optimizer. + +PyPy's hints go even further than SPUR's in that they provide the interpreter +author with a flexible toolset to make her implementation extremely efficient. +In this paper we present the two most prominent ones and show how classical +implementation techniques of dynamic languages can be expressed with them. + +The contributions of this paper are: +\begin{itemize} + \item A hint to introduce arbitrary constants into the trace. + \item A way to define new pure operations which the optimizer then recognizes. + \item A worked-out example of a simple object model of a dynamic language and how it can be improved using these hints. +\end{itemize} + + @@ -136,26 +157,27 @@ XXX citations A recently popular approach to JIT compilers is that of tracing JITs. Tracing -JITs record traces of concrete execution paths through the program. Those traces -are therefore linear list of operations, which are optimized and then get turned -into machine code. To be able to do this recording, VMs with a tracing JIT -typically also contain an interpreter. After a user program is started the -interpreter is used until the most important paths through the user program are -turned into machine code. +JITs record traces of concrete execution paths through the program. Those +traces are therefore linear list of operations, which are optimized and then +get turned into machine code. 
To be able to do this recording, VMs with a +tracing JIT typically also contain an interpreter. After a user program is +started the interpreter is used until the most important paths through the user +program are turned into machine code. -Because the traces always correspond to a concrete execution they cannot contain -any control flow splits. Therefore they encode the control flow decisions needed -to stay on the trace with the help of \emph{guards}. Those are operations that -check that the assumptions are still true when the trace is later executed with different values. +Because the traces always correspond to a concrete execution they cannot +contain any control flow splits. Therefore they encode the control flow +decisions needed to stay on the trace with the help of \emph{guards}. Those are +operations that check that the assumptions are still true when the trace is +later executed with different values. -One disadvantage of tracing JITs which makes them not directly applicable to Pypy, - -PyPy's JIT is a meta-tracer \cite{bolz_tracing_2009}. Since we want to re-use -our tracer for a variety of languages, we -don't trace the execution of the user program, but instead trace the execution -of the \emph{interpreter} that is running the program. This means that the traces -don't contain the bytecodes of the language in question, but RPython-level -operations that the interpreter did to execute the program. +One disadvantage of tracing JITs which makes them not directly applicable to +PyPy is that they encode the language semantics. Since PyPy wants to be a +general framework, we want to reuse our tracer for different languages. +Therefore PyPy's JIT is a meta-tracer \cite{bolz_tracing_2009}. It does not +trace the execution of the user program, but instead traces the execution of +the \emph{interpreter} that is running the program. 
This means that the traces +it produces don't contain the bytecodes of the language in question, but +RPython-level operations that the interpreter did to execute the program. On the other hand, the loops that are traced by the tracer are the loops in the user program. This means that the tracer stops tracing after one iteration of From commits-noreply at bitbucket.org Thu Mar 24 23:38:47 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Thu, 24 Mar 2011 23:38:47 +0100 (CET) Subject: [pypy-svn] pypy default: refactor rdict a bit: Message-ID: <20110324223847.418E2282BA1@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42923:ef06a997e7f1 Date: 2011-03-24 23:36 +0100 http://bitbucket.org/pypy/pypy/changeset/ef06a997e7f1/ Log: refactor rdict a bit: - make ll_dict_lookup return the index plus a flag bit set if the entry is unused. this removes a second access to the entry in the caller - make setdefault hash only once(!) diff --git a/pypy/rlib/_rweakkeydict.py b/pypy/rlib/_rweakkeydict.py --- a/pypy/rlib/_rweakkeydict.py +++ b/pypy/rlib/_rweakkeydict.py @@ -123,7 +123,7 @@ @jit.dont_look_inside def ll_get(d, llkey): hash = compute_identity_hash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK #llop.debug_print(lltype.Void, i, 'get', hex(hash), # ll_debugrepr(d.entries[i].key), # ll_debugrepr(d.entries[i].value)) @@ -143,7 +143,7 @@ def ll_set_nonnull(d, llkey, llvalue): hash = compute_identity_hash(llkey) keyref = weakref_create(llkey) # GC effects here, before the rest - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK everused = d.entries.everused(i) d.entries[i].key = keyref d.entries[i].value = llvalue @@ -160,7 +160,7 @@ @jit.dont_look_inside def ll_set_null(d, llkey): hash = compute_identity_hash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK if d.entries.everused(i): # If the 
entry was ever used, clean up its key and value. # We don't store a NULL value, but a dead weakref, because diff --git a/pypy/rlib/_rweakvaldict.py b/pypy/rlib/_rweakvaldict.py --- a/pypy/rlib/_rweakvaldict.py +++ b/pypy/rlib/_rweakvaldict.py @@ -113,7 +113,7 @@ @jit.dont_look_inside def ll_get(self, d, llkey): hash = self.ll_keyhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK #llop.debug_print(lltype.Void, i, 'get') valueref = d.entries[i].value if valueref: @@ -132,7 +132,7 @@ def ll_set_nonnull(self, d, llkey, llvalue): hash = self.ll_keyhash(llkey) valueref = weakref_create(llvalue) # GC effects here, before the rest - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK everused = d.entries.everused(i) d.entries[i].key = llkey d.entries[i].value = valueref @@ -146,7 +146,7 @@ @jit.dont_look_inside def ll_set_null(self, d, llkey): hash = self.ll_keyhash(llkey) - i = rdict.ll_dict_lookup(d, llkey, hash) + i = rdict.ll_dict_lookup(d, llkey, hash) & rdict.MASK if d.entries.everused(i): # If the entry was ever used, clean up its key and value. 
# We don't store a NULL value, but a dead weakref, because diff --git a/pypy/rpython/lltypesystem/rdict.py b/pypy/rpython/lltypesystem/rdict.py --- a/pypy/rpython/lltypesystem/rdict.py +++ b/pypy/rpython/lltypesystem/rdict.py @@ -4,12 +4,15 @@ from pypy.rpython.rdict import AbstractDictRepr, AbstractDictIteratorRepr,\ rtype_newdict from pypy.rpython.lltypesystem import lltype -from pypy.rlib.rarithmetic import r_uint, intmask +from pypy.rlib.rarithmetic import r_uint, intmask, LONG_BIT from pypy.rlib.objectmodel import hlinvoke from pypy.rpython import robject from pypy.rlib import objectmodel from pypy.rpython import rmodel +HIGHEST_BIT = (1 << (LONG_BIT - 2)) +MASK = (1 << (LONG_BIT - 2)) - 1 + # ____________________________________________________________ # # generic implementation of RPython dictionary, with parametric DICTKEY and @@ -422,18 +425,18 @@ def ll_dict_getitem(d, key): i = ll_dict_lookup(d, key, d.keyhash(key)) - entries = d.entries - if entries.valid(i): - return entries[i].value - else: - raise KeyError + if not i & HIGHEST_BIT: + return d.entries[i].value + else: + raise KeyError ll_dict_getitem.oopspec = 'dict.getitem(d, key)' def ll_dict_setitem(d, key, value): hash = d.keyhash(key) i = ll_dict_lookup(d, key, hash) + valid = (i & HIGHEST_BIT) == 0 + i = i & MASK everused = d.entries.everused(i) - valid = d.entries.valid(i) # set up the new entry ENTRY = lltype.typeOf(d.entries).TO.OF entry = d.entries[i] @@ -470,7 +473,7 @@ def ll_dict_delitem(d, key): i = ll_dict_lookup(d, key, d.keyhash(key)) - if not d.entries.valid(i): + if i & HIGHEST_BIT: raise KeyError _ll_dict_del(d, i) ll_dict_delitem.oopspec = 'dict.delitem(d, key)' @@ -542,7 +545,7 @@ elif entries.everused(i): freeslot = i else: - return i # pristine entry -- lookup failed + return i | HIGHEST_BIT # pristine entry -- lookup failed # In the loop, a deleted entry (everused and not valid) is by far # (factor of 100s) the least likely outcome, so test for that last. 
@@ -557,7 +560,7 @@ if not entries.everused(i): if freeslot == -1: freeslot = i - return freeslot + return freeslot | HIGHEST_BIT elif entries.valid(i): checkingkey = entries[i].key if direct_compare and checkingkey == key: @@ -712,16 +715,16 @@ def ll_get(dict, key, default): i = ll_dict_lookup(dict, key, dict.keyhash(key)) entries = dict.entries - if entries.valid(i): + if not i & HIGHEST_BIT: return entries[i].value - else: + else: return default ll_get.oopspec = 'dict.get(dict, key, default)' def ll_setdefault(dict, key, default): i = ll_dict_lookup(dict, key, dict.keyhash(key)) entries = dict.entries - if entries.valid(i): + if not i & HIGHEST_BIT: return entries[i].value else: ll_dict_setitem(dict, key, default) @@ -818,7 +821,7 @@ def ll_contains(d, key): i = ll_dict_lookup(d, key, d.keyhash(key)) - return d.entries.valid(i) + return not i & HIGHEST_BIT ll_contains.oopspec = 'dict.contains(d, key)' POPITEMINDEX = lltype.Struct('PopItemIndex', ('nextindex', lltype.Signed)) From commits-noreply at bitbucket.org Thu Mar 24 23:45:56 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 24 Mar 2011 23:45:56 +0100 (CET) Subject: [pypy-svn] pypy default: Fix for the failing test in test_typedef. The cause was ultimately Message-ID: <20110324224556.DE5A8282BA1@codespeak.net> Author: Armin Rigo Branch: Changeset: r42924:3dbdd576be39 Date: 2011-03-24 23:44 +0100 http://bitbucket.org/pypy/pypy/changeset/3dbdd576be39/ Log: Fix for the failing test in test_typedef. The cause was ultimately that the global _subclass_cache was populated with various 'config's, and now there was too many of them for the test to pass. 
diff --git a/pypy/interpreter/typedef.py b/pypy/interpreter/typedef.py --- a/pypy/interpreter/typedef.py +++ b/pypy/interpreter/typedef.py @@ -103,6 +103,7 @@ except KeyError: subcls = _getusercls(config, cls, hasdict, wants_slots, needsdel, weakrefable) + assert key not in _subclass_cache _subclass_cache[key] = subcls return subcls get_unique_interplevel_subclass._annspecialcase_ = "specialize:memo" diff --git a/pypy/interpreter/test/test_typedef.py b/pypy/interpreter/test/test_typedef.py --- a/pypy/interpreter/test/test_typedef.py +++ b/pypy/interpreter/test/test_typedef.py @@ -127,12 +127,15 @@ checks[2], checks[3])) subclasses = {} for key, subcls in typedef._subclass_cache.items(): + if key[0] is not space.config: + continue cls = key[1] subclasses.setdefault(cls, {}) - subclasses[cls][subcls] = True + prevsubcls = subclasses[cls].setdefault(subcls.__name__, subcls) + assert subcls is prevsubcls for cls, set in subclasses.items(): assert len(set) <= 6, "%s has %d subclasses:\n%r" % ( - cls, len(set), [subcls.__name__ for subcls in set]) + cls, len(set), list(set)) def test_getsetproperty(self): class W_SomeType(Wrappable): From commits-noreply at bitbucket.org Fri Mar 25 01:19:39 2011 From: commits-noreply at bitbucket.org (tav) Date: Fri, 25 Mar 2011 01:19:39 +0100 (CET) Subject: [pypy-svn] pypy default: Added cpyext/*/*.o to the .gitignore. Message-ID: <20110325001939.D2CBE282BAA@codespeak.net> Author: tav Branch: Changeset: r42925:65fd068ae340 Date: 2011-03-25 00:19 +0000 http://bitbucket.org/pypy/pypy/changeset/65fd068ae340/ Log: Added cpyext/*/*.o to the .gitignore. 
diff --git a/.gitignore b/.gitignore --- a/.gitignore +++ b/.gitignore @@ -12,6 +12,8 @@ pypy/doc/*.html pypy/doc/config/*.html pypy/doc/discussion/*.html +pypy/module/cpyext/src/*.o +pypy/module/cpyext/test/*.o pypy/module/test_lib_pypy/ctypes_tests/*.o pypy/translator/c/src/dtoa.o pypy/translator/goal/pypy-c From commits-noreply at bitbucket.org Fri Mar 25 01:23:14 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Fri, 25 Mar 2011 01:23:14 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: this is fixed Message-ID: <20110325002314.5B696282BAA@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3402:feebb436cc36 Date: 2011-03-24 20:23 -0400 http://bitbucket.org/pypy/extradoc/changeset/feebb436cc36/ Log: this is fixed diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -80,8 +80,6 @@ Should be just a matter of synthesizing reverse operations in rewrite.py -- strlen result is not reused - PYTHON EXAMPLES --------------- From commits-noreply at bitbucket.org Fri Mar 25 01:37:05 2011 From: commits-noreply at bitbucket.org (tav) Date: Fri, 25 Mar 2011 01:37:05 +0100 (CET) Subject: [pypy-svn] pypy default: Added myself to the authors list. Message-ID: <20110325003705.D8DEF36C203@codespeak.net> Author: tav Branch: Changeset: r42926:356265696abe Date: 2011-03-25 00:36 +0000 http://bitbucket.org/pypy/pypy/changeset/356265696abe/ Log: Added myself to the authors list. 
diff --git a/LICENSE b/LICENSE --- a/LICENSE +++ b/LICENSE @@ -108,6 +108,7 @@ Anders Qvist Alan McIntyre Bert Freudenberg + Tav Heinrich-Heine University, Germany Open End AB (formerly AB Strakt), Sweden From commits-noreply at bitbucket.org Fri Mar 25 11:49:42 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 11:49:42 +0100 (CET) Subject: [pypy-svn] pypy default: - setdefault only does one lookup now Message-ID: <20110325104942.7D22A282BDC@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42927:9f98d441f248 Date: 2011-03-25 11:21 +0100 http://bitbucket.org/pypy/pypy/changeset/9f98d441f248/ Log: - setdefault only does one lookup now - update doesn't recompute hashes all the time diff --git a/pypy/rpython/lltypesystem/rdict.py b/pypy/rpython/lltypesystem/rdict.py --- a/pypy/rpython/lltypesystem/rdict.py +++ b/pypy/rpython/lltypesystem/rdict.py @@ -10,8 +10,8 @@ from pypy.rlib import objectmodel from pypy.rpython import rmodel -HIGHEST_BIT = (1 << (LONG_BIT - 2)) -MASK = (1 << (LONG_BIT - 2)) - 1 +HIGHEST_BIT = intmask(1 << (LONG_BIT - 1)) +MASK = intmask(HIGHEST_BIT - 1) # ____________________________________________________________ # @@ -434,6 +434,10 @@ def ll_dict_setitem(d, key, value): hash = d.keyhash(key) i = ll_dict_lookup(d, key, hash) + return _ll_dict_setitem_lookup_done(d, key, value, hash, i) +ll_dict_setitem.oopspec = 'dict.setitem(d, key, value)' + +def _ll_dict_setitem_lookup_done(d, key, value, hash, i): valid = (i & HIGHEST_BIT) == 0 i = i & MASK everused = d.entries.everused(i) @@ -452,7 +456,6 @@ d.num_pristine_entries -= 1 if d.num_pristine_entries <= len(d.entries) / 3: ll_dict_resize(d) -ll_dict_setitem.oopspec = 'dict.setitem(d, key, value)' def ll_dict_insertclean(d, key, value, hash): # Internal routine used by ll_dict_resize() to insert an item which is @@ -722,12 +725,13 @@ ll_get.oopspec = 'dict.get(dict, key, default)' def ll_setdefault(dict, key, default): - i = ll_dict_lookup(dict, key, 
dict.keyhash(key)) + hash = dict.keyhash(key) + i = ll_dict_lookup(dict, key, hash) entries = dict.entries if not i & HIGHEST_BIT: return entries[i].value else: - ll_dict_setitem(dict, key, default) + _ll_dict_setitem_lookup_done(dict, key, default, hash, i) return default ll_setdefault.oopspec = 'dict.setdefault(dict, key, default)' @@ -771,7 +775,10 @@ while i < d2len: if entries.valid(i): entry = entries[i] - ll_dict_setitem(dic1, entry.key, entry.value) + hash = entries.hash(i) + key = entry.key + j = ll_dict_lookup(dic1, key, hash) + _ll_dict_setitem_lookup_done(dic1, key, entry.value, hash, j) i += 1 ll_update.oopspec = 'dict.update(dic1, dic2)' From commits-noreply at bitbucket.org Fri Mar 25 12:11:51 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 12:11:51 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix the signature of _as_ffi_pointer_, which changed recently Message-ID: <20110325111151.2CC3E282BDC@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42928:b81f0229bff2 Date: 2011-03-24 15:07 +0100 http://bitbucket.org/pypy/pypy/changeset/b81f0229bff2/ Log: fix the signature of _as_ffi_pointer_, which changed recently diff --git a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py --- a/lib_pypy/_ctypes/primitive.py +++ b/lib_pypy/_ctypes/primitive.py @@ -251,7 +251,7 @@ # make pointer-types compatible with the _ffi fast path if result._is_pointer_like(): - def _as_ffi_pointer_(self): + def _as_ffi_pointer_(self, ffitype): return self._get_buffer_value() result._as_ffi_pointer_ = _as_ffi_pointer_ diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py --- a/lib_pypy/_ctypes/pointer.py +++ b/lib_pypy/_ctypes/pointer.py @@ -114,7 +114,7 @@ contents = property(getcontents, setcontents) - def _as_ffi_pointer_(self): + def _as_ffi_pointer_(self, ffitype): return self._get_buffer_value() def _cast_addr(obj, _, tp): From commits-noreply at bitbucket.org Fri Mar 25 12:11:52 2011 From: 
commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 12:11:52 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: add a method to get the ffi type from the ctype type Message-ID: <20110325111152.13652282BDC@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42929:f5f3c368b107 Date: 2011-03-24 16:04 +0100 http://bitbucket.org/pypy/pypy/changeset/f5f3c368b107/ Log: add a method to get the ffi type from the ctype type diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py --- a/lib_pypy/_ctypes/basics.py +++ b/lib_pypy/_ctypes/basics.py @@ -50,6 +50,9 @@ def get_ffi_param(self, value): return self.from_param(value)._to_ffi_param() + def get_ffi_argtype(self): + return _shape_to_ffi_type(self._ffiargshape) + def _CData_output(self, resbuffer, base=None, index=-1): #assert isinstance(resbuffer, _rawffi.ArrayInstance) """Used when data exits ctypes and goes into user code. @@ -181,9 +184,9 @@ isinstance(shape[0], _rawffi.Structure) and shape[1] == 1) -def shape_to_ffi_type(shape): +def _shape_to_ffi_type(shape): try: - return shape_to_ffi_type.typemap[shape] + return _shape_to_ffi_type.typemap[shape] except KeyError: pass if is_struct_shape(shape): @@ -192,7 +195,7 @@ assert False, 'unknown shape %s' % (shape,) -shape_to_ffi_type.typemap = { +_shape_to_ffi_type.typemap = { 'c' : _ffi.types.char, 'b' : _ffi.types.sbyte, 'B' : _ffi.types.ubyte, diff --git a/lib_pypy/_ctypes/function.py b/lib_pypy/_ctypes/function.py --- a/lib_pypy/_ctypes/function.py +++ b/lib_pypy/_ctypes/function.py @@ -2,7 +2,7 @@ from _ctypes.basics import _CData, _CDataMeta, cdata_from_address from _ctypes.primitive import SimpleType, _SimpleCData from _ctypes.basics import ArgumentError, keepalive_key -from _ctypes.basics import shape_to_ffi_type, is_struct_shape +from _ctypes.basics import is_struct_shape from _ctypes.builtin import set_errno, set_last_error import _rawffi import _ffi @@ -210,8 +210,11 @@ # Direct construction from raw address if 
isinstance(argument, (int, long)) and not argsl: self._set_address(argument) - argshapes, resshape = self._ffishapes(self._argtypes_, self._restype_) - self._ptr = self._getfuncptr_fromaddress(argshapes, resshape) + restype = self._restype_ + if restype is None: + import ctypes + restype = ctypes.c_int + self._ptr = self._getfuncptr_fromaddress(self._argtypes_, restype) return @@ -363,10 +366,10 @@ # return self._build_result(self._restype_, result, newargs) - def _getfuncptr_fromaddress(self, argshapes, resshape): + def _getfuncptr_fromaddress(self, argtypes, restype): address = self._get_address() - ffiargs = [shape_to_ffi_type(shape) for shape in argshapes] - ffires = shape_to_ffi_type(resshape) + ffiargs = [argtype.get_ffi_argtype() for argtype in argtypes] + ffires = restype.get_ffi_argtype() return _ffi.FuncPtr.fromaddr(address, '', ffiargs, ffires) def _getfuncptr(self, argtypes, restype, thisarg=None): @@ -375,10 +378,8 @@ if restype is None or not isinstance(restype, _CDataMeta): import ctypes restype = ctypes.c_int - argshapes = [arg._ffiargshape for arg in argtypes] - resshape = restype._ffiargshape if self._buffer is not None: - ptr = self._getfuncptr_fromaddress(argshapes, resshape) + ptr = self._getfuncptr_fromaddress(argtypes, restype) if argtypes == self._argtypes_: self._ptr = ptr return ptr @@ -388,13 +389,15 @@ if not thisarg: raise ValueError("COM method call without VTable") ptr = thisarg[self._com_index - 0x1000] + argshapes = [arg._ffiargshape for arg in argtypes] + resshape = restype._ffiargshape return _rawffi.FuncPtr(ptr, argshapes, resshape, self._flags_) cdll = self.dll._handle try: #return cdll.ptr(self.name, argshapes, resshape, self._flags_) - ffi_argtypes = [shape_to_ffi_type(shape) for shape in argshapes] - ffi_restype = shape_to_ffi_type(resshape) + ffi_argtypes = [argtype.get_ffi_argtype() for argtype in argtypes] + ffi_restype = restype.get_ffi_argtype() self._ptr = cdll.getfunc(self.name, ffi_argtypes, ffi_restype) return 
self._ptr except AttributeError: From commits-noreply at bitbucket.org Fri Mar 25 12:11:54 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 12:11:54 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: check that the pointer types are compatible, before converting Message-ID: <20110325111154.A7781282BDC@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42930:58475d01d2ec Date: 2011-03-25 11:07 +0100 http://bitbucket.org/pypy/pypy/changeset/58475d01d2ec/ Log: check that the pointer types are compatible, before converting diff --git a/pypy/module/test_lib_pypy/ctypes_tests/test_pointers.py b/pypy/module/test_lib_pypy/ctypes_tests/test_pointers.py --- a/pypy/module/test_lib_pypy/ctypes_tests/test_pointers.py +++ b/pypy/module/test_lib_pypy/ctypes_tests/test_pointers.py @@ -12,6 +12,13 @@ mod._ctypes_test = str(conftest.sofile) class TestPointers(BaseCTypesTestChecker): + + def test_get_ffi_argtype(self): + P = POINTER(c_int) + ffitype = P.get_ffi_argtype() + assert P.get_ffi_argtype() is ffitype + assert ffitype.deref_pointer() is c_int.get_ffi_argtype() + def test_pointer_crash(self): class A(POINTER(c_ulong)): diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py --- a/lib_pypy/_ctypes/pointer.py +++ b/lib_pypy/_ctypes/pointer.py @@ -1,6 +1,7 @@ import _rawffi -from _ctypes.basics import _CData, _CDataMeta, cdata_from_address +import _ffi +from _ctypes.basics import _CData, _CDataMeta, cdata_from_address, ArgumentError from _ctypes.basics import keepalive_key, store_reference, ensure_objects from _ctypes.basics import sizeof, byref from _ctypes.array import Array, array_get_slice_params, array_slice_getitem,\ @@ -19,7 +20,7 @@ length = 1, _ffiargshape = 'P', _ffishape = 'P', - _fficompositesize = None + _fficompositesize = None, ) # XXX check if typedict['_type_'] is any sane # XXX remember about paramfunc @@ -66,6 +67,7 @@ self._ffiarray = ffiarray self.__init__ = __init__ self._type_ = TP + self._ffiargtype = 
_ffi.types.Pointer(TP.get_ffi_argtype()) from_address = cdata_from_address @@ -115,6 +117,12 @@ contents = property(getcontents, setcontents) def _as_ffi_pointer_(self, ffitype): + my_ffitype = type(self).get_ffi_argtype() + # for now, we always allow types.pointer, else a lot of tests + # break. We need to rethink how pointers are represented, though + if my_ffitype.deref_pointer() != ffitype.deref_pointer() \ + and ffitype != _ffi.types.pointer: + raise ArgumentError, "expected %s instance, got %s" % (type(self), ffitype) return self._get_buffer_value() def _cast_addr(obj, _, tp): diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py --- a/lib_pypy/_ctypes/basics.py +++ b/lib_pypy/_ctypes/basics.py @@ -51,6 +51,8 @@ return self.from_param(value)._to_ffi_param() def get_ffi_argtype(self): + if self._ffiargtype: + return self._ffiargtype return _shape_to_ffi_type(self._ffiargshape) def _CData_output(self, resbuffer, base=None, index=-1): @@ -106,6 +108,7 @@ """ __metaclass__ = _CDataMeta _objects = None + _ffiargtype = None def __init__(self, *args, **kwds): raise TypeError("%s has no type" % (type(self),)) From commits-noreply at bitbucket.org Fri Mar 25 12:11:55 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 12:11:55 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: the previous checkin was partly broken; now c_char_p and c_wchar_p are strong typed as well; there is still one failing test because now we are too strict and do not allow implicit conversions from typed pointers to void* Message-ID: <20110325111155.8A89A282BDC@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42931:1dcf40fbb6ad Date: 2011-03-25 11:15 +0100 http://bitbucket.org/pypy/pypy/changeset/1dcf40fbb6ad/ Log: the previous checkin was partly broken; now c_char_p and c_wchar_p are strong typed as well; there is still one failing test because now we are too strict and do not allow implicit conversions from typed pointers to void* diff --git 
a/lib_pypy/_ctypes/primitive.py b/lib_pypy/_ctypes/primitive.py --- a/lib_pypy/_ctypes/primitive.py +++ b/lib_pypy/_ctypes/primitive.py @@ -1,3 +1,4 @@ +import _ffi import _rawffi import weakref import sys @@ -140,6 +141,7 @@ value = 0 self._buffer[0] = value result.value = property(_getvalue, _setvalue) + result._ffiargtype = _ffi.types.Pointer(_ffi.types.char) elif tp == 'Z': # c_wchar_p @@ -163,6 +165,7 @@ value = 0 self._buffer[0] = value result.value = property(_getvalue, _setvalue) + result._ffiargtype = _ffi.types.Pointer(_ffi.types.unichar) elif tp == 'P': # c_void_p diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py --- a/lib_pypy/_ctypes/pointer.py +++ b/lib_pypy/_ctypes/pointer.py @@ -120,8 +120,7 @@ my_ffitype = type(self).get_ffi_argtype() # for now, we always allow types.pointer, else a lot of tests # break. We need to rethink how pointers are represented, though - if my_ffitype.deref_pointer() != ffitype.deref_pointer() \ - and ffitype != _ffi.types.pointer: + if my_ffitype.deref_pointer() != ffitype.deref_pointer(): raise ArgumentError, "expected %s instance, got %s" % (type(self), ffitype) return self._get_buffer_value() From commits-noreply at bitbucket.org Fri Mar 25 12:11:56 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 12:11:56 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: conversion from arbitrary pointers to void* is always allowed Message-ID: <20110325111156.E80FD2A2036@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42932:45edbdf84113 Date: 2011-03-25 12:10 +0100 http://bitbucket.org/pypy/pypy/changeset/45edbdf84113/ Log: conversion from arbitrary pointers to void* is always allowed diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py --- a/lib_pypy/_ctypes/pointer.py +++ b/lib_pypy/_ctypes/pointer.py @@ -120,7 +120,8 @@ my_ffitype = type(self).get_ffi_argtype() # for now, we always allow types.pointer, else a lot of tests # break. 
We need to rethink how pointers are represented, though - if my_ffitype.deref_pointer() != ffitype.deref_pointer(): + if my_ffitype.deref_pointer() != ffitype.deref_pointer() and \ + ffitype is not _ffi.types.pointer: raise ArgumentError, "expected %s instance, got %s" % (type(self), ffitype) return self._get_buffer_value() From commits-noreply at bitbucket.org Fri Mar 25 12:47:49 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 12:47:49 +0100 (CET) Subject: [pypy-svn] pypy default: remove some oopspecs in rdict to make the JIT trace the hash functions in Message-ID: <20110325114749.E2CF2282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: Changeset: r42933:ba5a9e3972e8 Date: 2011-03-25 12:47 +0100 http://bitbucket.org/pypy/pypy/changeset/ba5a9e3972e8/ Log: remove some oopspecs in rdict to make the JIT trace the hash functions in dicts. this makes it necessary to hide some interior field manipulation in a helper function. diff --git a/pypy/jit/codewriter/support.py b/pypy/jit/codewriter/support.py --- a/pypy/jit/codewriter/support.py +++ b/pypy/jit/codewriter/support.py @@ -399,12 +399,7 @@ return ll_rdict.ll_newdict(DICT) _ll_0_newdict.need_result_type = True - _ll_2_dict_getitem = ll_rdict.ll_dict_getitem - _ll_3_dict_setitem = ll_rdict.ll_dict_setitem _ll_2_dict_delitem = ll_rdict.ll_dict_delitem - _ll_3_dict_setdefault = ll_rdict.ll_setdefault - _ll_2_dict_contains = ll_rdict.ll_contains - _ll_3_dict_get = ll_rdict.ll_get _ll_1_dict_copy = ll_rdict.ll_copy _ll_1_dict_clear = ll_rdict.ll_clear _ll_2_dict_update = ll_rdict.ll_update diff --git a/pypy/rpython/lltypesystem/rdict.py b/pypy/rpython/lltypesystem/rdict.py --- a/pypy/rpython/lltypesystem/rdict.py +++ b/pypy/rpython/lltypesystem/rdict.py @@ -7,7 +7,7 @@ from pypy.rlib.rarithmetic import r_uint, intmask, LONG_BIT from pypy.rlib.objectmodel import hlinvoke from pypy.rpython import robject -from pypy.rlib import objectmodel +from pypy.rlib import objectmodel, jit from 
pypy.rpython import rmodel HIGHEST_BIT = intmask(1 << (LONG_BIT - 1)) @@ -408,6 +408,10 @@ ENTRIES = lltype.typeOf(entries).TO return ENTRIES.fasthashfn(entries[i].key) + at jit.dont_look_inside +def ll_get_value(d, i): + return d.entries[i].value + def ll_keyhash_custom(d, key): DICT = lltype.typeOf(d).TO return hlinvoke(DICT.r_rdict_hashfn, d.fnkeyhash, key) @@ -426,17 +430,16 @@ def ll_dict_getitem(d, key): i = ll_dict_lookup(d, key, d.keyhash(key)) if not i & HIGHEST_BIT: - return d.entries[i].value + return ll_get_value(d, i) else: raise KeyError -ll_dict_getitem.oopspec = 'dict.getitem(d, key)' def ll_dict_setitem(d, key, value): hash = d.keyhash(key) i = ll_dict_lookup(d, key, hash) return _ll_dict_setitem_lookup_done(d, key, value, hash, i) -ll_dict_setitem.oopspec = 'dict.setitem(d, key, value)' + at jit.dont_look_inside def _ll_dict_setitem_lookup_done(d, key, value, hash, i): valid = (i & HIGHEST_BIT) == 0 i = i & MASK @@ -717,23 +720,19 @@ def ll_get(dict, key, default): i = ll_dict_lookup(dict, key, dict.keyhash(key)) - entries = dict.entries if not i & HIGHEST_BIT: - return entries[i].value + return ll_get_value(dict, i) else: return default -ll_get.oopspec = 'dict.get(dict, key, default)' def ll_setdefault(dict, key, default): hash = dict.keyhash(key) i = ll_dict_lookup(dict, key, hash) - entries = dict.entries if not i & HIGHEST_BIT: - return entries[i].value + return ll_get_value(dict, i) else: _ll_dict_setitem_lookup_done(dict, key, default, hash, i) return default -ll_setdefault.oopspec = 'dict.setdefault(dict, key, default)' def ll_copy(dict): DICT = lltype.typeOf(dict).TO @@ -829,7 +828,6 @@ def ll_contains(d, key): i = ll_dict_lookup(d, key, d.keyhash(key)) return not i & HIGHEST_BIT -ll_contains.oopspec = 'dict.contains(d, key)' POPITEMINDEX = lltype.Struct('PopItemIndex', ('nextindex', lltype.Signed)) global_popitem_index = lltype.malloc(POPITEMINDEX, zero=True, immortal=True) diff --git a/pypy/jit/metainterp/test/test_dict.py 
b/pypy/jit/metainterp/test/test_dict.py --- a/pypy/jit/metainterp/test/test_dict.py +++ b/pypy/jit/metainterp/test/test_dict.py @@ -1,6 +1,7 @@ import py from pypy.jit.metainterp.test.test_basic import LLJitMixin, OOJitMixin from pypy.rlib.jit import JitDriver +from pypy.rlib import objectmodel class DictTests: @@ -69,6 +70,66 @@ res = self.meta_interp(f, [10], listops=True) assert res == expected + def test_dict_trace_hash(self): + myjitdriver = JitDriver(greens = [], reds = ['total', 'dct']) + def key(x): + return x % 2 + def eq(x, y): + return (x % 2) == (y % 2) + + def f(n): + dct = objectmodel.r_dict(eq, key) + total = n + while total: + myjitdriver.jit_merge_point(total=total, dct=dct) + if total not in dct: + dct[total] = [] + dct[total].append(total) + total -= 1 + return len(dct[0]) + + res1 = f(100) + res2 = self.meta_interp(f, [100], listops=True) + assert res1 == res2 + self.check_loops(int_mod=1) # the hash was traced + + def test_dict_setdefault(self): + myjitdriver = JitDriver(greens = [], reds = ['total', 'dct']) + def f(n): + dct = {} + total = n + while total: + myjitdriver.jit_merge_point(total=total, dct=dct) + dct.setdefault(total % 2, []).append(total) + total -= 1 + return len(dct[0]) + + assert f(100) == 50 + res = self.meta_interp(f, [100], listops=True) + assert res == 50 + self.check_loops(new=0, new_with_vtable=0) + + def test_dict_as_counter(self): + myjitdriver = JitDriver(greens = [], reds = ['total', 'dct']) + def key(x): + return x % 2 + def eq(x, y): + return (x % 2) == (y % 2) + + def f(n): + dct = objectmodel.r_dict(eq, key) + total = n + while total: + myjitdriver.jit_merge_point(total=total, dct=dct) + dct[total] = dct.get(total, 0) + 1 + total -= 1 + return dct[0] + + assert f(100) == 50 + res = self.meta_interp(f, [100], listops=True) + assert res == 50 + self.check_loops(int_mod=1) + class TestOOtype(DictTests, OOJitMixin): pass From commits-noreply at bitbucket.org Fri Mar 25 15:07:38 2011 From: commits-noreply at 
bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 15:07:38 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: rename _ffi.types.pointer to void_p, to underline that it is a untyped pointer Message-ID: <20110325140738.4A4EB282BDD@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42934:24e432f3c808 Date: 2011-03-25 12:21 +0100 http://bitbucket.org/pypy/pypy/changeset/24e432f3c808/ Log: rename _ffi.types.pointer to void_p, to underline that it is a untyped pointer diff --git a/pypy/module/_ffi/test/test__ffi.py b/pypy/module/_ffi/test/test__ffi.py --- a/pypy/module/_ffi/test/test__ffi.py +++ b/pypy/module/_ffi/test/test__ffi.py @@ -148,9 +148,9 @@ from _ffi import CDLL, types libfoo = CDLL(self.libfoo_name) get_dummy = libfoo.getfunc('get_dummy', [], types.sint) - get_dummy_ptr = libfoo.getfunc('get_dummy_ptr', [], types.pointer) + get_dummy_ptr = libfoo.getfunc('get_dummy_ptr', [], types.void_p) set_val_to_ptr = libfoo.getfunc('set_val_to_ptr', - [types.pointer, types.sint], + [types.void_p, types.sint], types.void) assert get_dummy() == 0 ptr = get_dummy_ptr() @@ -170,14 +170,14 @@ def __init__(self, value): self.value = value def _as_ffi_pointer_(self, ffitype): - assert ffitype is types.pointer + assert ffitype is types.void_p return self.value libfoo = CDLL(self.libfoo_name) get_dummy = libfoo.getfunc('get_dummy', [], types.sint) - get_dummy_ptr = libfoo.getfunc('get_dummy_ptr', [], types.pointer) + get_dummy_ptr = libfoo.getfunc('get_dummy_ptr', [], types.void_p) set_val_to_ptr = libfoo.getfunc('set_val_to_ptr', - [types.pointer, types.sint], + [types.void_p, types.sint], types.void) assert get_dummy() == 0 ptr = get_dummy_ptr() @@ -222,7 +222,7 @@ import sys from _ffi import CDLL, types libfoo = CDLL(self.libfoo_name) - is_null_ptr = libfoo.getfunc('is_null_ptr', [types.pointer], types.ulong) + is_null_ptr = libfoo.getfunc('is_null_ptr', [types.void_p], types.ulong) assert not is_null_ptr(sys.maxint+1) def test_unsigned_long_args(self): diff --git 
a/pypy/module/_ffi/interp_ffi.py b/pypy/module/_ffi/interp_ffi.py --- a/pypy/module/_ffi/interp_ffi.py +++ b/pypy/module/_ffi/interp_ffi.py @@ -104,7 +104,7 @@ W_FFIType('double', libffi.types.double), W_FFIType('float', libffi.types.float), W_FFIType('void', libffi.types.void), - W_FFIType('pointer', libffi.types.pointer), + W_FFIType('void_p', libffi.types.pointer), # # missing types: From commits-noreply at bitbucket.org Fri Mar 25 15:07:38 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 15:07:38 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: s/types.pointer/types.void_p Message-ID: <20110325140738.DFECA282BDD@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42935:d7693a1c25e7 Date: 2011-03-25 15:05 +0100 http://bitbucket.org/pypy/pypy/changeset/d7693a1c25e7/ Log: s/types.pointer/types.void_p diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py --- a/lib_pypy/_ctypes/basics.py +++ b/lib_pypy/_ctypes/basics.py @@ -207,10 +207,10 @@ 'Q' : _ffi.types.ulonglong, 'f' : _ffi.types.float, 'd' : _ffi.types.double, - 's' : _ffi.types.pointer, - 'P' : _ffi.types.pointer, - 'z' : _ffi.types.pointer, - 'O' : _ffi.types.pointer, - 'Z' : _ffi.types.pointer, + 's' : _ffi.types.void_p, + 'P' : _ffi.types.void_p, + 'z' : _ffi.types.void_p, + 'O' : _ffi.types.void_p, + 'Z' : _ffi.types.void_p, } From commits-noreply at bitbucket.org Fri Mar 25 15:07:39 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 15:07:39 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: merge heads Message-ID: <20110325140739.6DA88282BDD@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42936:a0dfeec2f19a Date: 2011-03-25 15:06 +0100 http://bitbucket.org/pypy/pypy/changeset/a0dfeec2f19a/ Log: merge heads diff --git a/lib_pypy/_ctypes/basics.py b/lib_pypy/_ctypes/basics.py --- a/lib_pypy/_ctypes/basics.py +++ b/lib_pypy/_ctypes/basics.py @@ -50,6 +50,11 @@ def get_ffi_param(self, value): return 
self.from_param(value)._to_ffi_param() + def get_ffi_argtype(self): + if self._ffiargtype: + return self._ffiargtype + return _shape_to_ffi_type(self._ffiargshape) + def _CData_output(self, resbuffer, base=None, index=-1): #assert isinstance(resbuffer, _rawffi.ArrayInstance) """Used when data exits ctypes and goes into user code. @@ -103,6 +108,7 @@ """ __metaclass__ = _CDataMeta _objects = None + _ffiargtype = None def __init__(self, *args, **kwds): raise TypeError("%s has no type" % (type(self),)) @@ -181,9 +187,9 @@ isinstance(shape[0], _rawffi.Structure) and shape[1] == 1) -def shape_to_ffi_type(shape): +def _shape_to_ffi_type(shape): try: - return shape_to_ffi_type.typemap[shape] + return _shape_to_ffi_type.typemap[shape] except KeyError: pass if is_struct_shape(shape): @@ -192,7 +198,7 @@ assert False, 'unknown shape %s' % (shape,) -shape_to_ffi_type.typemap = { +_shape_to_ffi_type.typemap = { 'c' : _ffi.types.char, 'b' : _ffi.types.sbyte, 'B' : _ffi.types.ubyte, From commits-noreply at bitbucket.org Fri Mar 25 15:07:40 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Fri, 25 Mar 2011 15:07:40 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: fix the last occurence of types.void_p Message-ID: <20110325140740.93DC92A2035@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42937:f14e1bafc32c Date: 2011-03-25 15:07 +0100 http://bitbucket.org/pypy/pypy/changeset/f14e1bafc32c/ Log: fix the last occurence of types.void_p diff --git a/lib_pypy/_ctypes/pointer.py b/lib_pypy/_ctypes/pointer.py --- a/lib_pypy/_ctypes/pointer.py +++ b/lib_pypy/_ctypes/pointer.py @@ -121,7 +121,7 @@ # for now, we always allow types.pointer, else a lot of tests # break. 
We need to rethink how pointers are represented, though if my_ffitype.deref_pointer() != ffitype.deref_pointer() and \ - ffitype is not _ffi.types.pointer: + ffitype is not _ffi.types.void_p: raise ArgumentError, "expected %s instance, got %s" % (type(self), ffitype) return self._get_buffer_value() From commits-noreply at bitbucket.org Fri Mar 25 15:11:51 2011 From: commits-noreply at bitbucket.org (arigo) Date: Fri, 25 Mar 2011 15:11:51 +0100 (CET) Subject: [pypy-svn] pypy default: By default, disable 'withsmalllong'. It seems to not give the Message-ID: <20110325141151.15EA6282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42938:2e526cf60816 Date: 2011-03-25 15:11 +0100 http://bitbucket.org/pypy/pypy/changeset/2e526cf60816/ Log: By default, disable 'withsmalllong'. It seems to not give the expected speed-ups right now (will confirm it by looking at the performance in the following days). diff --git a/pypy/config/pypyoption.py b/pypy/config/pypyoption.py --- a/pypy/config/pypyoption.py +++ b/pypy/config/pypyoption.py @@ -357,8 +357,8 @@ config.objspace.std.suggest(optimized_list_getitem=True) config.objspace.std.suggest(getattributeshortcut=True) config.objspace.std.suggest(newshortcut=True) - if not IS_64_BITS: - config.objspace.std.suggest(withsmalllong=True) + #if not IS_64_BITS: + # config.objspace.std.suggest(withsmalllong=True) # extra costly optimizations only go in level 3 if level == '3': From commits-noreply at bitbucket.org Fri Mar 25 17:39:57 2011 From: commits-noreply at bitbucket.org (arigo) Date: Fri, 25 Mar 2011 17:39:57 +0100 (CET) Subject: [pypy-svn] pypy default: Remove the scary sequence of operations for int_mod, and Message-ID: <20110325163957.1624F282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42939:e43b8c3ccf87 Date: 2011-03-25 17:25 +0100 http://bitbucket.org/pypy/pypy/changeset/e43b8c3ccf87/ Log: Remove the scary sequence of operations for int_mod, and replace it with a call to a helper. 
The helper contains an 'if:else:' path, but it should generate far better code, both normally and when JITted. diff --git a/pypy/rpython/test/test_rint.py b/pypy/rpython/test/test_rint.py --- a/pypy/rpython/test/test_rint.py +++ b/pypy/rpython/test/test_rint.py @@ -266,6 +266,8 @@ x = inttype(random.randint(-100000, 100000)) y = inttype(random.randint(-100000, 100000)) if not y: continue + if (i & 31) == 0: + x = (x//y) * y # case where x is exactly divisible by y res = self.interpret(d, [x, y]) assert res == d(x, y) @@ -276,6 +278,8 @@ x = inttype(random.randint(-100000, 100000)) y = inttype(random.randint(-100000, 100000)) if not y: continue + if (i & 31) == 0: + x = (x//y) * y # case where x is exactly divisible by y res = self.interpret(m, [x, y]) assert res == m(x, y) diff --git a/pypy/rpython/rint.py b/pypy/rpython/rint.py --- a/pypy/rpython/rint.py +++ b/pypy/rpython/rint.py @@ -239,25 +239,26 @@ v_res = hop.genop(prefix + 'sub', [v_res, v_corr], resulttype=repr) elif op == 'mod': - # return r + y*(((x^y)<0)&(r!=0)); - v_xor = hop.genop(prefix + 'xor', vlist, - resulttype=repr) - v_xor_le = hop.genop(prefix + 'lt', [v_xor, c_zero], - resulttype=Bool) - v_xor_le = hop.llops.convertvar(v_xor_le, bool_repr, repr) - v_mod_ne = hop.genop(prefix + 'ne', [v_res, c_zero], - resulttype=Bool) - v_mod_ne = hop.llops.convertvar(v_mod_ne, bool_repr, repr) - v_corr1 = hop.genop(prefix + 'and', [v_xor_le, v_mod_ne], - resulttype=repr) - v_corr = hop.genop(prefix + 'mul', [v_corr1, vlist[1]], - resulttype=repr) - v_res = hop.genop(prefix + 'add', [v_res, v_corr], - resulttype=repr) + llfunc = globals()['ll_correct_' + prefix + 'mod'] + v_res = hop.gendirectcall(llfunc, vlist[1], v_res) v_res = hop.llops.convertvar(v_res, repr, r_result) return v_res +INT_BITS_1 = r_int.BITS - 1 +LLONG_BITS_1 = r_longlong.BITS - 1 + +def ll_correct_int_mod(y, r): + if y < 0: u = -r + else: u = r + return r + (y & (u >> INT_BITS_1)) + +def ll_correct_llong_mod(y, r): + if y < 0: u = -r + 
else: u = r + return r + (y & (u >> LLONG_BITS_1)) + + #Helper functions for comparisons def _rtype_compare_template(hop, func): From commits-noreply at bitbucket.org Fri Mar 25 17:39:57 2011 From: commits-noreply at bitbucket.org (arigo) Date: Fri, 25 Mar 2011 17:39:57 +0100 (CET) Subject: [pypy-svn] pypy default: Rewrite in the same way int_floordiv. Message-ID: <20110325163957.9E736282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42940:8e5db514fc1f Date: 2011-03-25 17:34 +0100 http://bitbucket.org/pypy/pypy/changeset/8e5db514fc1f/ Log: Rewrite in the same way int_floordiv. diff --git a/pypy/rpython/rint.py b/pypy/rpython/rint.py --- a/pypy/rpython/rint.py +++ b/pypy/rpython/rint.py @@ -212,35 +212,18 @@ # cpython, and rpython, assumed that integer division truncates # towards -infinity. however, in C99 and most (all?) other # backends, integer division truncates towards 0. so assuming - # that, we can generate scary code that applies the necessary + # that, we call a helper function that applies the necessary # correction in the right cases. 
- # paper and pencil are encouraged for this :) - - from pypy.rpython.rbool import bool_repr - assert isinstance(repr.lowleveltype, Number) - c_zero = inputconst(repr.lowleveltype, repr.lowleveltype._default) op = func.split('_', 1)[0] if op == 'floordiv': - # return (x/y) - (((x^y)<0)&((x%y)!=0)); - v_xor = hop.genop(prefix + 'xor', vlist, - resulttype=repr) - v_xor_le = hop.genop(prefix + 'lt', [v_xor, c_zero], - resulttype=Bool) - v_xor_le = hop.llops.convertvar(v_xor_le, bool_repr, repr) - v_mod = hop.genop(prefix + 'mod', vlist, - resulttype=repr) - v_mod_ne = hop.genop(prefix + 'ne', [v_mod, c_zero], - resulttype=Bool) - v_mod_ne = hop.llops.convertvar(v_mod_ne, bool_repr, repr) - v_corr = hop.genop(prefix + 'and', [v_xor_le, v_mod_ne], - resulttype=repr) - v_res = hop.genop(prefix + 'sub', [v_res, v_corr], - resulttype=repr) + llfunc = globals()['ll_correct_' + prefix + 'floordiv'] + v_res = hop.gendirectcall(llfunc, vlist[0], vlist[1], v_res) elif op == 'mod': llfunc = globals()['ll_correct_' + prefix + 'mod'] v_res = hop.gendirectcall(llfunc, vlist[1], v_res) + v_res = hop.llops.convertvar(v_res, repr, r_result) return v_res @@ -248,6 +231,18 @@ INT_BITS_1 = r_int.BITS - 1 LLONG_BITS_1 = r_longlong.BITS - 1 +def ll_correct_int_floordiv(x, y, r): + p = r * y + if y < 0: u = p - x + else: u = x - p + return r + (u >> INT_BITS_1) + +def ll_correct_llong_floordiv(x, y, r): + p = r * y + if y < 0: u = p - x + else: u = x - p + return r + (u >> LLONG_BITS_1) + def ll_correct_int_mod(y, r): if y < 0: u = -r else: u = r From commits-noreply at bitbucket.org Fri Mar 25 18:10:39 2011 From: commits-noreply at bitbucket.org (fijal) Date: Fri, 25 Mar 2011 18:10:39 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Define semantics - debug_llinterpcall can raise any exception that will Message-ID: <20110325171039.01661282BDD@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42941:4545107df688 Date: 2011-03-25 11:09 -0600 
http://bitbucket.org/pypy/pypy/changeset/4545107df688/ Log: Define semantics - debug_llinterpcall can raise any exception that will be wrapped diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -553,7 +553,8 @@ 'debug_pdb': LLOp(), 'debug_assert': LLOp(tryfold=True), 'debug_fatalerror': LLOp(), - 'debug_llinterpcall': LLOp(), # Python func call 'res=arg[0](*arg[1:])' + 'debug_llinterpcall': LLOp(canraise=(Exception,)), + # Python func call 'res=arg[0](*arg[1:])' # in backends, abort() or whatever is fine 'debug_start_traceback': LLOp(), 'debug_record_traceback': LLOp(), diff --git a/pypy/rlib/debug.py b/pypy/rlib/debug.py --- a/pypy/rlib/debug.py +++ b/pypy/rlib/debug.py @@ -175,6 +175,7 @@ c_pythonfunction = hop.inputconst(lltype.Void, pythonfunction) args_v = [hop.inputarg(hop.args_r[i], arg=i) for i in range(2, hop.nb_args)] + hop.exception_is_here() return hop.genop('debug_llinterpcall', [c_pythonfunction] + args_v, resulttype=RESTYPE) diff --git a/pypy/rpython/test/test_llinterp.py b/pypy/rpython/test/test_llinterp.py --- a/pypy/rpython/test/test_llinterp.py +++ b/pypy/rpython/test/test_llinterp.py @@ -658,3 +658,25 @@ assert x == -42 res = interpret(f, []) + +def test_raising_llimpl(): + from pypy.rpython.extfunc import register_external + + def external(): + pass + + def raising(): + raise OSError(15, "abcd") + + ext = register_external(external, [], llimpl=raising, llfakeimpl=raising) + + def f(): + # this is a useful llfakeimpl that raises an exception + try: + external() + return True + except OSError: + return False + + res = interpret(f, []) + assert not res diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py --- a/pypy/rpython/llinterp.py +++ b/pypy/rpython/llinterp.py @@ -532,7 +532,10 @@ raise LLFatalError(msg, LLException(ll_exc_type, ll_exc)) def op_debug_llinterpcall(self, pythonfunction, 
*args_ll): - return pythonfunction(*args_ll) + try: + return pythonfunction(*args_ll) + except: + self.make_llexception() def op_debug_start_traceback(self, *args): pass # xxx write debugging code here? From commits-noreply at bitbucket.org Fri Mar 25 18:10:40 2011 From: commits-noreply at bitbucket.org (fijal) Date: Fri, 25 Mar 2011 18:10:40 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: Merge default Message-ID: <20110325171040.4316A282BDE@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42942:92d5a34ee6aa Date: 2011-03-25 11:10 -0600 http://bitbucket.org/pypy/pypy/changeset/92d5a34ee6aa/ Log: Merge default From commits-noreply at bitbucket.org Fri Mar 25 18:12:08 2011 From: commits-noreply at bitbucket.org (fijal) Date: Fri, 25 Mar 2011 18:12:08 +0100 (CET) Subject: [pypy-svn] pypy jit-lsprofile: enable lsprof module Message-ID: <20110325171208.241E9282BDD@codespeak.net> Author: Maciej Fijalkowski Branch: jit-lsprofile Changeset: r42943:d95fe81efcdc Date: 2011-03-25 11:11 -0600 http://bitbucket.org/pypy/pypy/changeset/d95fe81efcdc/ Log: enable lsprof module diff --git a/pypy/jit/tl/pypyjit.py b/pypy/jit/tl/pypyjit.py --- a/pypy/jit/tl/pypyjit.py +++ b/pypy/jit/tl/pypyjit.py @@ -39,6 +39,7 @@ config.objspace.usemodules.array = True config.objspace.usemodules._weakref = True config.objspace.usemodules._sre = False +config.objspace.usemodules._lsprof = True # config.objspace.usemodules._ffi = True # From commits-noreply at bitbucket.org Fri Mar 25 19:27:53 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 25 Mar 2011 19:27:53 +0100 (CET) Subject: [pypy-svn] pypy default: Ensure that a PyTypeObject is "realized" only once, either directly with PyType_Ready Message-ID: <20110325182753.3E10B282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42944:2f175371d552 Date: 2011-03-25 16:08 +0100 http://bitbucket.org/pypy/pypy/changeset/2f175371d552/ Log: Ensure that a PyTypeObject is "realized" only once, 
either directly with PyType_Ready or recursively when it is referenced in tp_base. diff --git a/pypy/module/cpyext/typeobject.py b/pypy/module/cpyext/typeobject.py --- a/pypy/module/cpyext/typeobject.py +++ b/pypy/module/cpyext/typeobject.py @@ -3,7 +3,7 @@ from pypy.rpython.lltypesystem import rffi, lltype from pypy.rpython.annlowlevel import llhelper -from pypy.interpreter.baseobjspace import DescrMismatch +from pypy.interpreter.baseobjspace import W_Root, DescrMismatch from pypy.objspace.std.typeobject import W_TypeObject from pypy.interpreter.typedef import GetSetProperty from pypy.module.cpyext.api import ( @@ -295,7 +295,8 @@ def init_typeobject(space): # Probably a hack space.model.typeorder[W_PyCTypeObject] = [(W_PyCTypeObject, None), - (W_TypeObject, None)] + (W_TypeObject, None), + (W_Root, None)] make_typedescr(space.w_type.instancetypedef, basestruct=PyTypeObject, @@ -477,14 +478,19 @@ def PyType_Ready(space, pto): if pto.c_tp_flags & Py_TPFLAGS_READY: return 0 + type_realize(space, rffi.cast(PyObject, pto)) + return 0 + +def type_realize(space, py_obj): + pto = rffi.cast(PyTypeObjectPtr, py_obj) assert pto.c_tp_flags & Py_TPFLAGS_READYING == 0 pto.c_tp_flags |= Py_TPFLAGS_READYING try: - type_realize(space, rffi.cast(PyObject, pto)) - pto.c_tp_flags |= Py_TPFLAGS_READY + w_obj = _type_realize(space, py_obj) finally: pto.c_tp_flags &= ~Py_TPFLAGS_READYING - return 0 + pto.c_tp_flags |= Py_TPFLAGS_READY + return w_obj def solid_base(space, w_type): typedef = w_type.instancetypedef @@ -540,7 +546,7 @@ finally: Py_DecRef(space, base_pyo) -def type_realize(space, py_obj): +def _type_realize(space, py_obj): """ Creates an interpreter type from a PyTypeObject structure. """ From commits-noreply at bitbucket.org Fri Mar 25 19:27:54 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 25 Mar 2011 19:27:54 +0100 (CET) Subject: [pypy-svn] pypy default: cpyext: Fix call to the tp_getattro type slot. 
Message-ID: <20110325182754.1137F282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42945:f3bddf2a88e0 Date: 2011-03-25 17:23 +0100 http://bitbucket.org/pypy/pypy/changeset/f3bddf2a88e0/ Log: cpyext: Fix call to the tp_getattro type slot. diff --git a/pypy/module/cpyext/test/test_bufferobject.py b/pypy/module/cpyext/test/test_bufferobject.py --- a/pypy/module/cpyext/test/test_bufferobject.py +++ b/pypy/module/cpyext/test/test_bufferobject.py @@ -46,4 +46,5 @@ return PyBuffer_New(150); """), ]) - module.buffer_new() + b = module.buffer_new() + raises(AttributeError, getattr, b, 'x') diff --git a/pypy/module/cpyext/slotdefs.py b/pypy/module/cpyext/slotdefs.py --- a/pypy/module/cpyext/slotdefs.py +++ b/pypy/module/cpyext/slotdefs.py @@ -4,9 +4,9 @@ from pypy.module.cpyext.api import generic_cpy_call, cpython_api, PyObject from pypy.module.cpyext.typeobjectdefs import ( unaryfunc, wrapperfunc, ternaryfunc, PyTypeObjectPtr, binaryfunc, - getattrfunc, setattrofunc, lenfunc, ssizeargfunc, ssizessizeargfunc, - ssizeobjargproc, iternextfunc, initproc, richcmpfunc, hashfunc, - descrgetfunc, descrsetfunc, objobjproc) + getattrfunc, getattrofunc, setattrofunc, lenfunc, ssizeargfunc, + ssizessizeargfunc, ssizeobjargproc, iternextfunc, initproc, richcmpfunc, + hashfunc, descrgetfunc, descrsetfunc, objobjproc) from pypy.module.cpyext.pyobject import from_ref from pypy.module.cpyext.pyerrors import PyErr_Occurred from pypy.module.cpyext.state import State @@ -65,6 +65,12 @@ finally: rffi.free_charp(name_ptr) +def wrap_getattro(space, w_self, w_args, func): + func_target = rffi.cast(getattrofunc, func) + check_num_args(space, w_args, 1) + args_w = space.fixedview(w_args) + return generic_cpy_call(space, func_target, w_self, args_w[0]) + def wrap_setattr(space, w_self, w_args, func): func_target = rffi.cast(setattrofunc, func) check_num_args(space, w_args, 2) @@ -289,7 +295,12 @@ # irregular interface, because of tp_getattr/tp_getattro confusion if NAME == 
"__getattr__": - wrapper = wrap_getattr + if SLOT == "tp_getattro": + wrapper = wrap_getattro + elif SLOT == "tp_getattr": + wrapper = wrap_getattr + else: + assert False function = globals().get(FUNCTION, None) assert FLAGS == 0 or FLAGS == PyWrapperFlag_KEYWORDS From commits-noreply at bitbucket.org Fri Mar 25 20:37:30 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 20:37:30 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: flesh out the introduction Message-ID: <20110325193730.44D12282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3403:8940b7d8503e Date: 2011-03-25 20:36 +0100 http://bitbucket.org/pypy/extradoc/changeset/8940b7d8503e/ Log: flesh out the introduction diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -93,35 +93,59 @@ It has long been an objective of the partial evaluation community to automatically produce compilers from interpreters. There has been a recent renaissance of this idea using the different technique of tracing just-in-time -compilers. A number of projects have attempted this approach. SPUR \cite{XXX} -is a tracing JIT for .NET together with a JavaScript implementation in C\#. XXX +compilers. A number of projects have attempted this approach. SPUR \cite{XXX} is +a tracing JIT for .NET together with a JavaScript implementation in C\#. PyPy +\cite{armin_rigo_pypys_2006} contains a tracing JIT for RPython (a restricted +subset of Python). This JIT is then used to trace a number of languages +implementations written in RPython. A number of other experiments in this +directions were done, such as an interpreter for Lua in JavaScript, which is run +on and optimized with a tracing JIT for JavaScript +\cite{yermolovich_optimization_2009}. -All these projects have in common that they trace an implementation language -which is then used to implement an object model of a dynamic language. 
The -tracer then traces through this object model, which makes the object model -transparent to the tracer and its optimizations. Therefore the semantics of the -dynamic language does not have to be replicated in the JIT. We call this -approach \emph{meta-tracing}. We will give an introduction to the PyPy project -and to meta-tracing in Section~\ref{sec:Background}. +These projects have in common that they implement a dynamic language in some +implementation language. In addition they build a tracing JIT for that implementation +language. The tracing JIT then traces through the object model of the dynamic +language implementation. This makes the object model transparent to the tracer +and its optimizations. Therefore the semantics of the dynamic language does not +have to be replicated in a JIT. We call this approach \emph{meta-tracing}. +Another commonality of these approaches is that they allow some annotations (or +hints) in the dynamic language implementation to guide the meta-tracer. This +makes the process not completely automatic but can give good speedups over +bare meta-tracing. -Another commonality of all these approaches is that they require some -annotations (or hints) in the dynamic language implementation to guide the -meta-tracer. SPUR and PyPy both provide the interpreter author with more -elaborate hints to influence the meta-tracer and its optimizer. +In this paper we present two of these hints that are extensively used in the +PyPy project to improve the performance of its Python interpreter. + The PyPy's hints go even further than SPUR's in that they provide the interpreter author with a flexible toolset to make her implementation extremely efficient. In this paper we present the two most prominent ones and show how classical -implementation techniques of dynamic languages can be expressed with them. +implementation techniques of dynamic languages can be expressed with them. 
These +hints are used to control how the optimizer of the tracing JIT can improve the +traces of the object model. More specifically, these hints influence the +constant folding optimization. The first hint make it possible to turn arbitrary +variables in the trace into constants. The second hint allows the definition of +additional foldable operations. + +Together these two hints can be used to express many classic implementation +techniques used for object models of dynamic languages, such as maps and +polymorphic inline caches. The contributions of this paper are: \begin{itemize} - \item A hint to introduce arbitrary constants into the trace. - \item A way to define new pure operations which the optimizer then recognizes. - \item A worked-out example of a simple object model of a dynamic language and how it can be improved using these hints. + \item A hint to turn arbitrary variables into constants in the trace. + \item A way to define new pure operations which the constant folding + optimization then recognizes. + \item A worked-out example of a simple object model of a dynamic language and + how it can be improved using these hints. \end{itemize} - +The paper is structured as follows: Section~\ref{sec:background} gives an +introduction to the PyPy project and meta-tracing and presents an example of a +tiny dynamic language object model. Section~\ref{sec:hints} presents the hints, +what they do and how they are applied. Section~\ref{sec:fastobjmodel} shows how +the hints are applied to the tiny object model and Section~\ref{sec:evaluation} +presents benchmarks. 
@@ -280,7 +304,8 @@ %___________________________________________________________________________ -\section{Controlling Optimization} +\section{Hints for Controlling Optimization} +\label{sec:hints} In this section we will describe how to add two hints that allow the interpreter author to increase the optimization opportunities for constant @@ -559,7 +584,7 @@ \subsection{Conclusion} -In this section we presented two more hints that can be used in the source code +In this section we presented two hints that can be used in the source code of the interpreter. They are used to influence what the optimizer does with the trace. The examples given here are a bit too small, the next section gives a worked-out example that puts all the pieces together. @@ -567,6 +592,7 @@ %___________________________________________________________________________ \section{Putting It All Together} +\label{sec:fastobjmodel} In this section we describe how the simple object model from Section~\ref{sub:running} can be made efficient using the hints described in the @@ -762,7 +788,7 @@ makes operations on objects seriously faster. \section{Evaluation} -\label{sect:evaluation} +\label{sec:evaluation} \section{Related Work} diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -219,6 +219,21 @@ pages = {144--153} }, + at inproceedings{yermolovich_optimization_2009, + address = {Orlando, Florida, {USA}}, + title = {Optimization of dynamic languages using hierarchical layering of virtual machines}, + isbn = {978-1-60558-769-1}, + url = {http://portal.acm.org/citation.cfm?id=1640134.1640147}, + doi = {10.1145/1640134.1640147}, + abstract = {Creating an interpreter is a simple and fast way to implement a dynamic programming language. With this ease also come major drawbacks. Interpreters are significantly slower than compiled machine code because they have a high dispatch overhead and cannot perform optimizations. 
To overcome these limitations, interpreters are commonly combined with just-in-time compilers to improve the overall performance. However, this means that a just-in-time compiler has to be implemented for each language.}, + booktitle = {Proceedings of the 5th symposium on Dynamic languages}, + publisher = {{ACM}}, + author = {Alexander Yermolovich and Christian Wimmer and Michael Franz}, + year = {2009}, + keywords = {actionscript, dynamic languages, hierarchical virtual machines, trace compilation}, + pages = {79--88} +}, + @inproceedings{carl_friedrich_bolz_how_2007, title = {How to not write a Virtual Machine}, abstract = {Typical modern dynamic languages have a growing number of implementations. We explore the reasons for this situation, and the limitations it imposes on open source or academic communities that lack the resources to fine-tune and maintain them all. It is sometimes proposed that implementing dynamic languages on top of a standardized general-purpose object-oriented virtual machine (like Java or {.NET)} would help reduce this burden. We propose a complementary alternative to writing custom virtual machine {(VMs)} by hand, validated by the {PyPy} project: flexibly generating {VMs} from a high-level "specification", From commits-noreply at bitbucket.org Fri Mar 25 21:03:38 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Fri, 25 Mar 2011 21:03:38 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: dont force every box in optimizer.values not allowed to propagate to the next iteration. 
And make sure the reconstruct_for_next_iteration methods always creates a new instance to prevent status data from beeing unintentionally propagated Message-ID: <20110325200338.BAE82282BDD@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42946:bb8b6e1f1c9c Date: 2011-03-25 21:01 +0100 http://bitbucket.org/pypy/pypy/changeset/bb8b6e1f1c9c/ Log: dont force every box in optimizer.values not allowed to propagate to the next iteration. And make sure the reconstruct_for_next_iteration methods always creates a new instance to prevent status data from beeing unintentionally propagated diff --git a/pypy/jit/metainterp/optimizeopt/fficall.py b/pypy/jit/metainterp/optimizeopt/fficall.py --- a/pypy/jit/metainterp/optimizeopt/fficall.py +++ b/pypy/jit/metainterp/optimizeopt/fficall.py @@ -67,7 +67,8 @@ def __init__(self): self.funcinfo = None - def reconstruct_for_next_iteration(self, optimizer, valuemap): + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): return OptFfiCall() # FIXME: Should any status be saved for next iteration? diff --git a/pypy/jit/metainterp/optimizeopt/string.py b/pypy/jit/metainterp/optimizeopt/string.py --- a/pypy/jit/metainterp/optimizeopt/string.py +++ b/pypy/jit/metainterp/optimizeopt/string.py @@ -366,9 +366,9 @@ "Handling of strings and unicodes." 
enabled = True - def reconstruct_for_next_iteration(self, optimizer, valuemap): - self.enabled = True - return self + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): + return OptString() def make_vstring_plain(self, box, source_op, mode): vvalue = VStringPlainValue(self.optimizer, box, source_op, mode) diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -267,7 +267,7 @@ virtual_state = modifier.get_virtual_state(jump_args) loop.preamble.operations = self.optimizer.newoperations - self.optimizer = self.optimizer.reconstruct_for_next_iteration() + self.optimizer = self.optimizer.reconstruct_for_next_iteration(jump_args) inputargs = self.inline(self.cloned_operations, loop.inputargs, jump_args) loop.inputargs = inputargs @@ -626,8 +626,9 @@ self.inliner = None - def reconstruct_for_next_iteration(self, optimizer, valuemap): - return self + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): + return OptInlineShortPreamble(self.retraced) def propagate_forward(self, op): if op.getopnum() == rop.JUMP: diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -120,7 +120,8 @@ # cached array items: {descr: CachedArrayItems} self.cached_arrayitems = {} - def reconstruct_for_next_iteration(self, optimizer, valuemap): + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): new = OptHeap() if True: diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -13,8 +13,9 @@ This includes already executed operations and constants. 
""" - def reconstruct_for_next_iteration(self, optimizer, valuemap): - return self + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): + return OptRewrite() def propagate_forward(self, op): args = self.optimizer.make_args_key(op) diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -51,10 +51,15 @@ boxes.append(self.force_box()) already_seen[self.get_key_box()] = None - def get_reconstructed(self, optimizer, valuemap): + def get_reconstructed(self, optimizer, valuemap, force_if_needed=True): if self in valuemap: return valuemap[self] new = self.reconstruct_for_next_iteration(optimizer) + if new is None: + if force_if_needed: + new = optimizer.OptValue(self.force_box()) + else: + return None valuemap[self] = new self.reconstruct_childs(new, valuemap) return new @@ -283,18 +288,19 @@ for o in self.optimizations: o.force_at_end_of_preamble() - def reconstruct_for_next_iteration(self, optimizer=None, valuemap=None): + def reconstruct_for_next_iteration(self, surviving_boxes=None, + optimizer=None, valuemap=None): assert optimizer is None assert valuemap is None + if surviving_boxes is None: + surviving_boxes = [] valuemap = {} new = Optimizer(self.metainterp_sd, self.loop) - optimizations = [o.reconstruct_for_next_iteration(new, valuemap) for o in - self.optimizations] + optimizations = [o.reconstruct_for_next_iteration(surviving_boxes, + new, valuemap) + for o in self.optimizations] new.set_optimizations(optimizations) - new.values = {} - for box, value in self.values.items(): - new.values[box] = value.get_reconstructed(new, valuemap) new.interned_refs = self.interned_refs new.bool_boxes = {} for value in new.bool_boxes.keys(): @@ -310,6 +316,14 @@ new.producer = self.producer assert self.posponedop is None + for box, value in self.values.items(): + box = new.getinterned(box) + 
force = box in surviving_boxes + value = value.get_reconstructed(new, valuemap, + force_if_needed=force) + if value is not None: + new.values[box] = value + return new def turned_constant(self, value): diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -13,9 +13,10 @@ self.posponedop = None self.nextop = None - def reconstruct_for_next_iteration(self, optimizer, valuemap): + def reconstruct_for_next_iteration(self, surviving_boxes, optimizer, + valuemap): assert self.posponedop is None - return self + return OptIntBounds() def propagate_forward(self, op): if op.is_ovf(): diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py --- a/pypy/jit/metainterp/optimizeopt/virtualize.py +++ b/pypy/jit/metainterp/optimizeopt/virtualize.py @@ -47,7 +47,7 @@ raise NotImplementedError("abstract base") def reconstruct_for_next_iteration(self, _optimizer): - return optimizer.OptValue(self.force_box()) + return None def get_fielddescrlist_cache(cpu): if not hasattr(cpu, '_optimizeopt_fielddescrlist_cache'): @@ -261,8 +261,9 @@ class OptVirtualize(optimizer.Optimization): "Virtualize objects until they escape." - def reconstruct_for_next_iteration(self, optimizer, valuemap): - return self + def reconstruct_for_next_iteration(self, surviving_boxes, + optimizer, valuemap): + return OptVirtualize() def make_virtual(self, known_class, box, source_op=None): vvalue = VirtualValue(self.optimizer, known_class, box, source_op) From commits-noreply at bitbucket.org Fri Mar 25 22:31:32 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 25 Mar 2011 22:31:32 +0100 (CET) Subject: [pypy-svn] pypy default: Don't even try to load a dynamic module when cpyext is not enabled. 
Message-ID: <20110325213132.9A083282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42947:0b3d3d480f7e Date: 2011-03-25 22:31 +0100 http://bitbucket.org/pypy/pypy/changeset/0b3d3d480f7e/ Log: Don't even try to load a dynamic module when cpyext is not enabled. Also kill dead code. diff --git a/pypy/module/imp/app_imp.py b/pypy/module/imp/app_imp.py deleted file mode 100644 --- a/pypy/module/imp/app_imp.py +++ /dev/null @@ -1,5 +0,0 @@ - - -def load_dynamic(name, pathname, file=None): - """Always raises ah ImportError on pypy""" - raise ImportError('Not implemented') diff --git a/pypy/module/imp/interp_imp.py b/pypy/module/imp/interp_imp.py --- a/pypy/module/imp/interp_imp.py +++ b/pypy/module/imp/interp_imp.py @@ -128,6 +128,9 @@ @unwrap_spec(filename=str) def load_dynamic(space, w_modulename, filename, w_file=None): + if not space.config.objspace.usemodules.cpyext: + raise OperationError(space.w_ImportError, space.wrap( + "Not implemented")) importing.load_c_extension(space, filename, space.str_w(w_modulename)) return importing.check_sys_modules(space, w_modulename) From commits-noreply at bitbucket.org Fri Mar 25 22:57:08 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 25 Mar 2011 22:57:08 +0100 (CET) Subject: [pypy-svn] pypy default: MixedModule tests make no sense with appdirect. Message-ID: <20110325215708.019EE282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42948:f434ee4f2674 Date: 2011-03-25 22:56 +0100 http://bitbucket.org/pypy/pypy/changeset/f434ee4f2674/ Log: MixedModule tests make no sense with appdirect. 
diff --git a/pypy/interpreter/test/test_extmodules.py b/pypy/interpreter/test/test_extmodules.py --- a/pypy/interpreter/test/test_extmodules.py +++ b/pypy/interpreter/test/test_extmodules.py @@ -1,4 +1,5 @@ import sys +import pytest from pypy.config.pypyoption import get_pypy_config from pypy.objspace.std import StdObjSpace @@ -60,6 +61,7 @@ def teardown_class(cls): sys.path[:] = old_sys_path + @pytest.mark.skipif("config.option.runappdirect") def test_import(self): import extmod assert extmod.__file__.endswith('extmod') diff --git a/pypy/interpreter/gateway.py b/pypy/interpreter/gateway.py --- a/pypy/interpreter/gateway.py +++ b/pypy/interpreter/gateway.py @@ -1126,7 +1126,7 @@ """ if not isinstance(source, str): source = py.std.inspect.getsource(source).lstrip() - while source.startswith('@py.test.mark.'): + while source.startswith(('@py.test.mark.', '@pytest.mark.')): # these decorators are known to return the same function # object, we may ignore them assert '\n' in source From commits-noreply at bitbucket.org Fri Mar 25 23:00:47 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Fri, 25 Mar 2011 23:00:47 +0100 (CET) Subject: [pypy-svn] pypy default: Appdirect tests really run too fast. Slow down a little Message-ID: <20110325220047.6C7A6282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42949:6a70d2e055d6 Date: 2011-03-25 23:00 +0100 http://bitbucket.org/pypy/pypy/changeset/6a70d2e055d6/ Log: Appdirect tests really run too fast. 
Slow down a little diff --git a/pypy/module/thread/test/test_thread.py b/pypy/module/thread/test/test_thread.py --- a/pypy/module/thread/test/test_thread.py +++ b/pypy/module/thread/test/test_thread.py @@ -225,7 +225,7 @@ def busy_wait(): for x in range(1000): - pass + time.sleep(0.01) # This is normally called by app_main.py signal.signal(signal.SIGINT, signal.default_int_handler) From commits-noreply at bitbucket.org Fri Mar 25 23:56:57 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:56:57 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: improve intro and background Message-ID: <20110325225657.55BF7282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3404:90f98d369efc Date: 2011-03-25 21:48 +0100 http://bitbucket.org/pypy/extradoc/changeset/90f98d369efc/ Log: improve intro and background diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -156,17 +156,18 @@ \label{sect:pypy} The PyPy project \cite{armin_rigo_pypys_2006} strives to be an environment where -complex dynamic languages can be efficiently implemented. The approach taken -when implement a language with PyPy is to write an interpreter for the language +complex dynamic languages can be implemented efficiently. The approach taken +when implementing a language with PyPy is to write an interpreter for the language in \emph{RPython}. RPython is a restricted subset of Python chosen in such a way that it is possible to perform type inference on it. The interpreters in RPython can therefore be translated to efficient C code. A number of languages have been implemented with PyPy, most importantly a full -Python implementation, but also a Prolog interpreter \cite{XXX} and a Smalltalk -VM \cite{XXX}. +Python implementation, but also a Prolog interpreter +\cite{carl_friedrich_bolz_towards_2010} and a Smalltalk VM +\cite{carl_friedrich_bolz_back_2008}. 
-This translation to C code adds a number of implementation details into the +The translation of the interpreter to C code adds a number of implementation details into the final executable that are not present in the interpreter implementation, such as a garbage collector. The interpreter can therefore be kept free from low-level implementation details. Another aspect of the final VM that is added @@ -179,14 +180,21 @@ \subsection{PyPy's Meta-Tracing JIT Compilers} \label{sect:tracing} -XXX citations A recently popular approach to JIT compilers is that of tracing JITs. Tracing -JITs record traces of concrete execution paths through the program. Those +JITs have their origin in the Dynamo project which used the technique for dynamic +assembler optimization \cite{XXX}. Later they were used to implement +a lightweight JIT for Java \cite{XXX} and for dynamic languages such as +JavaScript \cite{XXX}. + +A tracing JIT works by recording traces of concrete execution paths through the +program. Those traces are therefore linear lists of operations, which are optimized and then get turned into machine code. To be able to do this recording, VMs with a -tracing JIT typically also contain an interpreter. After a user program is +tracing JIT typically contain an interpreter. After a user program is started the interpreter is used until the most important paths through the user -program are turned into machine code. +program are turned into machine code. The tracing JIT tries to produce traces +that correspond to loops in the traced program, but most tracing JITs now also +have support for tracing non-loops \cite{XXX}. Because the traces always correspond to a concrete execution they cannot contain any control flow splits. Therefore they encode the control flow @@ -195,7 +203,8 @@ later executed with different values. One disadvantage of tracing JITs which makes them not directly applicable to -PyPy is that they encode the language semantics. 
Since PyPy wants to be a +PyPy is that they need to encode the language semantics of the language they are +tracing. Since PyPy wants to be a general framework, we want to reuse our tracer for different languages. Therefore PyPy's JIT is a meta-tracer \cite{bolz_tracing_2009}. It does not trace the execution of the user program, but instead traces the execution of @@ -203,7 +212,14 @@ it produces don't contain the bytecodes of the language in question, but RPython-level operations that the interpreter did to execute the program. -On the other hand, the loops that are traced by the tracer are the loops in the +Tracing through the execution of an interpreter has many advantages. It makes +the tracer, its optimizers and backends reusable for a variety of languages. The +language semantics do not need to be encoded into the JIT. Instead the tracer +just picks them up from the interpreter. XXX mention disadvantage of long +traces? + +While the operations in a trace are those of the interpreter, the loops that are +traced by the tracer are the loops in the user program. This means that the tracer stops tracing after one iteration of the loop in the user function that is being considered. At this point, it can have traced many iterations of the interpreter main loop. @@ -222,8 +238,6 @@ of the interpreter. However, the extent of the trace is determined by the loops in the user program. 
-XXX trace makes the object model operations explicit and transparent to the -optimizer \subsection{Optimizing Traces} \label{sub:optimizing} From commits-noreply at bitbucket.org Fri Mar 25 23:56:58 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:56:58 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: more reference, a sentence about inlining Message-ID: <20110325225658.20890282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3405:d1c265ea1d9f Date: 2011-03-25 22:12 +0100 http://bitbucket.org/pypy/extradoc/changeset/d1c265ea1d9f/ Log: more reference, a sentence about inlining diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -93,7 +93,7 @@ It has long been an objective of the partial evaluation community to automatically produce compilers from interpreters. There has been a recent renaissance of this idea using the different technique of tracing just-in-time -compilers. A number of projects have attempted this approach. SPUR \cite{XXX} is +compilers. A number of projects have attempted this approach. SPUR \cite{bebenita_spur:_2010} is a tracing JIT for .NET together with a JavaScript implementation in C\#. PyPy \cite{armin_rigo_pypys_2006} contains a tracing JIT for RPython (a restricted subset of Python). This JIT is then used to trace a number of languages @@ -182,14 +182,18 @@ A recently popular approach to JIT compilers is that of tracing JITs. Tracing JITs have their origin in the Dynamo project which used the for dynamic -assembler optimization \cite{XXX}. Later they were used for to implement -a lightweight JIT for Java \cite{XXX} and for dynamic languages such as -JavaScript \cite{XXX}. +assembler optimization \cite{bala_dynamo:_2000}. Later they were used for to implement +a lightweight JIT for Java \cite{gal_hotpathvm:_2006} and for dynamic languages such as +JavaScript \cite{gal_trace-based_2009}. 
A tracing JIT works by recording traces of concrete execution paths through the program. Those traces are therefore linear list of operations, which are optimized and then -get turned into machine code. To be able to do this recording, VMs with a +get turned into machine code. This recording automatically inlines functions, +when a function call is encountered the operations of the called functions are +simply put into the trace too. + +To be able to do this recording, VMs with a tracing JIT typically contain an interpreter. After a user program is started the interpreter is used until the most important paths through the user program are turned into machine code. The tracing JIT tries to produce traces diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -69,7 +69,87 @@ author = {Michael Bebenita and Florian Brandner and Manuel Fahndrich and Francesco Logozzo and Wolfram Schulte and Nikolai Tillmann and Herman Venter}, year = {2010}, keywords = {cil, dynamic compilation, javascript, just-in-time, tracing}, - pages = {708--725} + pages = {708--725}, + annote = {{\textless}h3{\textgreater}{\textless}a {href="http://morepypy.blogspot.com/2010/07/comparing-spur-to-pypy.html"{\textgreater}Comparing} {SPUR} to {PyPy{\textless}/a{\textgreater}{\textless}/h3{\textgreater}} +{{\textless}p{\textgreater}Recently,} I've become aware of the {\textless}a {href="http://research.microsoft.com/en-us/projects/spur/"{\textgreater}SPUR} project{\textless}/a{\textgreater} of Microsoft Research and read some of their papers (the tech report {"SPUR:} A {Trace-Based} {JIT} Compiler for {CIL"} is very cool). 
I found the project to be very interesting and since their approach is in many ways related to what {PyPy} is doing, I now want to compare and contrast the two projects.{\textless}/p{\textgreater} +{\textless}div id="a-tracing-jit-for-net"{\textgreater} +{{\textless}h2{\textgreater}A} Tracing {JIT} for {.NET{\textless}/h2{\textgreater}} +{{\textless}p{\textgreater}SPUR} consist of two parts: On the one hand it is a {VM} for {CIL,} the bytecode of the {.NET} {VM.} This {VM} uses a tracing {JIT} compiler to compile the programs it is running to machine code. As opposed to most existing {VMs} that have a tracing {JIT} it does not use an interpreter at all. Instead it contains various variants of a {JIT} compiler that produce different versions of each method. Those are:{\textless}/p{\textgreater} +{\textless}ul{\textgreater} +{\textless}li{\textgreater}a {\textless}em{\textgreater}profiling {JIT{\textless}/em{\textgreater}} which produces code that does lightweight profiling when running the compiled method{\textless}/li{\textgreater} +{\textless}li{\textgreater}a {\textless}em{\textgreater}tracing {JIT{\textless}/em{\textgreater}} which produces code that produces a trace when running the compiled method{\textless}/li{\textgreater} +{\textless}li{\textgreater}a {\textless}em{\textgreater}transfer-tail {JIT{\textless}/em{\textgreater}} which is used to produce code which is run to get from a failing guard back to the normal profiling version of a method{\textless}/li{\textgreater} +{\textless}li{\textgreater}an {\textless}em{\textgreater}optimizing {JIT{\textless}/em{\textgreater}} that actually optimizes traces and turns them into machine code{\textless}/li{\textgreater} +{\textless}/ul{\textgreater} +{\textless}div id="optimizations-done-by-the-optimizing-jit"{\textgreater} +{{\textless}h3{\textgreater}Optimizations} Done by the Optimizing {JIT{\textless}/h3{\textgreater}} +{{\textless}p{\textgreater}SPUR's} optimizing {JIT} does a number of powerful optimizations 
on the traces before it turns them into machine code. Among them are usual compiler optimizations such as register allocation, common subexpression elimination, loop invariant code motion, etc.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}It} also performs some optimizations that are specific to the tracing context and are thus not commonly found in "normal" compilers:{\textless}/p{\textgreater} +{\textless}ul{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}guard implication{\textless}/em{\textgreater}: if a guard is implied by an earlier guard, it is removed{\textless}/li{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}guard strengthening{\textless}/em{\textgreater}: if there is a sequence of guards that become stronger and stronger (i.e. each guard implies the previous one), the first guard in the sequence is replaced by the last one, and all others are removed. This can greatly reduce the number of guards and is generally safe. It can shift a guard failure to an earlier point in the trace, but the failure would have occurred at some point in the trace anyway.{\textless}/li{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}load/store optimizations{\textless}/em{\textgreater}: this is an optimization for memory reads/writes. If several loads from the same memory location occur without writes in between, all but the first one are removed. Similarly, if a write to a memory location is performed, this write is delayed as much as possible. If there is a write to the same location soon afterwards, the first write can be removed.{\textless}/li{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}escape analysis{\textless}/em{\textgreater}: for allocations that occur in a loop, the optimizer checks whether the resulting object escapes the loop. 
If not, the allocation is moved before the loop, so that only one object needs to be allocated, instead of one every loop iteration.{\textless}/li{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}user-controlled loop unrolling{\textless}/em{\textgreater}: not exactly an optimization, but an interesting feature anyway. It is possible to annotate a {CIL} method with a special decorator {{\textless}tt{\textgreater}[TraceUnfold]{\textless}/tt{\textgreater}} and then the tracing {JIT} will fully unroll the loops it contains. This can be useful for loops than are known to run a small and fixed number of iterations for each call-site.{\textless}/li{\textgreater} +{\textless}li{\textgreater}{\textless}em{\textgreater}user controlled tracing{\textless}/em{\textgreater}: The user can also control tracing up to a point. Methods can be annotated with {{\textless}tt{\textgreater}[NativeCall]{\textless}/tt{\textgreater}} to tell the tracer to never trace their execution. Instead they appear as a direct call in the trace.{\textless}/li{\textgreater} +{\textless}/ul{\textgreater} +{\textless}/div{\textgreater} +{\textless}/div{\textgreater} +{\textless}div id="a-javascript-implementation"{\textgreater} +{{\textless}h2{\textgreater}A} {JavaScript} Implementation{\textless}/h2{\textgreater} +{{\textless}p{\textgreater}In} addition to the tracing {JIT} I just described, {SPUR} also contains a {JavaScript} implementation for {.NET.} The approach of this implementation is to translate {JavaScript} to {CIL} bytecode, doing some amount of type inference to detect variables that have fixed types. All operations where no precise type could be determined are implemented with calls to a {JavaScript} runtime system, which does the necessary type dispatching. 
The {JavaScript} runtime is implemented in C\#.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}The} {JavaScript} implementation and the {CLI} {VM} with a tracing {JIT} sound quite unrelated at first, but together they amplify each other. The tracing {JIT} traces the {JavaScript} functions that have been translated to {CLI} bytecode. Since the {JavaScript} runtime is in C\#, it exists as {CLI} bytecode too. Thus it can be inlined into the {JavaScript} functions by the tracer. This is highly beneficial, since it exposes the runtime type dispatching of the {JavaScript} operations to the optimizations of the tracing {JIT.} Particularly the common expression elimination helps the {JavaScript} code. If a series of operations is performed on the same object, the operations will all do the same type checks. All but the type checks of the first operation can be removed by the optimizer.{\textless}/p{\textgreater} +{\textless}div id="performance-results"{\textgreater} +{{\textless}h3{\textgreater}Performance} Results{\textless}/h3{\textgreater} +{{\textless}p{\textgreater}The} speed results of the combined {JavaScript} implementation and tracing {JIT} are quite impressive. It beats {TraceMonkey} for most benchmarks in {SunSpider} (apart from some string-heavy benchmarks that are quite slow) and can compete with V8 in many of them. However, all this is steady-state performance and it seems {SPUR's} compile time is rather bad currently.{\textless}/p{\textgreater} +{\textless}/div{\textgreater} +{\textless}div id="further-possibilities"{\textgreater} +{{\textless}h3{\textgreater}Further} Possibilities{\textless}/h3{\textgreater} +{{\textless}p{\textgreater}A} further (so far still hypothetical) advantage of {SPUR} is that the approach can optimize cases where execution crosses the border of two different systems. 
If somebody wrote an {HTML} layout engine and a {DOM} in C\# to get a web browser and integrated it with the {JavaScript} implementation described above, the tracing {JIT} could optimize {DOM} manipulations performed by {JavaScript} code as well as callbacks from the browser into {JavaScript} code.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}Of} course the approach {SPUR} takes to implement {JavaScript} is completely generalizable. It should be possible to implement other dynamic languages in the same way as {JavaScript} using {SPUR.} One would have to write a runtime system for the language in C\#, as well as a compiler from the language into {CIL} bytecode. Given these two elements, {SPUR's} tracing {JIT} compiler would probably do a reasonable job at optimizing this other language (of course in practise, the language implementation would need some tweaking and annotations to make it really fast).{\textless}/p{\textgreater} +{\textless}/div{\textgreater} +{\textless}/div{\textgreater} +{\textless}div id="comparison-with-pypy"{\textgreater} +{{\textless}h2{\textgreater}Comparison} With {PyPy{\textless}/h2{\textgreater}} +{{\textless}p{\textgreater}The} goals of {PyPy} and {SPUR} are very similar. Both projects want to implement dynamic languages in an efficient way by using a tracing {JIT.} Both apply the tracing {JIT} "one level down", i.e. the runtime system of the dynamic language is visible to the tracing {JIT.} This is the crucial point of the approach of both projects. Since the runtime system of the dynamic language is visible to the tracing {JIT,} the {JIT} can optimize programs in that dynamic language. It does not itself need to know about the semantics of the dynamic language. This makes the tracing {JIT} usable for a variety of dynamic languages. 
It also means that the two halves can be implemented and debugged independently.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}In} {SPUR,} C\# (or another language that is compilable to {CIL)} plays the role of {RPython,} and {CIL} is equivalent to the intermediate format that {PyPy's} translation toolchain uses. Both formats operate on a similar abstraction level, they are quite close to C, but still have support for the object system of their respective language and are garbage-collected.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}SPUR} supports only a {JavaScript} implementation so far, which could maybe change in the future. Thus {JavaScript} in {SPUR} corresponds to Python in {PyPy,} which was the first dynamic language implemented in {PyPy} (and is also the reason for {PyPy's} existence).{\textless}/p{\textgreater} +{{\textless}p{\textgreater}There} are obviously also differences between the two projects, although many of them are only skin-deep. The largest difference is the reliance of {SPUR} on compilers on all levels. {PyPy} takes the opposite approach of using interpreters almost everywhere. 
The parts of {PyPy} that correspond to {SPUR's} compilers are {(I} will use the Python implementation of {PyPy} as an example):{\textless}/p{\textgreater} +{\textless}ul{\textgreater} +{\textless}li{\textgreater}the {{\textless}em{\textgreater}JavaScript-to-CIL} compiler{\textless}/em{\textgreater} corresponds to the Python interpreter of {PyPy{\textless}/li{\textgreater}} +{\textless}li{\textgreater}the {\textless}em{\textgreater}profiling {JIT{\textless}/em{\textgreater}} corresponds to a part of {PyPy's} translation toolchain which adds some profiling support in the process of turning {RPython} code into C code,{\textless}/li{\textgreater} +{\textless}li{\textgreater}the {\textless}em{\textgreater}tracing {JIT{\textless}/em{\textgreater}} corresponds to a special interpreter in the {PyPy} {JIT} which executes an {RPython} program and produces a trace of the execution{\textless}/li{\textgreater} +{\textless}li{\textgreater}the {\textless}em{\textgreater}transfer-tail {JIT{\textless}/em{\textgreater}} corresponds to {PyPy's} {\textless}a href="http://morepypy.blogspot.com/2010/06/blackhole-interpreter.html"{\textgreater}blackhole interpreter{\textless}/a{\textgreater}, also called fallback interpreter{\textless}/li{\textgreater} +{\textless}li{\textgreater}the {\textless}em{\textgreater}optimizing {JIT{\textless}/em{\textgreater}} corresponds to the optimizers and backends of {PyPy's} {JIT{\textless}/li{\textgreater}} +{\textless}/ul{\textgreater} +{\textless}div id="pypy-s-optimizations"{\textgreater} +{{\textless}h3{\textgreater}PyPy's} Optimizations{\textless}/h3{\textgreater} +{{\textless}p{\textgreater}Comparing} the optimizations that the two projects perform, the biggest difference is that {PyPy} does "trace stitching" instead of fully supporting trace trees. The difference between the two concerns what happens when a new trace gets added to an existing loop. The new trace starts from a guard in the existing loop that was observed to fail often. 
Trace stitching means that the loop is just patched with a jump to the new trace. {SPUR} instead recompiles the whole trace tree, which gives the optimizers more opportunities, but also makes compilation a lot slower. Another difference is that {PyPy} does not perform loop-invariant code motion yet.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}Many} of the remaining optimizations are very similar. {PyPy} supports guard implication as well as guard strengthening. It has some load/store optimizations, but {PyPy's} alias analysis is quite rudimentary. On the other hand, {PyPy's} escape analysis is very powerful. {PyPy} also has support for the annotations that {SPUR} supports, using some decorators in the {\textless}tt{\textgreater}pypy.rlib.jit{\textless}/tt{\textgreater} module. User-controlled loop unrolling is performed using the {\textless}tt{\textgreater}unroll\_safe{\textless}/tt{\textgreater} decorator, tracing of a function can be disabled with the {\textless}tt{\textgreater}dont\_look\_inside{\textless}/tt{\textgreater} decorator.{\textless}/p{\textgreater} +{{\textless}p{\textgreater}PyPy} has a few more annotations that were not mentioned in the {SPUR} tech report. Most importantly, it is possible to declare a function as pure, using the {\textless}tt{\textgreater}purefunction{\textless}/tt{\textgreater} decorator. {PyPy's} optimizers will remove calls to a function decorated that way if the arguments to the call are all constant. In addition it is possible to declare instances of classes to be immutable, which means that field accesses on constant instances can be folded away. 
Furthermore there is the promote hint, which is spelled {\textless}tt{\textgreater}x = hint(x, {promote=True){\textless}/tt{\textgreater}.} This will produce a guard in the trace, to turn {\textless}tt{\textgreater}x{\textless}/tt{\textgreater} into a constant after the guard.{\textless}/p{\textgreater} +{\textless}/div{\textgreater} +{\textless}/div{\textgreater} +{\textless}div id="summary"{\textgreater} +{{\textless}h2{\textgreater}Summary{\textless}/h2{\textgreater}} +{{\textless}p{\textgreater}Given} the similarity between the projects' goals, it is perhaps not so surprising to see that {PyPy} and {SPUR} have co-evolved and reached many similar design decisions. It is still very good to see another project that does many things in the same way as {PyPy.{\textless}/p{\textgreater}} +{\textless}/div{\textgreater}} +}, + + at inproceedings{gal_trace-based_2009, + address = {New York, {NY,} {USA}}, + series = {{PLDI} '09}, + title = {Trace-based just-in-time type specialization for dynamic languages}, + isbn = {978-1-60558-392-1}, + location = {Dublin, Ireland}, + doi = {10.1145/1542476.1542528}, + abstract = {Dynamic languages such as {JavaScript} are more difficult to compile than statically typed ones. Since no concrete type information is available, traditional compilers need to emit generic code that can handle all possible type combinations at runtime. We present an alternative compilation technique for dynamically-typed languages that identifies frequently executed loop traces at run-time and then generates machine code on the fly that is specialized for the actual dynamic types occurring on each path through the loop. Our method provides cheap inter-procedural type specialization, and an elegant and efficient way of incrementally compiling lazily discovered alternative paths through nested loops. 
We have implemented a dynamic compiler for {JavaScript} based on our technique and we have measured speedups of 10x and more for certain benchmark programs.}, + booktitle = {{ACM} {SIGPLAN} Notices}, + publisher = {{ACM}}, + author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and David Mandelin and Mohammad R Haghighat and Blake Kaplan and Graydon Hoare and Boris Zbarsky and Jason Orendorff and Jesse Ruderman and Edwin W Smith and Rick Reitmaier and Michael Bebenita and Mason Chang and Michael Franz}, + year = {2009}, + note = {{ACM} {ID:} 1542528}, + keywords = {code generation, design, dynamically typed languages, experimentation, incremental compilers, languages, measurement, performance, run-time environments, trace-based compilation}, + pages = {465{\textendash}478} }, @article{bolz_allocation_2011, @@ -86,6 +166,20 @@ pages = {43{\textendash}52} }, + at article{gal_trace-based_2009-1, + series = {{PLDI} '09}, + title = {Trace-based just-in-time type specialization for dynamic languages}, + location = {Dublin, Ireland}, + doi = {10.1145/1542476.1542528}, + abstract = {Dynamic languages such as {JavaScript} are more difficult to compile than statically typed ones. Since no concrete type information is available, traditional compilers need to emit generic code that can handle all possible type combinations at runtime. We present an alternative compilation technique for dynamically-typed languages that identifies frequently executed loop traces at run-time and then generates machine code on the fly that is specialized for the actual dynamic types occurring on each path through the loop. Our method provides cheap inter-procedural type specialization, and an elegant and efficient way of incrementally compiling lazily discovered alternative paths through nested loops. 
We have implemented a dynamic compiler for {JavaScript} based on our technique and we have measured speedups of 10x and more for certain benchmark programs.}, + journal = {{ACM} {SIGPLAN} Notices}, + author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and David Mandelin and Mohammad R Haghighat and Blake Kaplan and Graydon Hoare and Boris Zbarsky and Jason Orendorff and Jesse Ruderman and Edwin W Smith and Rick Reitmaier and Michael Bebenita and Mason Chang and Michael Franz}, + year = {2009}, + note = {{ACM} {ID:} 1542528}, + keywords = {code generation, design, dynamically typed languages, experimentation, incremental compilers, languages, measurement, performance, run-time environments, trace-based compilation}, + pages = {465{\textendash}478} +}, + @inproceedings{chang_tracing_2009, address = {Washington, {DC,} {USA}}, title = {Tracing for Web 3.0: Trace Compilation for the Next Generation Web Applications}, @@ -156,14 +250,6 @@ annote = {{{\textless}p{\textgreater}The} paper evaluates the various ways in which a number of Java papers do their Java benchmarks. It then proposes a statistically correct way to do this and compares common approaches against the statistically correct way. 
Especially if the results of two alternatives are very close together, many common approaches can lead to systematic errors.{\textless}/p{\textgreater}} }, - at inproceedings{andreas_gal_trace-based_2009, - title = {Trace-based {Just-in-Time} Type Specialization for Dynamic Languages}, - booktitle = {{PLDI}}, - author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and Blake Kaplan and Graydon Hoare and David Mandelin and Boris Zbarsky and Jason Orendorff and Michael Bebenita and Mason Chang and Michael Franz and Edwin Smith and Rick Reitmaier and Mohammad Haghighat}, - year = {2009}, - keywords = {toappear} -}, - @inproceedings{bolz_tracing_2009, address = {Genova, Italy}, title = {Tracing the meta-level: {PyPy's} tracing {JIT} compiler}, From commits-noreply at bitbucket.org Fri Mar 25 23:57:02 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:57:02 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: use listings instead of pygmentize Message-ID: <20110325225702.4187E2A203A@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3406:5ac85459c63b Date: 2011-03-25 22:58 +0100 http://bitbucket.org/pypy/extradoc/changeset/5ac85459c63b/ Log: use listings instead of pygmentize diff --git a/talk/icooolps2011/code/trace2.tex b/talk/icooolps2011/code/trace2.tex --- a/talk/icooolps2011/code/trace2.tex +++ b/talk/icooolps2011/code/trace2.tex @@ -1,17 +1,17 @@ -\begin{Verbatim}[commandchars=\\\{\}] +\begin{lstlisting}[escapechar=|,basicstyle=\ttfamily]] # inst.getattr("a") map1 = inst.map guard(map1 == 0xb74af4a8) -{\color{gray}index1 = Map.getindex(map1, "a")} -{\color{gray}guard(index1 != -1)} +|{\color{gray}index1 = Map.getindex(map1, "a")}| +|{\color{gray}guard(index1 != -1)}| storage1 = inst.storage result1 = storage1[index1] # inst.getattr("b") -{\color{gray}map2 = inst.map} -{\color{gray}guard(map2 == 0xb74af4a8)} -{\color{gray}index2 = Map.getindex(map2, "b")} -{\color{gray}guard(index2 == -1)} 
+|{\color{gray}map2 = inst.map}| +|{\color{gray}guard(map2 == 0xb74af4a8)}| +|{\color{gray}index2 = Map.getindex(map2, "b")}| +|{\color{gray}guard(index2 == -1)}| cls1 = inst.cls methods1 = cls.methods result2 = dict.get(methods1, "b") @@ -19,10 +19,10 @@ v2 = result1 + result2 # inst.getattr("c") -{\color{gray}map3 = inst.map} -{\color{gray}guard(map3 == 0xb74af4a8)} -{\color{gray}index3 = Map.getindex(map3, "c")} -{\color{gray}guard(index3 == -1)} +|{\color{gray}map3 = inst.map}| +|{\color{gray}guard(map3 == 0xb74af4a8)}| +|{\color{gray}index3 = Map.getindex(map3, "c")}| +|{\color{gray}guard(index3 == -1)}| cls1 = inst.cls methods2 = cls.methods result3 = dict.get(methods2, "c") @@ -30,4 +30,4 @@ v4 = v2 + result3 return(v4) -\end{Verbatim} +\end{lstlisting} diff --git a/talk/icooolps2011/code/trace5.tex b/talk/icooolps2011/code/trace5.tex --- a/talk/icooolps2011/code/trace5.tex +++ b/talk/icooolps2011/code/trace5.tex @@ -1,4 +1,4 @@ -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] # inst.getattr("a") map1 = inst.map guard(map1 == 0xb74af4a8) @@ -15,4 +15,4 @@ # inst.getattr("c") v4 = v2 + 17 return(v4) -\end{Verbatim} +\end{lstlisting} diff --git a/talk/icooolps2011/code/interpreter-slow.tex b/talk/icooolps2011/code/interpreter-slow.tex --- a/talk/icooolps2011/code/interpreter-slow.tex +++ b/talk/icooolps2011/code/interpreter-slow.tex @@ -1,39 +1,39 @@ -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +class Class(object): + def __init__(self, name): + self.name = name + self.methods = {} - \PY{k}{def} 
\PY{n+nf}{instantiate}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n}{Instance}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)} + def instantiate(self): + return Instance(self) - \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + def find_method(self, name): + result = self.methods.get(name) + if result is not None: + return result + raise AttributeError(name) - \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + def change_method(self, name, value): + self.methods[name] = value -\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} +class Instance(object): + def __init__(self, cls): + self.cls = cls + self.attributes = {} - \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + def getfield(self, name): + result = self.attributes.get(name) + if result is not 
None: + return result + raise AttributeError(name) - \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{attributes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} + def write_attribute(self, name, value): + self.attributes[name] = value - \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{try}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} -\end{Verbatim} + def getattr(self, name): + try: + return self.getfield(name) + except AttributeError: + return self.cls.find_method(name) +\end{lstlisting} diff --git a/talk/icooolps2011/code/version.tex b/talk/icooolps2011/code/version.tex --- a/talk/icooolps2011/code/version.tex +++ b/talk/icooolps2011/code/version.tex @@ -1,26 +1,26 @@ -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{VersionTag}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{pass} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +class VersionTag(object): + pass -\PY{k}{class} \PY{n+nc}{Class}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{name} \PY{o}{=} \PY{n}{name} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} +class Class(object): + def __init__(self, name): + self.name = name + self.methods = {} + self.version = VersionTag() - \PY{k}{def} \PY{n+nf}{find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - 
\PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{version} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{result} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)} - \PY{k}{if} \PY{n}{result} \PY{o+ow}{is} \PY{o+ow}{not} \PY{n+nb+bp}{None}\PY{p}{:} - \PY{k}{return} \PY{n}{result} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + def find_method(self, name): + self = hint(self, promote=True) + version = hint(self.version, promote=True) + result = self._find_method(name, version) + if result is not None: + return result + raise AttributeError(name) - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{\PYZus{}find\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{version}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{)} + @purefunction + def _find_method(self, name, version): + return self.methods.get(name) - \PY{k}{def} \PY{n+nf}{change\PYZus{}method}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{methods}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{value} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{version} \PY{o}{=} \PY{n}{VersionTag}\PY{p}{(}\PY{p}{)} -\end{Verbatim} + def change_method(self, name, value): + self.methods[name] = value + self.version = VersionTag() +\end{lstlisting} diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -8,9 +8,25 @@ \usepackage{xspace} \usepackage[scaled=0.8]{beramono} \usepackage[utf8]{inputenc} +\usepackage{setspace} +\usepackage{listings} \input{code/style.tex} +\lstset{ + 
basicstyle=\ttfamily\footnotesize, + language=Python, + keywordstyle=\bfseries, + stringstyle=\color{blue}, + commentstyle=\color{gray}\textit, + fancyvrb=true, + showstringspaces=false, + %keywords={def,while,if,elif,return,class,get,set,new,guard_class} + numberstyle = \tiny, + numbersep = -20pt, +} + + \newboolean{showcomments} \setboolean{showcomments}{true} \ifthenelse{\boolean{showcomments}} @@ -296,9 +312,9 @@ dictionaries. Let's assume we trace through code that sums three attributes, such as: -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] inst.getattr("a") + inst.getattr("b") + inst.getattr("c") -\end{Verbatim} +\end{lstlisting} \begin{figure} \input{code/trace1.tex} @@ -360,18 +376,19 @@ However, the optimizer can statically know the value of a variable even if it is not a constant in the original source code. For example, consider the following fragment of RPython code: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{if} \PY{n}{x} \PY{o}{==} \PY{l+m+mi}{4}\PY{p}{:} - \PY{n}{y} \PY{o}{=} \PY{n}{y} \PY{o}{+} \PY{n}{x} -\end{Verbatim} + +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +if x == 4: + y = y + x +\end{lstlisting} If the fragment is traced with \texttt{x} being \texttt{4}, the following trace is produced: % -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(x == 4) y = y + x -\end{Verbatim} +\end{lstlisting} In the trace above, the value of \texttt{x} is statically known thanks to the guard. Remember that a guard is a runtime check. The above trace will run to @@ -392,31 +409,32 @@ a lot of computation depending on the value of that variable. Let's make this more concrete. 
If we trace a call to the following function: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{def} \PY{n+nf}{f1}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} - \PY{n}{z} \PY{o}{=} \PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} - \PY{k}{return} \PY{n}{z} \PY{o}{+} \PY{n}{y} -\end{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +def f2(x, y): + x = hint(x, promote=True) + z = x * 2 + 1 + return z + y +\end{lstlisting} We get a trace that looks like this: -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] v1 = x * 2 z = v1 + 1 v2 = z + y return(v2) -\end{Verbatim} +\end{lstlisting} Observe how the first two operations could be constant-folded if the value of \texttt{x} were known. Let's assume that the value of \texttt{x} can vary, but does so rarely, i.e. only takes a few different values at runtime. If this is the case, we can add a hint to promote \texttt{x}, like this: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{def} \PY{n+nf}{f2}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} - \PY{n}{x} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n}{x}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{z} \PY{o}{=} \PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} - \PY{k}{return} \PY{n}{z} \PY{o}{+} \PY{n}{y} -\end{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +def f2(x, y): + x = hint(x, promote=True) + z = x * 2 + 1 + return z + y +\end{lstlisting} The meaning of this hint is that the tracer should pretend that \texttt{x} is a constant @@ -426,13 +444,13 @@ the arguments \texttt{4} and \texttt{8}. The trace will be the same, except for one operation at the beginning: -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(x == 4) v1 = x * 2 z = v1 + 1 v2 = z + y return(v2) -\end{Verbatim} +\end{lstlisting} The promotion is turned into a \texttt{guard} operation in the trace. The guard captures the value of \texttt{x} as it was at runtime. 
From the point of view of the @@ -441,11 +459,11 @@ assume that \texttt{x} is equal to \texttt{4}, meaning that the optimizer will turn this trace into: -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(x == 4) v2 = 9 + y return(v2) -\end{Verbatim} +\end{lstlisting} Notice how the first two arithmetic operations were constant folded. The hope is that the guard is executed quicker than the multiplication and the addition that @@ -457,11 +475,11 @@ capture a different value of \texttt{x}. If it is e.g. \texttt{2}, then the optimized trace looks like this: -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(x == 2) v2 = 5 + y return(v2) -\end{Verbatim} +\end{lstlisting} This new trace will be attached to the guard instruction of the first trace. If \texttt{x} takes on even more values, a new trace will eventually be made for all of them, @@ -493,29 +511,29 @@ As an example, take the following class: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{A}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{=} \PY{n}{x} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n}{y} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +class A(object): + def __init__(self, x, y): + self.x = x + self.y = y - \PY{k}{def} \PY{n+nf}{f}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{val}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{compute}\PY{p}{(}\PY{p}{)} \PY{o}{+} \PY{n}{val} + def f(self, val): + self.y = self.compute() + val - \PY{k}{def} \PY{n+nf}{compute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} -\end{Verbatim} + def compute(self): + return self.x * 2 + 1 +\end{lstlisting} 
Tracing the call \texttt{a.f(10)} of some instance of \texttt{A} yields the following trace (note how the call to \texttt{compute} is inlined): % -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] x = a.x v1 = x * 2 v2 = v1 + 1 v3 = v2 + val a.y = v3 -\end{Verbatim} +\end{lstlisting} In this case, adding a promote of \texttt{self} in the \texttt{f} method to get rid of the computation of the first few operations does not help. Even if \texttt{a} is a @@ -527,29 +545,29 @@ is a pure function. To communicate this, there is a \texttt{purefunction} decorator. If the code in \texttt{compute} should be constant-folded away, we would change the class as follows: -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{A}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{x}\PY{p}{,} \PY{n}{y}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{=} \PY{n}{x} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n}{y} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +class A(object): + def __init__(self, x, y): + self.x = x + self.y = y - \PY{k}{def} \PY{n+nf}{f}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{val}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{y} \PY{o}{=} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{compute}\PY{p}{(}\PY{p}{)} \PY{o}{+} \PY{n}{val} + def f(self, val): + self = hint(self, promote=True) + self.y = self.compute() + val - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{compute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{x} \PY{o}{*} \PY{l+m+mi}{2} \PY{o}{+} \PY{l+m+mi}{1} -\end{Verbatim} + @purefunction + def compute(self): + return self.x * 2 + 1 +\end{lstlisting} Now the trace will look like this: % -\begin{Verbatim} 
+\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(a == 0xb73984a8) v1 = compute(a) v2 = v1 + val a.y = v2 -\end{Verbatim} +\end{lstlisting} Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used during tracing. The call to \texttt{compute} is not inlined, so that the optimizer @@ -558,11 +576,11 @@ is a constant reference, the call will be removed by the optimizer. The final trace looks like this: % -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] guard(a == 0xb73984a8) v2 = 9 + val a.y = v2 -\end{Verbatim} +\end{lstlisting} (assuming that the \texttt{x} field's value is \texttt{4}). diff --git a/talk/icooolps2011/Makefile b/talk/icooolps2011/Makefile --- a/talk/icooolps2011/Makefile +++ b/talk/icooolps2011/Makefile @@ -1,5 +1,5 @@ -jit-hints.pdf: paper.tex paper.bib code/interpreter-slow.tex code/map.tex code/version.tex +jit-hints.pdf: paper.tex paper.bib pdflatex paper bibtex paper pdflatex paper diff --git a/talk/icooolps2011/code/map.tex b/talk/icooolps2011/code/map.tex --- a/talk/icooolps2011/code/map.tex +++ b/talk/icooolps2011/code/map.tex @@ -1,49 +1,49 @@ -\begin{Verbatim}[commandchars=\\\{\}] -\PY{k}{class} \PY{n+nc}{Map}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps} \PY{o}{=} \PY{p}{\PYZob{}}\PY{p}{\PYZcb{}} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +class Map(object): + def __init__(self): + self.indexes = {} + self.other_maps = {} - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{getindex}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{get}\PY{p}{(}\PY{n}{name}\PY{p}{,} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{)} + @purefunction + def getindex(self, 
name): + return self.indexes.get(name, -1) - \PY{n+nd}{@purefunction} - \PY{k}{def} \PY{n+nf}{add\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{if} \PY{n}{name} \PY{o+ow}{not} \PY{o+ow}{in} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{:} - \PY{n}{newmap} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} - \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{o}{.}\PY{n}{update}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)} - \PY{n}{newmap}\PY{o}{.}\PY{n}{indexes}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n+nb}{len}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{indexes}\PY{p}{)} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} \PY{o}{=} \PY{n}{newmap} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{other\PYZus{}maps}\PY{p}{[}\PY{n}{name}\PY{p}{]} + @purefunction + def add_attribute(self, name): + if name not in self.other_maps: + newmap = Map() + newmap.indexes.update(self.indexes) + newmap.indexes[name] = len(self.indexes) + self.other_maps[name] = newmap + return self.other_maps[name] -\PY{n}{EMPTY\PYZus{}MAP} \PY{o}{=} \PY{n}{Map}\PY{p}{(}\PY{p}{)} +EMPTY_MAP = Map() -\PY{k}{class} \PY{n+nc}{Instance}\PY{p}{(}\PY{n+nb}{object}\PY{p}{)}\PY{p}{:} - \PY{k}{def} \PY{n+nf}{\PYZus{}\PYZus{}init\PYZus{}\PYZus{}}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{cls}\PY{p}{)}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls} \PY{o}{=} \PY{n}{cls} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n}{EMPTY\PYZus{}MAP} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage} \PY{o}{=} \PY{p}{[}\PY{p}{]} +class Instance(object): + def __init__(self, cls): + self.cls = cls + self.map = EMPTY_MAP + self.storage = [] - \PY{k}{def} \PY{n+nf}{getfield}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{index} \PY{o}{=} 
\PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} - \PY{k}{raise} \PY{n+ne}{AttributeError}\PY{p}{(}\PY{n}{name}\PY{p}{)} + def getfield(self, name): + map = hint(self.map, promote=True) + index = map.getindex(name) + if index != -1: + return self.storage[index] + raise AttributeError(name) - \PY{k}{def} \PY{n+nf}{write\PYZus{}attribute}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{,} \PY{n}{value}\PY{p}{)}\PY{p}{:} - \PY{n+nb}{map} \PY{o}{=} \PY{n}{hint}\PY{p}{(}\PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map}\PY{p}{,} \PY{n}{promote}\PY{o}{=}\PY{n+nb+bp}{True}\PY{p}{)} - \PY{n}{index} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{getindex}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{if} \PY{n}{index} \PY{o}{!=} \PY{o}{-}\PY{l+m+mi}{1}\PY{p}{:} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{p}{[}\PY{n}{index}\PY{p}{]} \PY{o}{=} \PY{n}{value} - \PY{k}{return} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{map} \PY{o}{=} \PY{n+nb}{map}\PY{o}{.}\PY{n}{add\PYZus{}attribute}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{storage}\PY{o}{.}\PY{n}{append}\PY{p}{(}\PY{n}{value}\PY{p}{)} + def write_attribute(self, name, value): + map = hint(self.map, promote=True) + index = map.getindex(name) + if index != -1: + self.storage[index] = value + return + self.map = map.add_attribute(name) + self.storage.append(value) - \PY{k}{def} \PY{n+nf}{getattr}\PY{p}{(}\PY{n+nb+bp}{self}\PY{p}{,} \PY{n}{name}\PY{p}{)}\PY{p}{:} - \PY{k}{try}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{getfield}\PY{p}{(}\PY{n}{name}\PY{p}{)} - \PY{k}{except} \PY{n+ne}{AttributeError}\PY{p}{:} - \PY{k}{return} \PY{n+nb+bp}{self}\PY{o}{.}\PY{n}{cls}\PY{o}{.}\PY{n}{find\PYZus{}method}\PY{p}{(}\PY{n}{name}\PY{p}{)} -\end{Verbatim} + def getattr(self, name): + try: + return self.getfield(name) + except AttributeError: + 
return self.cls.find_method(name) +\end{lstlisting} diff --git a/talk/icooolps2011/code/trace4.tex b/talk/icooolps2011/code/trace4.tex --- a/talk/icooolps2011/code/trace4.tex +++ b/talk/icooolps2011/code/trace4.tex @@ -1,37 +1,37 @@ -\begin{Verbatim}[commandchars=\\\{\}] +\begin{lstlisting}[escapechar=|,mathescape,basicstyle=\ttfamily] # inst.getattr("a") map1 = inst.map guard(map1 == 0xb74af4a8) -{\color{gray}index1 = Map.getindex(map1, "a")} -{\color{gray}guard(index1 != -1)} +|{\color{gray}index1 = Map.getindex(map1, "a")}| +|{\color{gray}guard(index1 != -1)}| storage1 = inst.storage result1 = storage1[index1] # inst.getattr("b") -{\color{gray}map2 = inst.map} -{\color{gray}guard(map2 == 0xb74af4a8)} -{\color{gray}index2 = Map.getindex(map2, "b")} -{\color{gray}guard(index2 == -1)} +|{\color{gray}map2 = inst.map}| +|{\color{gray}guard(map2 == 0xb74af4a8)}| +|{\color{gray}index2 = Map.getindex(map2, "b")}| +|{\color{gray}guard(index2 == -1)}| cls1 = inst.cls guard(cls1 == 0xb7aaaaf8) version1 = cls1.version guard(version1 == 0xb7bbbb18) -{\color{gray}result2 = Class._find_method(cls, "b", version1)} -{\color{gray}guard(result2 is not None)} +|{\color{gray}result2 = Class.\_find\_method(cls, "b", version1)}| +|{\color{gray}guard(result2 is not None)}| v2 = result1 + result2 # inst.getattr("c") -{\color{gray}map3 = inst.map} -{\color{gray}guard(map3 == 0xb74af4a8)} -{\color{gray}index3 = Map.getindex(map3, "c")} -{\color{gray}guard(index3 == -1)} -{\color{gray}cls2 = inst.cls} -{\color{gray}guard(cls2 == 0xb7aaaaf8)} -{\color{gray}version2 = cls2.version} -{\color{gray}guard(version2 == 0xb7bbbb18)} -{\color{gray}result3 = Class._find_method(cls, "c", version2)} -{\color{gray}guard(result3 is not None)} +|{\color{gray}map3 = inst.map}| +|{\color{gray}guard(map3 == 0xb74af4a8)}| +|{\color{gray}index3 = Map.getindex(map3, "c")}| +|{\color{gray}guard(index3 == -1)}| +|{\color{gray}cls2 = inst.cls}| +|{\color{gray}guard(cls2 == 0xb7aaaaf8)}| +|{\color{gray}version2 = 
cls2.version}| +|{\color{gray}guard(version2 == 0xb7bbbb18)}| +|{\color{gray}result3 = Class.\_find\_method(cls, "c", version2)}| +|{\color{gray}guard(result3 is not None)}| v4 = v2 + result3 return(v4) -\end{Verbatim} +\end{lstlisting} diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex --- a/talk/icooolps2011/code/trace1.tex +++ b/talk/icooolps2011/code/trace1.tex @@ -1,4 +1,4 @@ -\begin{Verbatim} +\begin{lstlisting}[mathescape,basicstyle=\ttfamily] # inst.getattr("a") attributes1 = inst.attributes result1 = dict.get(attributes1, "a") @@ -25,4 +25,4 @@ v4 = v2 + result3 return(v4) -\end{Verbatim} +\end{lstlisting} From commits-noreply at bitbucket.org Fri Mar 25 23:57:03 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:57:03 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: migrate to newer sigplan cls Message-ID: <20110325225703.520A42A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3407:f96dd9a6c1fc Date: 2011-03-25 23:10 +0100 http://bitbucket.org/pypy/extradoc/changeset/f96dd9a6c1fc/ Log: migrate to newer sigplan cls diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -1,24 +1,31 @@ -%\documentclass{acm_proc_article-sp} -\documentclass{sig-alternate} +\documentclass{sigplanconf} \usepackage{ifthen} \usepackage{fancyvrb} \usepackage{color} \usepackage{ulem} \usepackage{xspace} -\usepackage[scaled=0.8]{beramono} +\usepackage{epsfig} +\usepackage{amssymb} +\usepackage{amsmath} +\usepackage{amsfonts} \usepackage[utf8]{inputenc} \usepackage{setspace} + \usepackage{listings} -\input{code/style.tex} +\usepackage[T1]{fontenc} +\usepackage[scaled=0.8]{beramono} + + +\definecolor{commentgray}{rgb}{0.3,0.3,0.3} \lstset{ basicstyle=\ttfamily\footnotesize, language=Python, keywordstyle=\bfseries, stringstyle=\color{blue}, - commentstyle=\color{gray}\textit, + 
commentstyle=\color{commentgray}\textit, fancyvrb=true, showstringspaces=false, %keywords={def,while,if,elif,return,class,get,set,new,guard_class} @@ -26,7 +33,6 @@ numbersep = -20pt, } - \newboolean{showcomments} \setboolean{showcomments}{true} \ifthenelse{\boolean{showcomments}} @@ -70,20 +76,14 @@ \title{XXX in a Tracing JIT Compiler for Efficient Dynamic Languages} -\numberofauthors{4} -\author{ -\alignauthor Carl Friedrich Bolz\\ - \affaddr{University of Düsseldorf}\\ - \affaddr{STUPS Group}\\ - \affaddr{Germany}\\ - \email{cfbolz at gmx.de} -\alignauthor XXX - \affaddr{XXX}\\ - \email{XXX} -} -\conferenceinfo{ICOOOLPS}{'09 Genova, Italy} -\CopyrightYear{2009} -\crdata{978-1-60558-541-3/09/07} +\authorinfo{Carl Friedrich Bolz \and XXX} + {Heinrich-Heine-Universität Düsseldorf, STUPS Group, Germany + } + {cfbolz at gmx.de \and XXX} + +\conferenceinfo{ICOOOLPS}{'11 Lancaster, UK} +\CopyrightYear{2011} +\crdata{XXX} \maketitle diff --git a/talk/icooolps2011/sigplanconf.cls b/talk/icooolps2011/sigplanconf.cls new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/sigplanconf.cls @@ -0,0 +1,1250 @@ +%----------------------------------------------------------------------------- +% +% LaTeX Class/Style File +% +% Name: sigplanconf.cls +% Purpose: A LaTeX 2e class file for SIGPLAN conference proceedings. +% This class file supercedes acm_proc_article-sp, +% sig-alternate, and sigplan-proc. +% +% Author: Paul C. Anagnostopoulos +% Windfall Software +% 978 371-2316 +% paul at windfall.com +% +% Created: 12 September 2004 +% +% Revisions: See end of file. +% +%----------------------------------------------------------------------------- + + +\NeedsTeXFormat{LaTeX2e}[1995/12/01] +\ProvidesClass{sigplanconf}[2009/09/30 v2.3 ACM SIGPLAN Proceedings] + +% The following few pages contain LaTeX programming extensions adapted +% from the ZzTeX macro package. 
+ +% Token Hackery +% ----- ------- + + +\def \@expandaftertwice {\expandafter\expandafter\expandafter} +\def \@expandafterthrice {\expandafter\expandafter\expandafter\expandafter + \expandafter\expandafter\expandafter} + +% This macro discards the next token. + +\def \@discardtok #1{}% token + +% This macro removes the `pt' following a dimension. + +{\catcode `\p = 12 \catcode `\t = 12 + +\gdef \@remover #1pt{#1} + +} % \catcode + +% This macro extracts the contents of a macro and returns it as plain text. +% Usage: \expandafter\@defof \meaning\macro\@mark + +\def \@defof #1:->#2\@mark{#2} + +% Control Sequence Names +% ------- -------- ----- + + +\def \@name #1{% {\tokens} + \csname \expandafter\@discardtok \string#1\endcsname} + +\def \@withname #1#2{% {\command}{\tokens} + \expandafter#1\csname \expandafter\@discardtok \string#2\endcsname} + +% Flags (Booleans) +% ----- ---------- + +% The boolean literals \@true and \@false are appropriate for use with +% the \if command, which tests the codes of the next two characters. + +\def \@true {TT} +\def \@false {FL} + +\def \@setflag #1=#2{\edef #1{#2}}% \flag = boolean + +% IF and Predicates +% -- --- ---------- + +% A "predicate" is a macro that returns \@true or \@false as its value. +% Such values are suitable for use with the \if conditional. For example: +% +% \if \@oddp{\x} \else \fi + +% A predicate can be used with \@setflag as follows: +% +% \@setflag \flag = {} + +% Here are the predicates for TeX's repertoire of conditional +% commands. These might be more appropriately interspersed with +% other definitions in this module, but what the heck. +% Some additional "obvious" predicates are defined. 
+ +\def \@eqlp #1#2{\ifnum #1 = #2\@true \else \@false \fi} +\def \@neqlp #1#2{\ifnum #1 = #2\@false \else \@true \fi} +\def \@lssp #1#2{\ifnum #1 < #2\@true \else \@false \fi} +\def \@gtrp #1#2{\ifnum #1 > #2\@true \else \@false \fi} +\def \@zerop #1{\ifnum #1 = 0\@true \else \@false \fi} +\def \@onep #1{\ifnum #1 = 1\@true \else \@false \fi} +\def \@posp #1{\ifnum #1 > 0\@true \else \@false \fi} +\def \@negp #1{\ifnum #1 < 0\@true \else \@false \fi} +\def \@oddp #1{\ifodd #1\@true \else \@false \fi} +\def \@evenp #1{\ifodd #1\@false \else \@true \fi} +\def \@rangep #1#2#3{\if \@orp{\@lssp{#1}{#2}}{\@gtrp{#1}{#3}}\@false \else + \@true \fi} +\def \@tensp #1{\@rangep{#1}{10}{19}} + +\def \@dimeqlp #1#2{\ifdim #1 = #2\@true \else \@false \fi} +\def \@dimneqlp #1#2{\ifdim #1 = #2\@false \else \@true \fi} +\def \@dimlssp #1#2{\ifdim #1 < #2\@true \else \@false \fi} +\def \@dimgtrp #1#2{\ifdim #1 > #2\@true \else \@false \fi} +\def \@dimzerop #1{\ifdim #1 = 0pt\@true \else \@false \fi} +\def \@dimposp #1{\ifdim #1 > 0pt\@true \else \@false \fi} +\def \@dimnegp #1{\ifdim #1 < 0pt\@true \else \@false \fi} + +\def \@vmodep {\ifvmode \@true \else \@false \fi} +\def \@hmodep {\ifhmode \@true \else \@false \fi} +\def \@mathmodep {\ifmmode \@true \else \@false \fi} +\def \@textmodep {\ifmmode \@false \else \@true \fi} +\def \@innermodep {\ifinner \@true \else \@false \fi} + +\long\def \@codeeqlp #1#2{\if #1#2\@true \else \@false \fi} + +\long\def \@cateqlp #1#2{\ifcat #1#2\@true \else \@false \fi} + +\long\def \@tokeqlp #1#2{\ifx #1#2\@true \else \@false \fi} +\long\def \@xtokeqlp #1#2{\expandafter\ifx #1#2\@true \else \@false \fi} + +\long\def \@definedp #1{% + \expandafter\ifx \csname \expandafter\@discardtok \string#1\endcsname + \relax \@false \else \@true \fi} + +\long\def \@undefinedp #1{% + \expandafter\ifx \csname \expandafter\@discardtok \string#1\endcsname + \relax \@true \else \@false \fi} + +\def \@emptydefp #1{\ifx #1\@empty \@true \else \@false \fi}% {\name} + 
+\let \@emptylistp = \@emptydefp + +\long\def \@emptyargp #1{% {#n} + \@empargp #1\@empargq\@mark} +\long\def \@empargp #1#2\@mark{% + \ifx #1\@empargq \@true \else \@false \fi} +\def \@empargq {\@empargq} + +\def \@emptytoksp #1{% {\tokenreg} + \expandafter\@emptoksp \the#1\@mark} + +\long\def \@emptoksp #1\@mark{\@emptyargp{#1}} + +\def \@voidboxp #1{\ifvoid #1\@true \else \@false \fi} +\def \@hboxp #1{\ifhbox #1\@true \else \@false \fi} +\def \@vboxp #1{\ifvbox #1\@true \else \@false \fi} + +\def \@eofp #1{\ifeof #1\@true \else \@false \fi} + + +% Flags can also be used as predicates, as in: +% +% \if \flaga \else \fi + + +% Now here we have predicates for the common logical operators. + +\def \@notp #1{\if #1\@false \else \@true \fi} + +\def \@andp #1#2{\if #1% + \if #2\@true \else \@false \fi + \else + \@false + \fi} + +\def \@orp #1#2{\if #1% + \@true + \else + \if #2\@true \else \@false \fi + \fi} + +\def \@xorp #1#2{\if #1% + \if #2\@false \else \@true \fi + \else + \if #2\@true \else \@false \fi + \fi} + +% Arithmetic +% ---------- + +\def \@increment #1{\advance #1 by 1\relax}% {\count} + +\def \@decrement #1{\advance #1 by -1\relax}% {\count} + +% Options +% ------- + + +\@setflag \@authoryear = \@false +\@setflag \@blockstyle = \@false +\@setflag \@copyrightwanted = \@true +\@setflag \@explicitsize = \@false +\@setflag \@mathtime = \@false +\@setflag \@natbib = \@true +\@setflag \@ninepoint = \@true +\newcount{\@numheaddepth} \@numheaddepth = 3 +\@setflag \@onecolumn = \@false +\@setflag \@preprint = \@false +\@setflag \@reprint = \@false +\@setflag \@tenpoint = \@false +\@setflag \@times = \@false + +% Note that all the dangerous article class options are trapped. 
+ +\DeclareOption{9pt}{\@setflag \@ninepoint = \@true + \@setflag \@explicitsize = \@true} + +\DeclareOption{10pt}{\PassOptionsToClass{10pt}{article}% + \@setflag \@ninepoint = \@false + \@setflag \@tenpoint = \@true + \@setflag \@explicitsize = \@true} + +\DeclareOption{11pt}{\PassOptionsToClass{11pt}{article}% + \@setflag \@ninepoint = \@false + \@setflag \@explicitsize = \@true} + +\DeclareOption{12pt}{\@unsupportedoption{12pt}} + +\DeclareOption{a4paper}{\@unsupportedoption{a4paper}} + +\DeclareOption{a5paper}{\@unsupportedoption{a5paper}} + +\DeclareOption{authoryear}{\@setflag \@authoryear = \@true} + +\DeclareOption{b5paper}{\@unsupportedoption{b5paper}} + +\DeclareOption{blockstyle}{\@setflag \@blockstyle = \@true} + +\DeclareOption{cm}{\@setflag \@times = \@false} + +\DeclareOption{computermodern}{\@setflag \@times = \@false} + +\DeclareOption{executivepaper}{\@unsupportedoption{executivepaper}} + +\DeclareOption{indentedstyle}{\@setflag \@blockstyle = \@false} + +\DeclareOption{landscape}{\@unsupportedoption{landscape}} + +\DeclareOption{legalpaper}{\@unsupportedoption{legalpaper}} + +\DeclareOption{letterpaper}{\@unsupportedoption{letterpaper}} + +\DeclareOption{mathtime}{\@setflag \@mathtime = \@true} + +\DeclareOption{natbib}{\@setflag \@natbib = \@true} + +\DeclareOption{nonatbib}{\@setflag \@natbib = \@false} + +\DeclareOption{nocopyrightspace}{\@setflag \@copyrightwanted = \@false} + +\DeclareOption{notitlepage}{\@unsupportedoption{notitlepage}} + +\DeclareOption{numberedpars}{\@numheaddepth = 4} + +\DeclareOption{numbers}{\@setflag \@authoryear = \@false} + +%%%\DeclareOption{onecolumn}{\@setflag \@onecolumn = \@true} + +\DeclareOption{preprint}{\@setflag \@preprint = \@true} + +\DeclareOption{reprint}{\@setflag \@reprint = \@true} + +\DeclareOption{times}{\@setflag \@times = \@true} + +\DeclareOption{titlepage}{\@unsupportedoption{titlepage}} + +\DeclareOption{twocolumn}{\@setflag \@onecolumn = \@false} + 
+\DeclareOption*{\PassOptionsToClass{\CurrentOption}{article}} + +\ExecuteOptions{9pt,indentedstyle,times} +\@setflag \@explicitsize = \@false +\ProcessOptions + +\if \@onecolumn + \if \@notp{\@explicitsize}% + \@setflag \@ninepoint = \@false + \PassOptionsToClass{11pt}{article}% + \fi + \PassOptionsToClass{twoside,onecolumn}{article} +\else + \PassOptionsToClass{twoside,twocolumn}{article} +\fi +\LoadClass{article} + +\def \@unsupportedoption #1{% + \ClassError{proc}{The standard '#1' option is not supported.}} + +% This can be used with the 'reprint' option to get the final folios. + +\def \setpagenumber #1{% + \setcounter{page}{#1}} + +\AtEndDocument{\label{sigplanconf at finalpage}} + +% Utilities +% --------- + + +\newcommand{\setvspace}[2]{% + #1 = #2 + \advance #1 by -1\parskip} + +% Document Parameters +% -------- ---------- + + +% Page: + +\setlength{\hoffset}{-1in} +\setlength{\voffset}{-1in} + +\setlength{\topmargin}{1in} +\setlength{\headheight}{0pt} +\setlength{\headsep}{0pt} + +\if \@onecolumn + \setlength{\evensidemargin}{.75in} + \setlength{\oddsidemargin}{.75in} +\else + \setlength{\evensidemargin}{.75in} + \setlength{\oddsidemargin}{.75in} +\fi + +% Text area: + +\newdimen{\standardtextwidth} +\setlength{\standardtextwidth}{42pc} + +\if \@onecolumn + \setlength{\textwidth}{40.5pc} +\else + \setlength{\textwidth}{\standardtextwidth} +\fi + +\setlength{\topskip}{8pt} +\setlength{\columnsep}{2pc} +\setlength{\textheight}{54.5pc} + +% Running foot: + +\setlength{\footskip}{30pt} + +% Paragraphs: + +\if \@blockstyle + \setlength{\parskip}{5pt plus .1pt minus .5pt} + \setlength{\parindent}{0pt} +\else + \setlength{\parskip}{0pt} + \setlength{\parindent}{12pt} +\fi + +\setlength{\lineskip}{.5pt} +\setlength{\lineskiplimit}{\lineskip} + +\frenchspacing +\pretolerance = 400 +\tolerance = \pretolerance +\setlength{\emergencystretch}{5pt} +\clubpenalty = 10000 +\widowpenalty = 10000 +\setlength{\hfuzz}{.5pt} + +% Standard vertical spaces: + 
+\newskip{\standardvspace} +\setvspace{\standardvspace}{5pt plus 1pt minus .5pt} + +% Margin paragraphs: + +\setlength{\marginparwidth}{36pt} +\setlength{\marginparsep}{2pt} +\setlength{\marginparpush}{8pt} + + +\setlength{\skip\footins}{8pt plus 3pt minus 1pt} +\setlength{\footnotesep}{9pt} + +\renewcommand{\footnoterule}{% + \hrule width .5\columnwidth height .33pt depth 0pt} + +\renewcommand{\@makefntext}[1]{% + \noindent \@makefnmark \hspace{1pt}#1} + +% Floats: + +\setcounter{topnumber}{4} +\setcounter{bottomnumber}{1} +\setcounter{totalnumber}{4} + +\renewcommand{\fps at figure}{tp} +\renewcommand{\fps at table}{tp} +\renewcommand{\topfraction}{0.90} +\renewcommand{\bottomfraction}{0.30} +\renewcommand{\textfraction}{0.10} +\renewcommand{\floatpagefraction}{0.75} + +\setcounter{dbltopnumber}{4} + +\renewcommand{\dbltopfraction}{\topfraction} +\renewcommand{\dblfloatpagefraction}{\floatpagefraction} + +\setlength{\floatsep}{18pt plus 4pt minus 2pt} +\setlength{\textfloatsep}{18pt plus 4pt minus 3pt} +\setlength{\intextsep}{10pt plus 4pt minus 3pt} + +\setlength{\dblfloatsep}{18pt plus 4pt minus 2pt} +\setlength{\dbltextfloatsep}{20pt plus 4pt minus 3pt} + +% Miscellaneous: + +\errorcontextlines = 5 + +% Fonts +% ----- + + +\if \@times + \renewcommand{\rmdefault}{ptm}% + \if \@mathtime + \usepackage[mtbold,noTS1]{mathtime}% + \else +%%% \usepackage{mathptm}% + \fi +\else + \relax +\fi + +\if \@ninepoint + +\renewcommand{\normalsize}{% + \@setfontsize{\normalsize}{9pt}{10pt}% + \setlength{\abovedisplayskip}{5pt plus 1pt minus .5pt}% + \setlength{\belowdisplayskip}{\abovedisplayskip}% + \setlength{\abovedisplayshortskip}{3pt plus 1pt minus 2pt}% + \setlength{\belowdisplayshortskip}{\abovedisplayshortskip}} + +\renewcommand{\tiny}{\@setfontsize{\tiny}{5pt}{6pt}} + +\renewcommand{\scriptsize}{\@setfontsize{\scriptsize}{7pt}{8pt}} + +\renewcommand{\small}{% + \@setfontsize{\small}{8pt}{9pt}% + \setlength{\abovedisplayskip}{4pt plus 1pt minus 1pt}% + 
\setlength{\belowdisplayskip}{\abovedisplayskip}% + \setlength{\abovedisplayshortskip}{2pt plus 1pt}% + \setlength{\belowdisplayshortskip}{\abovedisplayshortskip}} + +\renewcommand{\footnotesize}{% + \@setfontsize{\footnotesize}{8pt}{9pt}% + \setlength{\abovedisplayskip}{4pt plus 1pt minus .5pt}% + \setlength{\belowdisplayskip}{\abovedisplayskip}% + \setlength{\abovedisplayshortskip}{2pt plus 1pt}% + \setlength{\belowdisplayshortskip}{\abovedisplayshortskip}} + +\renewcommand{\large}{\@setfontsize{\large}{11pt}{13pt}} + +\renewcommand{\Large}{\@setfontsize{\Large}{14pt}{18pt}} + +\renewcommand{\LARGE}{\@setfontsize{\LARGE}{18pt}{20pt}} + +\renewcommand{\huge}{\@setfontsize{\huge}{20pt}{25pt}} + +\renewcommand{\Huge}{\@setfontsize{\Huge}{25pt}{30pt}} + +\else\if \@tenpoint + +\relax + +\else + +\relax + +\fi\fi + +% Abstract +% -------- + + +\renewenvironment{abstract}{% + \section*{Abstract}% + \normalsize}{% + } + +% Bibliography +% ------------ + + +\renewenvironment{thebibliography}[1] + {\section*{\refname + \@mkboth{\MakeUppercase\refname}{\MakeUppercase\refname}}% + \list{\@biblabel{\@arabic\c at enumiv}}% + {\settowidth\labelwidth{\@biblabel{#1}}% + \leftmargin\labelwidth + \advance\leftmargin\labelsep + \@openbib at code + \usecounter{enumiv}% + \let\p at enumiv\@empty + \renewcommand\theenumiv{\@arabic\c at enumiv}}% + \bibfont + \clubpenalty4000 + \@clubpenalty \clubpenalty + \widowpenalty4000% + \sfcode`\.\@m} + {\def\@noitemerr + {\@latex at warning{Empty `thebibliography' environment}}% + \endlist} + +\if \@natbib + +\if \@authoryear + \typeout{Using natbib package with 'authoryear' citation style.} + \usepackage[authoryear,sort,square]{natbib} + \bibpunct{[}{]}{;}{a}{}{,} % Change citation separator to semicolon, + % eliminate comma between author and year. 
+ \let \cite = \citep +\else + \typeout{Using natbib package with 'numbers' citation style.} + \usepackage[numbers,sort&compress,square]{natbib} +\fi +\setlength{\bibsep}{3pt plus .5pt minus .25pt} + +\fi + +\def \bibfont {\small} + +% Categories +% ---------- + + +\@setflag \@firstcategory = \@true + +\newcommand{\category}[3]{% + \if \@firstcategory + \paragraph*{Categories and Subject Descriptors}% + \@setflag \@firstcategory = \@false + \else + \unskip ;\hspace{.75em}% + \fi + \@ifnextchar [{\@category{#1}{#2}{#3}}{\@category{#1}{#2}{#3}[]}} + +\def \@category #1#2#3[#4]{% + {\let \and = \relax + #1 [\textit{#2}]% + \if \@emptyargp{#4}% + \if \@notp{\@emptyargp{#3}}: #3\fi + \else + :\space + \if \@notp{\@emptyargp{#3}}#3---\fi + \textrm{#4}% + \fi}} + +% Copyright Notice +% --------- ------ + + +\def \ftype at copyrightbox {8} +\def \@toappear {} +\def \@permission {} +\def \@reprintprice {} + +\def \@copyrightspace {% + \@float{copyrightbox}[b]% + \vbox to 1in{% + \vfill + \parbox[b]{20pc}{% + \scriptsize + \if \@preprint + [Copyright notice will appear here + once 'preprint' option is removed.]\par + \else + \@toappear + \fi + \if \@reprint + \noindent Reprinted from \@conferencename, + \@proceedings, + \@conferenceinfo, + pp.~\number\thepage--\pageref{sigplanconf at finalpage}.\par + \fi}}% + \end at float} + +\long\def \toappear #1{% + \def \@toappear {#1}} + +\toappear{% + \noindent \@permission \par + \vspace{2pt} + \noindent \textsl{\@conferencename}\quad \@conferenceinfo \par + \noindent Copyright \copyright\ \@copyrightyear\ ACM \@copyrightdata + \dots \@reprintprice\par} + +\newcommand{\permission}[1]{% + \gdef \@permission {#1}} + +\permission{% + Permission to make digital or hard copies of all or + part of this work for personal or classroom use is granted without + fee provided that copies are not made or distributed for profit or + commercial advantage and that copies bear this notice and the full + citation on the first page. 
To copy otherwise, to republish, to + post on servers or to redistribute to lists, requires prior specific + permission and/or a fee.} + +% Here we have some alternate permission statements and copyright lines: + +\newcommand{\ACMCanadapermission}{% + \permission{% + Copyright \@copyrightyear\ Association for Computing Machinery. + ACM acknowledges that + this contribution was authored or co-authored by an affiliate of the + National Research Council of Canada (NRC). + As such, the Crown in Right of + Canada retains an equal interest in the copyright, however granting + nonexclusive, royalty-free right to publish or reproduce this article, + or to allow others to do so, provided that clear attribution + is also given to the authors and the NRC.}} + +\newcommand{\ACMUSpermission}{% + \permission{% + Copyright \@copyrightyear\ Association for + Computing Machinery. ACM acknowledges that + this contribution was authored or co-authored + by a contractor or affiliate + of the U.S. Government. 
As such, the Government retains a nonexclusive, + royalty-free right to publish or reproduce this article, + or to allow others to do so, for Government purposes only.}} + +\newcommand{\authorpermission}{% + \permission{% + Copyright is held by the author/owner(s).} + \toappear{% + \noindent \@permission \par + \vspace{2pt} + \noindent \textsl{\@conferencename}\quad \@conferenceinfo \par + ACM \@copyrightdata.}} + +\newcommand{\Sunpermission}{% + \permission{% + Copyright is held by Sun Microsystems, Inc.}% + \toappear{% + \noindent \@permission \par + \vspace{2pt} + \noindent \textsl{\@conferencename}\quad \@conferenceinfo \par + ACM \@copyrightdata.}} + +\newcommand{\USpublicpermission}{% + \permission{% + This paper is authored by an employee(s) of the United States + Government and is in the public domain.}% + \toappear{% + \noindent \@permission \par + \vspace{2pt} + \noindent \textsl{\@conferencename}\quad \@conferenceinfo \par + ACM \@copyrightdata.}} + +\newcommand{\reprintprice}[1]{% + \gdef \@reprintprice {#1}} + +\reprintprice{\$10.00} + +% Enunciations +% ------------ + + +\def \@begintheorem #1#2{% {name}{number} + \trivlist + \item[\hskip \labelsep \textsc{#1 #2.}]% + \itshape\selectfont + \ignorespaces} + +\def \@opargbegintheorem #1#2#3{% {name}{number}{title} + \trivlist + \item[% + \hskip\labelsep \textsc{#1\ #2}% + \if \@notp{\@emptyargp{#3}}\nut (#3).\fi]% + \itshape\selectfont + \ignorespaces} + +% Figures +% ------- + + +\@setflag \@caprule = \@true + +\long\def \@makecaption #1#2{% + \addvspace{4pt} + \if \@caprule + \hrule width \hsize height .33pt + \vspace{4pt} + \fi + \setbox \@tempboxa = \hbox{\@setfigurenumber{#1.}\nut #2}% + \if \@dimgtrp{\wd\@tempboxa}{\hsize}% + \noindent \@setfigurenumber{#1.}\nut #2\par + \else + \centerline{\box\@tempboxa}% + \fi} + +\newcommand{\nocaptionrule}{% + \@setflag \@caprule = \@false} + +\def \@setfigurenumber #1{% + {\rmfamily \bfseries \selectfont #1}} + +% Hierarchy +% --------- + + 
+\setcounter{secnumdepth}{\@numheaddepth} + +\newskip{\@sectionaboveskip} +\setvspace{\@sectionaboveskip}{10pt plus 3pt minus 2pt} + +\newskip{\@sectionbelowskip} +\if \@blockstyle + \setlength{\@sectionbelowskip}{0.1pt}% +\else + \setlength{\@sectionbelowskip}{4pt}% +\fi + +\renewcommand{\section}{% + \@startsection + {section}% + {1}% + {0pt}% + {-\@sectionaboveskip}% + {\@sectionbelowskip}% + {\large \bfseries \raggedright}} + +\newskip{\@subsectionaboveskip} +\setvspace{\@subsectionaboveskip}{8pt plus 2pt minus 2pt} + +\newskip{\@subsectionbelowskip} +\if \@blockstyle + \setlength{\@subsectionbelowskip}{0.1pt}% +\else + \setlength{\@subsectionbelowskip}{4pt}% +\fi + +\renewcommand{\subsection}{% + \@startsection% + {subsection}% + {2}% + {0pt}% + {-\@subsectionaboveskip}% + {\@subsectionbelowskip}% + {\normalsize \bfseries \raggedright}} + +\renewcommand{\subsubsection}{% + \@startsection% + {subsubsection}% + {3}% + {0pt}% + {-\@subsectionaboveskip} + {\@subsectionbelowskip}% + {\normalsize \bfseries \raggedright}} + +\newskip{\@paragraphaboveskip} +\setvspace{\@paragraphaboveskip}{6pt plus 2pt minus 2pt} + +\renewcommand{\paragraph}{% + \@startsection% + {paragraph}% + {4}% + {0pt}% + {\@paragraphaboveskip} + {-1em}% + {\normalsize \bfseries \if \@times \itshape \fi}} + +\renewcommand{\subparagraph}{% + \@startsection% + {subparagraph}% + {4}% + {0pt}% + {\@paragraphaboveskip} + {-1em}% + {\normalsize \itshape}} + +% Standard headings: + +\newcommand{\acks}{\section*{Acknowledgments}} + +\newcommand{\keywords}{\paragraph*{Keywords}} + +\newcommand{\terms}{\paragraph*{General Terms}} + +% Identification +% -------------- + + +\def \@conferencename {} +\def \@conferenceinfo {} +\def \@copyrightyear {} +\def \@copyrightdata {[to be supplied]} +\def \@proceedings {[Unknown Proceedings]} + + +\newcommand{\conferenceinfo}[2]{% + \gdef \@conferencename {#1}% + \gdef \@conferenceinfo {#2}} + +\newcommand{\copyrightyear}[1]{% + \gdef \@copyrightyear {#1}} + +\let 
\CopyrightYear = \copyrightyear + +\newcommand{\copyrightdata}[1]{% + \gdef \@copyrightdata {#1}} + +\let \crdata = \copyrightdata + +\newcommand{\proceedings}[1]{% + \gdef \@proceedings {#1}} + +% Lists +% ----- + + +\setlength{\leftmargini}{13pt} +\setlength\leftmarginii{13pt} +\setlength\leftmarginiii{13pt} +\setlength\leftmarginiv{13pt} +\setlength{\labelsep}{3.5pt} + +\setlength{\topsep}{\standardvspace} +\if \@blockstyle + \setlength{\itemsep}{1pt} + \setlength{\parsep}{3pt} +\else + \setlength{\itemsep}{1pt} + \setlength{\parsep}{3pt} +\fi + +\renewcommand{\labelitemi}{{\small \centeroncapheight{\textbullet}}} +\renewcommand{\labelitemii}{\centeroncapheight{\rule{2.5pt}{2.5pt}}} +\renewcommand{\labelitemiii}{$-$} +\renewcommand{\labelitemiv}{{\Large \textperiodcentered}} + +\renewcommand{\@listi}{% + \leftmargin = \leftmargini + \listparindent = 0pt} +%%% \itemsep = 1pt +%%% \parsep = 3pt} +%%% \listparindent = \parindent} + +\let \@listI = \@listi + +\renewcommand{\@listii}{% + \leftmargin = \leftmarginii + \topsep = 1pt + \labelwidth = \leftmarginii + \advance \labelwidth by -\labelsep + \listparindent = \parindent} + +\renewcommand{\@listiii}{% + \leftmargin = \leftmarginiii + \labelwidth = \leftmarginiii + \advance \labelwidth by -\labelsep + \listparindent = \parindent} + +\renewcommand{\@listiv}{% + \leftmargin = \leftmarginiv + \labelwidth = \leftmarginiv + \advance \labelwidth by -\labelsep + \listparindent = \parindent} + +% Mathematics +% ----------- + + +\def \theequation {\arabic{equation}} + +% Miscellaneous +% ------------- + + +\newcommand{\balancecolumns}{% + \vfill\eject + \global\@colht = \textheight + \global\ht\@cclv = \textheight} + +\newcommand{\nut}{\hspace{.5em}} + +\newcommand{\softraggedright}{% + \let \\ = \@centercr + \leftskip = 0pt + \rightskip = 0pt plus 10pt} + +% Program Code +% ------- ---- + + +\newcommand{\mono}[1]{% + {\@tempdima = \fontdimen2\font + \texttt{\spaceskip = 1.1\@tempdima #1}}} + +% Running Heads and Feet +% 
------- ----- --- ---- + + +\def \@preprintfooter {} + +\newcommand{\preprintfooter}[1]{% + \gdef \@preprintfooter {#1}} + +\if \@preprint + +\def \ps at plain {% + \let \@mkboth = \@gobbletwo + \let \@evenhead = \@empty + \def \@evenfoot {\scriptsize \textit{\@preprintfooter}\hfil \thepage \hfil + \textit{\@formatyear}}% + \let \@oddhead = \@empty + \let \@oddfoot = \@evenfoot} + +\else\if \@reprint + +\def \ps at plain {% + \let \@mkboth = \@gobbletwo + \let \@evenhead = \@empty + \def \@evenfoot {\scriptsize \hfil \thepage \hfil}% + \let \@oddhead = \@empty + \let \@oddfoot = \@evenfoot} + +\else + +\let \ps at plain = \ps at empty +\let \ps at headings = \ps at empty +\let \ps at myheadings = \ps at empty + +\fi\fi + +\def \@formatyear {% + \number\year/\number\month/\number\day} + +% Special Characters +% ------- ---------- + + +\DeclareRobustCommand{\euro}{% + \protect{\rlap{=}}{\sf \kern .1em C}} + +% Title Page +% ----- ---- + + +\@setflag \@addauthorsdone = \@false + +\def \@titletext {\@latex at error{No title was provided}{}} +\def \@subtitletext {} + +\newcount{\@authorcount} + +\newcount{\@titlenotecount} +\newtoks{\@titlenotetext} + +\def \@titlebanner {} + +\renewcommand{\title}[1]{% + \gdef \@titletext {#1}} + +\newcommand{\subtitle}[1]{% + \gdef \@subtitletext {#1}} + +\newcommand{\authorinfo}[3]{% {names}{affiliation}{email/URL} + \global\@increment \@authorcount + \@withname\gdef {\@authorname\romannumeral\@authorcount}{#1}% + \@withname\gdef {\@authoraffil\romannumeral\@authorcount}{#2}% + \@withname\gdef {\@authoremail\romannumeral\@authorcount}{#3}} + +\renewcommand{\author}[1]{% + \@latex at error{The \string\author\space command is obsolete; + use \string\authorinfo}{}} + +\newcommand{\titlebanner}[1]{% + \gdef \@titlebanner {#1}} + +\renewcommand{\maketitle}{% + \pagestyle{plain}% + \if \@onecolumn + {\hsize = \standardtextwidth + \@maketitle}% + \else + \twocolumn[\@maketitle]% + \fi + \@placetitlenotes + \if \@copyrightwanted 
\@copyrightspace \fi} + +\def \@maketitle {% + \begin{center} + \@settitlebanner + \let \thanks = \titlenote + {\leftskip = 0pt plus 0.25\linewidth + \rightskip = 0pt plus 0.25 \linewidth + \parfillskip = 0pt + \spaceskip = .7em + \noindent \LARGE \bfseries \@titletext \par} + \vskip 6pt + \noindent \Large \@subtitletext \par + \vskip 12pt + \ifcase \@authorcount + \@latex at error{No authors were specified for this paper}{}\or + \@titleauthors{i}{}{}\or + \@titleauthors{i}{ii}{}\or + \@titleauthors{i}{ii}{iii}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{viii}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{viii}{ix}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{viii}{ix}\@titleauthors{x}{}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{viii}{ix}\@titleauthors{x}{xi}{}\or + \@titleauthors{i}{ii}{iii}\@titleauthors{iv}{v}{vi}% + \@titleauthors{vii}{viii}{ix}\@titleauthors{x}{xi}{xii}% + \else + \@latex at error{Cannot handle more than 12 authors}{}% + \fi + \vspace{1.75pc} + \end{center}} + +\def \@settitlebanner {% + \if \@andp{\@preprint}{\@notp{\@emptydefp{\@titlebanner}}}% + \vbox to 0pt{% + \vskip -32pt + \noindent \textbf{\@titlebanner}\par + \vss}% + \nointerlineskip + \fi} + +\def \@titleauthors #1#2#3{% + \if \@andp{\@emptyargp{#2}}{\@emptyargp{#3}}% + \noindent \@setauthor{40pc}{#1}{\@false}\par + \else\if \@emptyargp{#3}% + \noindent \@setauthor{17pc}{#1}{\@false}\hspace{3pc}% + \@setauthor{17pc}{#2}{\@false}\par + \else + \noindent \@setauthor{12.5pc}{#1}{\@false}\hspace{2pc}% + \@setauthor{12.5pc}{#2}{\@false}\hspace{2pc}% + \@setauthor{12.5pc}{#3}{\@true}\par + \relax + 
\fi\fi + \vspace{20pt}} + +\def \@setauthor #1#2#3{% {width}{text}{unused} + \vtop{% + \def \and {% + \hspace{16pt}} + \hsize = #1 + \normalfont + \centering + \large \@name{\@authorname#2}\par + \vspace{5pt} + \normalsize \@name{\@authoraffil#2}\par + \vspace{2pt} + \textsf{\@name{\@authoremail#2}}\par}} + +\def \@maybetitlenote #1{% + \if \@andp{#1}{\@gtrp{\@authorcount}{3}}% + \titlenote{See page~\pageref{@addauthors} for additional authors.}% + \fi} + +\newtoks{\@fnmark} + +\newcommand{\titlenote}[1]{% + \global\@increment \@titlenotecount + \ifcase \@titlenotecount \relax \or + \@fnmark = {\ast}\or + \@fnmark = {\dagger}\or + \@fnmark = {\ddagger}\or + \@fnmark = {\S}\or + \@fnmark = {\P}\or + \@fnmark = {\ast\ast}% + \fi + \,$^{\the\@fnmark}$% + \edef \reserved at a {\noexpand\@appendtotext{% + \noexpand\@titlefootnote{\the\@fnmark}}}% + \reserved at a{#1}} + +\def \@appendtotext #1#2{% + \global\@titlenotetext = \expandafter{\the\@titlenotetext #1{#2}}} + +\newcount{\@authori} + +\iffalse +\def \additionalauthors {% + \if \@gtrp{\@authorcount}{3}% + \section{Additional Authors}% + \label{@addauthors}% + \noindent + \@authori = 4 + {\let \\ = ,% + \loop + \textbf{\@name{\@authorname\romannumeral\@authori}}, + \@name{\@authoraffil\romannumeral\@authori}, + email: \@name{\@authoremail\romannumeral\@authori}.% + \@increment \@authori + \if \@notp{\@gtrp{\@authori}{\@authorcount}} \repeat}% + \par + \fi + \global\@setflag \@addauthorsdone = \@true} +\fi + +\let \addauthorsection = \additionalauthors + +\def \@placetitlenotes { + \the\@titlenotetext} + +% Utilities +% --------- + + +\newcommand{\centeroncapheight}[1]{% + {\setbox\@tempboxa = \hbox{#1}% + \@measurecapheight{\@tempdima}% % Calculate ht(CAP) - ht(text) + \advance \@tempdima by -\ht\@tempboxa % ------------------ + \divide \@tempdima by 2 % 2 + \raise \@tempdima \box\@tempboxa}} + +\newbox{\@measbox} + +\def \@measurecapheight #1{% {\dimen} + \setbox\@measbox = \hbox{ABCDEFGHIJKLMNOPQRSTUVWXYZ}% + #1 
= \ht\@measbox} + +\long\def \@titlefootnote #1#2{% + \insert\footins{% + \reset at font\footnotesize + \interlinepenalty\interfootnotelinepenalty + \splittopskip\footnotesep + \splitmaxdepth \dp\strutbox \floatingpenalty \@MM + \hsize\columnwidth \@parboxrestore +%%% \protected at edef\@currentlabel{% +%%% \csname p at footnote\endcsname\@thefnmark}% + \color at begingroup + \def \@makefnmark {$^{#1}$}% + \@makefntext{% + \rule\z@\footnotesep\ignorespaces#2\@finalstrut\strutbox}% + \color at endgroup}} + +% LaTeX Modifications +% ----- ------------- + +\def \@seccntformat #1{% + \@name{\the#1}% + \@expandaftertwice\@seccntformata \csname the#1\endcsname.\@mark + \quad} + +\def \@seccntformata #1.#2\@mark{% + \if \@emptyargp{#2}.\fi} + +% Revision History +% -------- ------- + + +% Date Person Ver. Change +% ---- ------ ---- ------ + +% 2004.09.12 PCA 0.1--5 Preliminary development. + +% 2004.11.18 PCA 0.5 Start beta testing. + +% 2004.11.19 PCA 0.6 Obsolete \author and replace with +% \authorinfo. +% Add 'nocopyrightspace' option. +% Compress article opener spacing. +% Add 'mathtime' option. +% Increase text height by 6 points. + +% 2004.11.28 PCA 0.7 Add 'cm/computermodern' options. +% Change default to Times text. + +% 2004.12.14 PCA 0.8 Remove use of mathptm.sty; it cannot +% coexist with latexsym or amssymb. + +% 2005.01.20 PCA 0.9 Rename class file to sigplanconf.cls. + +% 2005.03.05 PCA 0.91 Change default copyright data. + +% 2005.03.06 PCA 0.92 Add at-signs to some macro names. + +% 2005.03.07 PCA 0.93 The 'onecolumn' option defaults to '11pt', +% and it uses the full type width. + +% 2005.03.15 PCA 0.94 Add at-signs to more macro names. +% Allow margin paragraphs during review. + +% 2005.03.22 PCA 0.95 Implement \euro. +% Remove proof and newdef environments. + +% 2005.05.06 PCA 1.0 Eliminate 'onecolumn' option. +% Change footer to small italic and eliminate +% left portion if no \preprintfooter. +% Eliminate copyright notice if preprint. 
+% Clean up and shrink copyright box. + +% 2005.05.30 PCA 1.1 Add alternate permission statements. + +% 2005.06.29 PCA 1.1 Publish final first edition of guide. + +% 2005.07.14 PCA 1.2 Add \subparagraph. +% Use block paragraphs in lists, and adjust +% spacing between items and paragraphs. + +% 2006.06.22 PCA 1.3 Add 'reprint' option and associated +% commands. + +% 2006.08.24 PCA 1.4 Fix bug in \maketitle case command. + +% 2007.03.13 PCA 1.5 The title banner only displays with the +% 'preprint' option. + +% 2007.06.06 PCA 1.6 Use \bibfont in \thebibliography. +% Add 'natbib' option to load and configure +% the natbib package. + +% 2007.11.20 PCA 1.7 Balance line lengths in centered article +% title (thanks to Norman Ramsey). + +% 2009.01.26 PCA 1.8 Change natbib \bibpunct values. + +% 2009.03.24 PCA 1.9 Change natbib to use the 'numbers' option. +% Change templates to use 'natbib' option. + +% 2009.09.01 PCA 2.0 Add \reprintprice command (suggested by +% Stephen Chong). + +% 2009.09.08 PCA 2.1 Make 'natbib' the default; add 'nonatbib'. +% SB Add 'authoryear' and 'numbers' (default) to +% control citation style when using natbib. +% Add \bibpunct to change punctuation for +% 'authoryear' style. + +% 2009.09.21 PCA 2.2 Add \softraggedright to the thebibliography +% environment. Also add to template so it will +% happen with natbib. + +% 2009.09.30 PCA 2.3 Remove \softraggedright from thebibliography. +% Just include in the template. 
+ From commits-noreply at bitbucket.org Fri Mar 25 23:57:04 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:57:04 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: trying to make the trace nicer, not sure I'll keep it Message-ID: <20110325225704.331592A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3408:e5e37b556d38 Date: 2011-03-25 23:27 +0100 http://bitbucket.org/pypy/extradoc/changeset/e5e37b556d38/ Log: trying to make the trace nicer, not sure I'll keep it diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex --- a/talk/icooolps2011/code/trace1.tex +++ b/talk/icooolps2011/code/trace1.tex @@ -1,28 +1,33 @@ \begin{lstlisting}[mathescape,basicstyle=\ttfamily] # inst.getattr("a") -attributes1 = inst.attributes -result1 = dict.get(attributes1, "a") -guard(result1 is not None) +# inside Instance.getfield +$attributes_1$ = inst.attributes +$result_1$ = dict.get($attributes_1$, "a") +guard($result_1$ is not None) # inst.getattr("b") -attributes2 = inst.attributes -v1 = dict.get(attributes2, "b") -guard(v1 is None) -cls1 = inst.cls -methods1 = cls.methods -result2 = dict.get(methods1, "b") -guard(result2 is not None) -v2 = result1 + result2 +# inside Instance.getfield +$attributes_2$ = inst.attributes +$v_1$ = dict.get($attributes_2$, "b") +guard($v_1$ is None) +# inside Class.find_method +$cls_1$ = inst.cls +$methods_1$ = cls.methods +$result_2$ = dict.get($methods_1$, "b") +guard($result_2$ is not None) +$v_2$ = $result_1$ + $result_2$ # inst.getattr("c") -attributes3 = inst.attributes -v3 = dict.get(attributes3, "c") -guard(v3 is None) -cls1 = inst.cls -methods2 = cls.methods -result3 = dict.get(methods2, "c") -guard(result3 is not None) +# inside Instance.getfield +$attributes_3$ = inst.attributes +$v_3$ = dict.get($attributes_3$, "c") +guard($v_3$ is None) +# inside Class.find_method +$cls_1$ = inst.cls +$methods_2$ = cls.methods +$result_3$ = dict.get($methods_2$, "c") 
+guard($result_3$ is not None) -v4 = v2 + result3 -return(v4) +$v_4$ = $v_2$ + $result_3$ +return($v_4$) \end{lstlisting} From commits-noreply at bitbucket.org Fri Mar 25 23:57:06 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:57:06 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: - simplify the code a bit Message-ID: <20110325225706.1D4D12A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3409:4f24d1d2b39b Date: 2011-03-25 23:35 +0100 http://bitbucket.org/pypy/extradoc/changeset/4f24d1d2b39b/ Log: - simplify the code a bit - delete the .py files now that the tex files are not generated any more diff --git a/talk/icooolps2011/code/version.tex b/talk/icooolps2011/code/version.tex --- a/talk/icooolps2011/code/version.tex +++ b/talk/icooolps2011/code/version.tex @@ -11,10 +11,7 @@ def find_method(self, name): self = hint(self, promote=True) version = hint(self.version, promote=True) - result = self._find_method(name, version) - if result is not None: - return result - raise AttributeError(name) + return self._find_method(name, version) @purefunction def _find_method(self, name, version): diff --git a/talk/icooolps2011/code/interpreter-slow.tex b/talk/icooolps2011/code/interpreter-slow.tex --- a/talk/icooolps2011/code/interpreter-slow.tex +++ b/talk/icooolps2011/code/interpreter-slow.tex @@ -1,4 +1,4 @@ -\begin{lstlisting}[mathescape,basicstyle=\ttfamily] +\begin{lstlisting}[mathescape,basicstyle=\ttfamily,numbers = right] class Class(object): def __init__(self, name): self.name = name @@ -8,10 +8,7 @@ return Instance(self) def find_method(self, name): - result = self.methods.get(name) - if result is not None: - return result - raise AttributeError(name) + return self.methods.get(name, None) def change_method(self, name, value): self.methods[name] = value @@ -23,17 +20,15 @@ self.attributes = {} def getfield(self, name): - result = self.attributes.get(name) - if result is not None: - return result - raise 
AttributeError(name) + return self.attributes.get(name, None) def write_attribute(self, name, value): self.attributes[name] = value def getattr(self, name): - try: - return self.getfield(name) - except AttributeError: - return self.cls.find_method(name) + result = self.getfield(name) + if result is None: + result = self.cls.find_method(name) + if result is None: + raise AttributeError \end{lstlisting} diff --git a/talk/icooolps2011/code/map.tex b/talk/icooolps2011/code/map.tex --- a/talk/icooolps2011/code/map.tex +++ b/talk/icooolps2011/code/map.tex @@ -30,7 +30,7 @@ index = map.getindex(name) if index != -1: return self.storage[index] - raise AttributeError(name) + return None def write_attribute(self, name, value): map = hint(self.map, promote=True) @@ -42,8 +42,5 @@ self.storage.append(value) def getattr(self, name): - try: - return self.getfield(name) - except AttributeError: - return self.cls.find_method(name) + ... # as before \end{lstlisting} diff --git a/talk/icooolps2011/code/map.py b/talk/icooolps2011/code/map.py deleted file mode 100644 --- a/talk/icooolps2011/code/map.py +++ /dev/null @@ -1,47 +0,0 @@ -class Map(object): - def __init__(self): - self.indexes = {} - self.other_maps = {} - - @purefunction - def getindex(self, name): - return self.indexes.get(name, -1) - - @purefunction - def add_attribute(self, name): - if name not in self.other_maps: - newmap = Map() - newmap.indexes.update(self.indexes) - newmap.indexes[name] = len(self.indexes) - self.other_maps[name] = newmap - return self.other_maps[name] - -EMPTY_MAP = Map() - -class Instance(object): - def __init__(self, cls): - self.cls = cls - self.map = EMPTY_MAP - self.storage = [] - - def getfield(self, name): - map = hint(self.map, promote=True) - index = map.getindex(name) - if index != -1: - return self.storage[index] - raise AttributeError(name) - - def write_attribute(self, name, value): - map = hint(self.map, promote=True) - index = map.getindex(name) - if index != -1: - 
self.storage[index] = value - return - self.map = map.add_attribute(name) - self.storage.append(value) - - def getattr(self, name): - try: - return self.getfield(name) - except AttributeError: - return self.cls.find_method(name) diff --git a/talk/icooolps2011/code/interpreter-slow.py b/talk/icooolps2011/code/interpreter-slow.py deleted file mode 100644 --- a/talk/icooolps2011/code/interpreter-slow.py +++ /dev/null @@ -1,37 +0,0 @@ -class Class(object): - def __init__(self, name): - self.name = name - self.methods = {} - - def instantiate(self): - return Instance(self) - - def find_method(self, name): - result = self.methods.get(name) - if result is not None: - return result - raise AttributeError(name) - - def change_method(self, name, value): - self.methods[name] = value - - -class Instance(object): - def __init__(self, cls): - self.cls = cls - self.attributes = {} - - def getfield(self, name): - result = self.attributes.get(name) - if result is not None: - return result - raise AttributeError(name) - - def write_attribute(self, name, value): - self.attributes[name] = value - - def getattr(self, name): - try: - return self.getfield(name) - except AttributeError: - return self.cls.find_method(name) diff --git a/talk/icooolps2011/code/version.py b/talk/icooolps2011/code/version.py deleted file mode 100644 --- a/talk/icooolps2011/code/version.py +++ /dev/null @@ -1,24 +0,0 @@ -class VersionTag(object): - pass - -class Class(object): - def __init__(self, name): - self.name = name - self.methods = {} - self.version = VersionTag() - - def find_method(self, name): - self = hint(self, promote=True) - version = hint(self.version, promote=True) - result = self._find_method(name, version) - if result is not None: - return result - raise AttributeError(name) - - @purefunction - def _find_method(self, name, version): - return self.methods.get(name) - - def change_method(self, name, value): - self.methods[name] = value - self.version = VersionTag() From commits-noreply at 
bitbucket.org Fri Mar 25 23:57:08 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Fri, 25 Mar 2011 23:57:08 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: add line numbers to the trace about where the operations originate from Message-ID: <20110325225708.E68CD2A2044@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3410:a12221c2727a Date: 2011-03-25 23:56 +0100 http://bitbucket.org/pypy/extradoc/changeset/a12221c2727a/ Log: add line numbers to the trace about where the operations originate from diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -324,7 +324,10 @@ The trace would look like in Figure~\ref{fig:trace1}. In this example, the attribute \texttt{a} is found on the instance, but the -attributes \texttt{b} and \texttt{c} are found on the class. The trace indeed contains +attributes \texttt{b} and \texttt{c} are found on the class. The numbers line +numbers in the trace correspond to the line numbers in +Figure~\ref{fig:interpreter-slow} where the traced operations come from. The +trace indeed contains five calls to \texttt{dict.get}, which is slow. To make the language efficient using a tracing JIT, we need to find a way to get rid of these dictionary lookups somehow. 
How to achieve this will be topic of diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex --- a/talk/icooolps2011/code/trace1.tex +++ b/talk/icooolps2011/code/trace1.tex @@ -1,33 +1,28 @@ -\begin{lstlisting}[mathescape,basicstyle=\ttfamily] -# inst.getattr("a") -# inside Instance.getfield -$attributes_1$ = inst.attributes -$result_1$ = dict.get($attributes_1$, "a") -guard($result_1$ is not None) +\begin{lstlisting}[mathescape,xleftmargin=20pt,numberblanklines=false,numbers=right,escapechar=|, firstnumber=27,basicstyle=\ttfamily] +# inst.getattr("a") |\setcounter{lstnumber}{21}| +$attributes_1$ = inst.attributes |\setcounter{lstnumber}{21}| +$result_1$ = dict.get($attributes_1$, "a") |\setcounter{lstnumber}{28}| +guard($result_1$ is not None) |\setcounter{lstnumber}{25}| -# inst.getattr("b") -# inside Instance.getfield -$attributes_2$ = inst.attributes -$v_1$ = dict.get($attributes_2$, "b") -guard($v_1$ is None) -# inside Class.find_method -$cls_1$ = inst.cls -$methods_1$ = cls.methods -$result_2$ = dict.get($methods_1$, "b") -guard($result_2$ is not None) -$v_2$ = $result_1$ + $result_2$ +# inst.getattr("b") |\setcounter{lstnumber}{21}| +$attributes_2$ = inst.attributes |\setcounter{lstnumber}{21}| +$v_1$ = dict.get($attributes_2$, "b") |\setcounter{lstnumber}{28}| +guard($v_1$ is None) |\setcounter{lstnumber}{29}| +$cls_1$ = inst.cls |\setcounter{lstnumber}{9}| +$methods_1$ = cls.methods |\setcounter{lstnumber}{9}| +$result_2$ = dict.get($methods_1$, "b") |\setcounter{lstnumber}{30}| +guard($result_2$ is not None) |\setcounter{lstnumber}{-2}| +$v_2$ = $result_1$ + $result_2$ |\setcounter{lstnumber}{25}| -# inst.getattr("c") -# inside Instance.getfield -$attributes_3$ = inst.attributes -$v_3$ = dict.get($attributes_3$, "c") -guard($v_3$ is None) -# inside Class.find_method -$cls_1$ = inst.cls -$methods_2$ = cls.methods -$result_3$ = dict.get($methods_2$, "c") -guard($result_3$ is not None) +# inst.getattr("c") 
|\setcounter{lstnumber}{21}| +$attributes_3$ = inst.attributes |\setcounter{lstnumber}{21}| +$v_3$ = dict.get($attributes_3$, "c") |\setcounter{lstnumber}{28}| +guard($v_3$ is None) |\setcounter{lstnumber}{29}| +$cls_1$ = inst.cls |\setcounter{lstnumber}{9}| +$methods_2$ = cls.methods |\setcounter{lstnumber}{9}| +$result_3$ = dict.get($methods_2$, "c") |\setcounter{lstnumber}{30}| +guard($result_3$ is not None) |\setcounter{lstnumber}{-3}| -$v_4$ = $v_2$ + $result_3$ +$v_4$ = $v_2$ + $result_3$ |\setcounter{lstnumber}{-2}| return($v_4$) -\end{lstlisting} +\end{lstlisting} % XXX find out how to not number lines From commits-noreply at bitbucket.org Sat Mar 26 00:01:07 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sat, 26 Mar 2011 00:01:07 +0100 (CET) Subject: [pypy-svn] pypy default: (lambacck) Implement PyExceptionInstance_Class, Message-ID: <20110325230107.7DBC3282BDD@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42950:daf495c3d9a5 Date: 2011-03-26 00:00 +0100 http://bitbucket.org/pypy/pypy/changeset/daf495c3d9a5/ Log: (lambacck) Implement PyExceptionInstance_Class, simplified because 2.7 strongly discourages old-style exceptions. diff --git a/pypy/module/cpyext/pyerrors.py b/pypy/module/cpyext/pyerrors.py --- a/pypy/module/cpyext/pyerrors.py +++ b/pypy/module/cpyext/pyerrors.py @@ -39,6 +39,10 @@ state = space.fromcache(State) state.clear_exception() + at cpython_api([PyObject], PyObject) +def PyExceptionInstance_Class(space, w_obj): + return space.type(w_obj) + @cpython_api([PyObjectP, PyObjectP, PyObjectP], lltype.Void) def PyErr_Fetch(space, ptype, pvalue, ptraceback): """Retrieve the error indicator into three variables whose addresses are passed. 
diff --git a/pypy/module/cpyext/test/test_pyerrors.py b/pypy/module/cpyext/test/test_pyerrors.py --- a/pypy/module/cpyext/test/test_pyerrors.py +++ b/pypy/module/cpyext/test/test_pyerrors.py @@ -96,6 +96,10 @@ out, err = capfd.readouterr() assert "Exception ValueError: 'message' in 'location' ignored" == err.strip() + def test_ExceptionInstance_Class(self, space, api): + instance = space.call_function(space.w_ValueError) + assert api.PyExceptionInstance_Class(instance) is space.w_ValueError + class AppTestFetch(AppTestCpythonExtensionBase): def setup_class(cls): AppTestCpythonExtensionBase.setup_class.im_func(cls) From commits-noreply at bitbucket.org Sat Mar 26 00:02:32 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:02:32 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: move conclusion bits around Message-ID: <20110325230232.0A7C0282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3411:fe4afbd8c8c0 Date: 2011-03-26 00:02 +0100 http://bitbucket.org/pypy/extradoc/changeset/fe4afbd8c8c0/ Log: move conclusion bits around diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -621,12 +621,6 @@ -\subsection{Conclusion} - -In this section we presented two hints that can be used in the source code -of the interpreter. They are used to influence what the optimizer does with the -trace. The examples given here are a bit too small, the next -section gives a worked-out example that puts all the pieces together. %___________________________________________________________________________ @@ -818,13 +812,6 @@ %___________________________________________________________________________ -\subsection{Conclusion} - -In this section we saw how to use \texttt{purefunction} and \texttt{promote} to make a -small but still relevant dynamic object model no longer use any dictionary lookups -after tracing. 
Instead a number of guards are inserted into the -trace to check whether the assumptions about the objects are still true. This -makes operations on objects seriously faster. \section{Evaluation} \label{sec:evaluation} @@ -833,6 +820,16 @@ \section{Conclusion and Next Steps} +In this paper we presented two hints that can be used in the source code of an +interpreter written with PyPy. They are used to influence what the optimizer +does with the trace. We also showed how a small but still relevant dynamic +object model can use these hints to no longer use any dictionary lookups after +tracing. Instead a number of guards are inserted into the trace to check whether +the assumptions about the objects are still true. This makes operations on +objects seriously faster. + +XXX + \section*{Acknowledgements} \bibliographystyle{abbrv} From commits-noreply at bitbucket.org Sat Mar 26 00:02:48 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:02:48 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: add pdf Message-ID: <20110325230248.8166A282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3412:65233b964723 Date: 2011-03-26 00:02 +0100 http://bitbucket.org/pypy/extradoc/changeset/65233b964723/ Log: add pdf diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf new file mode 100644 index 0000000000000000000000000000000000000000..d36331dd7582c79fa5ddda451e9e9eab656ca51c GIT binary patch [cut] From commits-noreply at bitbucket.org Sat Mar 26 00:14:52 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:14:52 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: fix references Message-ID: <20110325231452.26F072A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3413:20fe811b18ee Date: 2011-03-26 00:08 +0100 http://bitbucket.org/pypy/extradoc/changeset/20fe811b18ee/ Log: fix references diff --git a/talk/icooolps2011/paper.tex 
b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -156,7 +156,7 @@ how it can be improved using these hints. \end{itemize} -The paper is structured as follows: Section~\ref{sec:background} gives an +The paper is structured as follows: Section~\ref{sec:Background} gives an introduction to the PyPy project and meta-tracing and presents an example of a tiny dynamic language object model. Section~\ref{sec:hints} presents the hints, what they do and how they are applied. Section~\ref{sec:fastobjmodel} shows how @@ -169,7 +169,7 @@ \label{sec:Background} \subsection{The PyPy Project} -\label{sect:pypy} +\label{sub:pypy} The PyPy project \cite{armin_rigo_pypys_2006} strives to be an environment where complex dynamic languages can be implemented efficiently. The approach taken @@ -194,7 +194,7 @@ %___________________________________________________________________________ \subsection{PyPy's Meta-Tracing JIT Compilers} -\label{sect:tracing} +\label{sub:tracing} A recently popular approach to JIT compilers is that of tracing JITs. Tracing JITs have their origin in the Dynamo project which used the for dynamic @@ -331,7 +331,7 @@ five calls to \texttt{dict.get}, which is slow. To make the language efficient using a tracing JIT, we need to find a way to get rid of these dictionary lookups somehow. How to achieve this will be topic of -Section~\ref{sec:putting}. +Section~\ref{sec:fastobjmodel}. 
From commits-noreply at bitbucket.org Sat Mar 26 00:14:52 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:14:52 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: change diagram to make it fit into one column to save space Message-ID: <20110325231452.E1FAA2A2033@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3414:46917a775e43 Date: 2011-03-26 00:14 +0100 http://bitbucket.org/pypy/extradoc/changeset/46917a775e43/ Log: change diagram to make it fit into one column to save space diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -244,11 +244,11 @@ the loop in the user function that is being considered. At this point, it can have traced many iterations of the interpreter main loop. -\begin{figure*} +\begin{figure} \includegraphics[scale=0.5]{figures/trace-levels} \caption{The levels involved in tracing} \label{fig:trace-levels} -\end{figure*} +\end{figure} Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left you see the levels of execution. 
The CPU executes the binary of diff --git a/talk/icooolps2011/figures/trace-levels.svg b/talk/icooolps2011/figures/trace-levels.svg --- a/talk/icooolps2011/figures/trace-levels.svg +++ b/talk/icooolps2011/figures/trace-levels.svg @@ -9,8 +9,8 @@ xmlns="http://www.w3.org/2000/svg" xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" - width="858.59668" - height="514.20831" + width="570.76489" + height="514.03967" id="svg2" version="1.1" inkscape:version="0.48.0 r9654" @@ -239,13 +239,13 @@ inkscape:pageopacity="0.0" inkscape:pageshadow="2" inkscape:zoom="0.98994949" - inkscape:cx="295.82388" - inkscape:cy="217.02929" + inkscape:cx="295.51511" + inkscape:cy="216.86068" inkscape:document-units="px" - inkscape:current-layer="layer1" + inkscape:current-layer="g3206-5" showgrid="false" - inkscape:window-width="1920" - inkscape:window-height="1170" + inkscape:window-width="1280" + inkscape:window-height="769" inkscape:window-x="0" inkscape:window-y="1" inkscape:window-maximized="1" @@ -262,7 +262,7 @@ image/svg+xml - + @@ -270,39 +270,35 @@ inkscape:label="Layer 1" inkscape:groupmode="layer" id="layer1" - transform="translate(-9.4277382,-314.88983)"> - - - CPU - + transform="translate(-9.7365118,-314.88983)"> + + CPU + style="color:#000000;fill:#cccccc;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:2.48563313;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate" /> Python Interpreter in RPython + style="color:#000000;fill:#ffffff;fill-opacity:1;fill-rule:nonzero;stroke:#000000;stroke-width:2.48563313;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-opacity:1;stroke-dasharray:none;stroke-dashoffset:0;marker:none;visibility:visible;display:inline;overflow:visible;enable-background:accumulate" /> @@ -396,7 
+392,7 @@ + transform="translate(162.70701,-168.31179)"> + transform="translate(78.864353,-164.90068)"> f4 - - - - g - - - - string_concat - - - - - - - - import_helper1 - Trace for f1 - - - ... - ... ... @@ -757,7 +629,7 @@ x="697.14288" y="36.237335" style="font-style:normal;font-variant:normal;font-weight:normal;font-stretch:normal;font-family:DejaVu Sans Mono;-inkscape-font-specification:DejaVu Sans Mono" /> residual call ... Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3415:6855e11e3681 Date: 2011-03-26 00:17 +0100 http://bitbucket.org/pypy/extradoc/changeset/6855e11e3681/ Log: fix maciej's name diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -159,27 +159,13 @@ doi = {10.1145/1929501.1929508}, abstract = {The performance of many dynamic language implementations suffers from high allocation rates and runtime type checks. This makes dynamic languages less applicable to purely algorithmic problems, despite their growing popularity. 
In this paper we present a simple compiler optimization based on online partial evaluation to remove object allocations and runtime type checks in the context of a tracing {JIT.} We evaluate the optimization using a Python {VM} and find that it gives good results for all our (real-life) benchmarks.}, journal = {Proceedings of the 20th {ACM} {SIGPLAN} workshop on Partial evaluation and program manipulation}, - author = {Carl Friedrich Bolz and Antonio Cuni and Maciej {FijaBkowski} and Michael Leuschel and Samuele Pedroni and Armin Rigo}, + author = {Carl Friedrich Bolz and Antonio Cuni and Maciej Fija\l{}kowski and Michael Leuschel and Samuele Pedroni and Armin Rigo}, year = {2011}, note = {{ACM} {ID:} 1929508}, keywords = {code generation, experimentation, interpreters, languages, optimization, partial evaluation, performance, run-time environments, tracing jit}, pages = {43{\textendash}52} }, - at article{gal_trace-based_2009-1, - series = {{PLDI} '09}, - title = {Trace-based just-in-time type specialization for dynamic languages}, - location = {Dublin, Ireland}, - doi = {10.1145/1542476.1542528}, - abstract = {Dynamic languages such as {JavaScript} are more difficult to compile than statically typed ones. Since no concrete type information is available, traditional compilers need to emit generic code that can handle all possible type combinations at runtime. We present an alternative compilation technique for dynamically-typed languages that identifies frequently executed loop traces at run-time and then generates machine code on the fly that is specialized for the actual dynamic types occurring on each path through the loop. Our method provides cheap inter-procedural type specialization, and an elegant and efficient way of incrementally compiling lazily discovered alternative paths through nested loops. 
We have implemented a dynamic compiler for {JavaScript} based on our technique and we have measured speedups of 10x and more for certain benchmark programs.}, - journal = {{ACM} {SIGPLAN} Notices}, - author = {Andreas Gal and Brendan Eich and Mike Shaver and David Anderson and David Mandelin and Mohammad R Haghighat and Blake Kaplan and Graydon Hoare and Boris Zbarsky and Jason Orendorff and Jesse Ruderman and Edwin W Smith and Rick Reitmaier and Michael Bebenita and Mason Chang and Michael Franz}, - year = {2009}, - note = {{ACM} {ID:} 1542528}, - keywords = {code generation, design, dynamically typed languages, experimentation, incremental compilers, languages, measurement, performance, run-time environments, trace-based compilation}, - pages = {465{\textendash}478} -}, - @inproceedings{chang_tracing_2009, address = {Washington, {DC,} {USA}}, title = {Tracing for Web 3.0: Trace Compilation for the Next Generation Web Applications}, From commits-noreply at bitbucket.org Sat Mar 26 00:31:42 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:31:42 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: add a missing return, change_method -> write_method Message-ID: <20110325233142.10098282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3416:db05fa8a9f44 Date: 2011-03-26 00:22 +0100 http://bitbucket.org/pypy/extradoc/changeset/db05fa8a9f44/ Log: add a missing return, change_method -> write_method diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -756,7 +756,7 @@ checks that the class of \texttt{inst} is still the same. It will fail if the trace is executed with an instance of another class. The third guard checks that the class did not change since the trace was produced. It will fail if somebody -calls the \texttt{change\_method} method on the class. +calls the \texttt{write\_method} method on the class. 
%___________________________________________________________________________ diff --git a/talk/icooolps2011/code/version.tex b/talk/icooolps2011/code/version.tex --- a/talk/icooolps2011/code/version.tex +++ b/talk/icooolps2011/code/version.tex @@ -17,7 +17,7 @@ def _find_method(self, name, version): return self.methods.get(name) - def change_method(self, name, value): + def write_method(self, name, value): self.methods[name] = value self.version = VersionTag() \end{lstlisting} diff --git a/talk/icooolps2011/code/interpreter-slow.tex b/talk/icooolps2011/code/interpreter-slow.tex --- a/talk/icooolps2011/code/interpreter-slow.tex +++ b/talk/icooolps2011/code/interpreter-slow.tex @@ -10,7 +10,7 @@ def find_method(self, name): return self.methods.get(name, None) - def change_method(self, name, value): + def write_method(self, name, value): self.methods[name] = value @@ -31,4 +31,5 @@ result = self.cls.find_method(name) if result is None: raise AttributeError + return result \end{lstlisting} From commits-noreply at bitbucket.org Sat Mar 26 00:31:42 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:31:42 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: number input vars too Message-ID: <20110325233142.B1D02282BDE@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3417:1e9e03f824b6 Date: 2011-03-26 00:31 +0100 http://bitbucket.org/pypy/extradoc/changeset/1e9e03f824b6/ Log: number input vars too diff --git a/talk/icooolps2011/code/trace1.tex b/talk/icooolps2011/code/trace1.tex --- a/talk/icooolps2011/code/trace1.tex +++ b/talk/icooolps2011/code/trace1.tex @@ -1,24 +1,24 @@ \begin{lstlisting}[mathescape,xleftmargin=20pt,numberblanklines=false,numbers=right,escapechar=|, firstnumber=27,basicstyle=\ttfamily] -# inst.getattr("a") |\setcounter{lstnumber}{21}| -$attributes_1$ = inst.attributes |\setcounter{lstnumber}{21}| +# $inst_1$.getattr("a") |\setcounter{lstnumber}{21}| +$attributes_1$ = $inst_1$.attributes 
|\setcounter{lstnumber}{21}| $result_1$ = dict.get($attributes_1$, "a") |\setcounter{lstnumber}{28}| guard($result_1$ is not None) |\setcounter{lstnumber}{25}| -# inst.getattr("b") |\setcounter{lstnumber}{21}| -$attributes_2$ = inst.attributes |\setcounter{lstnumber}{21}| +# $inst_1$.getattr("b") |\setcounter{lstnumber}{21}| +$attributes_2$ = $inst_1$.attributes |\setcounter{lstnumber}{21}| $v_1$ = dict.get($attributes_2$, "b") |\setcounter{lstnumber}{28}| guard($v_1$ is None) |\setcounter{lstnumber}{29}| -$cls_1$ = inst.cls |\setcounter{lstnumber}{9}| +$cls_1$ = $inst_1$.cls |\setcounter{lstnumber}{9}| $methods_1$ = cls.methods |\setcounter{lstnumber}{9}| $result_2$ = dict.get($methods_1$, "b") |\setcounter{lstnumber}{30}| guard($result_2$ is not None) |\setcounter{lstnumber}{-2}| $v_2$ = $result_1$ + $result_2$ |\setcounter{lstnumber}{25}| -# inst.getattr("c") |\setcounter{lstnumber}{21}| -$attributes_3$ = inst.attributes |\setcounter{lstnumber}{21}| +# $inst_1$.getattr("c") |\setcounter{lstnumber}{21}| +$attributes_3$ = $inst_1$.attributes |\setcounter{lstnumber}{21}| $v_3$ = dict.get($attributes_3$, "c") |\setcounter{lstnumber}{28}| guard($v_3$ is None) |\setcounter{lstnumber}{29}| -$cls_1$ = inst.cls |\setcounter{lstnumber}{9}| +$cls_1$ = $inst_1$.cls |\setcounter{lstnumber}{9}| $methods_2$ = cls.methods |\setcounter{lstnumber}{9}| $result_3$ = dict.get($methods_2$, "c") |\setcounter{lstnumber}{30}| guard($result_3$ is not None) |\setcounter{lstnumber}{-3}| From commits-noreply at bitbucket.org Sat Mar 26 00:35:25 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 00:35:25 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: remove duplication Message-ID: <20110325233525.BEC14282BDE@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3418:4ea4073b2941 Date: 2011-03-26 00:35 +0100 http://bitbucket.org/pypy/extradoc/changeset/4ea4073b2941/ Log: remove duplication diff --git a/talk/icooolps2011/paper.tex 
b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -130,13 +130,7 @@ bare meta-tracing. In this paper we present two of these hints that are extensively used in the -PyPy project to improve the performance of its Python interpreter. - The - -PyPy's hints go even further than SPUR's in that they provide the interpreter -author with a flexible toolset to make her implementation extremely efficient. -In this paper we present the two most prominent ones and show how classical -implementation techniques of dynamic languages can be expressed with them. These +PyPy project to improve the performance of its Python interpreter. These hints are used to control how the optimizer of the tracing JIT can improve the traces of the object model. More specifically, these hints influence the constant folding optimization. The first hint make it possible to turn arbitrary From commits-noreply at bitbucket.org Sat Mar 26 01:12:49 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Sat, 26 Mar 2011 01:12:49 +0100 (CET) Subject: [pypy-svn] pypy default: Add support for PyUnicode_FromUnicode(NULL, size), which allocates a (temporarily) mutable unicode string. Message-ID: <20110326001249.15375282B9C@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r42951:88b090e851cc Date: 2011-03-26 01:12 +0100 http://bitbucket.org/pypy/pypy/changeset/88b090e851cc/ Log: Add support for PyUnicode_FromUnicode(NULL, size), which allocates a (temporarily) mutable unicode string. Also implement PyUnicode_Resize. 
See comments in stringobject.py for a complete explanation diff --git a/pypy/module/cpyext/test/test_unicodeobject.py b/pypy/module/cpyext/test/test_unicodeobject.py --- a/pypy/module/cpyext/test/test_unicodeobject.py +++ b/pypy/module/cpyext/test/test_unicodeobject.py @@ -1,9 +1,81 @@ # encoding: iso-8859-15 from pypy.module.cpyext.test.test_api import BaseApiTest -from pypy.module.cpyext.unicodeobject import Py_UNICODE +from pypy.module.cpyext.test.test_cpyext import AppTestCpythonExtensionBase +from pypy.module.cpyext.unicodeobject import ( + Py_UNICODE, PyUnicodeObject, new_empty_unicode) +from pypy.module.cpyext.api import PyObjectP, PyObject +from pypy.module.cpyext.pyobject import Py_DecRef from pypy.rpython.lltypesystem import rffi, lltype import sys, py +class AppTestUnicodeObject(AppTestCpythonExtensionBase): + def test_unicodeobject(self): + module = self.import_extension('foo', [ + ("get_hello1", "METH_NOARGS", + """ + return PyUnicode_FromStringAndSize( + "Hello world", 11); + """), + ("test_GetSize", "METH_NOARGS", + """ + PyObject* s = PyUnicode_FromString("Hello world"); + int result = 0; + + if(PyUnicode_GetSize(s) == 11) { + result = 1; + } + if(s->ob_type->tp_basicsize != sizeof(void*)*4) + result = 0; + Py_DECREF(s); + return PyBool_FromLong(result); + """), + ("test_GetSize_exception", "METH_NOARGS", + """ + PyObject* f = PyFloat_FromDouble(1.0); + Py_ssize_t size = PyUnicode_GetSize(f); + + Py_DECREF(f); + return NULL; + """), + ("test_is_unicode", "METH_VARARGS", + """ + return PyBool_FromLong(PyUnicode_Check(PyTuple_GetItem(args, 0))); + """)]) + assert module.get_hello1() == u'Hello world' + assert module.test_GetSize() + raises(TypeError, module.test_GetSize_exception) + + assert module.test_is_unicode(u"") + assert not module.test_is_unicode(()) + + def test_unicode_buffer_init(self): + module = self.import_extension('foo', [ + ("getunicode", "METH_NOARGS", + """ + PyObject *s, *t; + Py_UNICODE* c; + Py_ssize_t len; + + s = 
PyUnicode_FromUnicode(NULL, 4); + if (s == NULL) + return NULL; + t = PyUnicode_FromUnicode(NULL, 3); + if (t == NULL) + return NULL; + Py_DECREF(t); + c = PyUnicode_AsUnicode(s); + c[0] = 'a'; + c[1] = 0xe9; + c[3] = 'c'; + return s; + """), + ]) + s = module.getunicode() + assert len(s) == 4 + assert s == u'a�\x00c' + + + class TestUnicode(BaseApiTest): def test_unicodeobject(self, space, api): assert api.PyUnicode_GET_SIZE(space.wrap(u'sp�m')) == 4 @@ -77,6 +149,28 @@ assert space.unwrap(w_res) == u'sp�' rffi.free_charp(s) + def test_unicode_resize(self, space, api): + py_uni = new_empty_unicode(space, 10) + ar = lltype.malloc(PyObjectP.TO, 1, flavor='raw') + py_uni.c_buffer[0] = u'a' + py_uni.c_buffer[1] = u'b' + py_uni.c_buffer[2] = u'c' + ar[0] = rffi.cast(PyObject, py_uni) + api.PyUnicode_Resize(ar, 3) + py_uni = rffi.cast(PyUnicodeObject, ar[0]) + assert py_uni.c_size == 3 + assert py_uni.c_buffer[1] == u'b' + assert py_uni.c_buffer[3] == u'\x00' + # the same for growing + ar[0] = rffi.cast(PyObject, py_uni) + api.PyUnicode_Resize(ar, 10) + py_uni = rffi.cast(PyUnicodeObject, ar[0]) + assert py_uni.c_size == 10 + assert py_uni.c_buffer[1] == 'b' + assert py_uni.c_buffer[10] == '\x00' + Py_DecRef(space, ar[0]) + lltype.free(ar, flavor='raw') + def test_AsUTF8String(self, space, api): w_u = space.wrap(u'sp�m') w_res = api.PyUnicode_AsUTF8String(w_u) @@ -235,13 +329,13 @@ x_chunk = api.PyUnicode_AS_UNICODE(w_x) api.Py_UNICODE_COPY(target_chunk, x_chunk, 4) - w_y = api.PyUnicode_FromUnicode(target_chunk, 4) + w_y = space.wrap(rffi.wcharpsize2unicode(target_chunk, 4)) assert space.eq_w(w_y, space.wrap(u"abcd")) size = api.PyUnicode_GET_SIZE(w_x) api.Py_UNICODE_COPY(target_chunk, x_chunk, size) - w_y = api.PyUnicode_FromUnicode(target_chunk, size) + w_y = space.wrap(rffi.wcharpsize2unicode(target_chunk, size)) assert space.eq_w(w_y, w_x) diff --git a/pypy/module/cpyext/stringobject.py b/pypy/module/cpyext/stringobject.py --- a/pypy/module/cpyext/stringobject.py 
+++ b/pypy/module/cpyext/stringobject.py @@ -15,7 +15,7 @@ ## The problem ## ----------- ## -## PyString_AsString() must returns a (non-movable) pointer to the underlying +## PyString_AsString() must return a (non-movable) pointer to the underlying ## buffer, whereas pypy strings are movable. C code may temporarily store ## this address and use it, as long as it owns a reference to the PyObject. ## There is no "release" function to specify that the pointer is not needed diff --git a/pypy/module/cpyext/unicodeobject.py b/pypy/module/cpyext/unicodeobject.py --- a/pypy/module/cpyext/unicodeobject.py +++ b/pypy/module/cpyext/unicodeobject.py @@ -7,15 +7,16 @@ bootstrap_function, PyObjectFields, cpython_struct, CONST_STRING, CONST_WSTRING) from pypy.module.cpyext.pyerrors import PyErr_BadArgument -from pypy.module.cpyext.pyobject import PyObject, from_ref, make_typedescr +from pypy.module.cpyext.pyobject import ( + PyObject, PyObjectP, Py_DecRef, make_ref, from_ref, track_reference, + make_typedescr, get_typedescr) from pypy.module.cpyext.stringobject import PyString_Check from pypy.module.sys.interp_encoding import setdefaultencoding from pypy.objspace.std import unicodeobject, unicodetype from pypy.rlib import runicode import sys -## See comment in stringobject.py. PyUnicode_FromUnicode(NULL, size) is not -## yet supported. +## See comment in stringobject.py. PyUnicodeObjectStruct = lltype.ForwardReference() PyUnicodeObject = lltype.Ptr(PyUnicodeObjectStruct) @@ -28,7 +29,8 @@ make_typedescr(space.w_unicode.instancetypedef, basestruct=PyUnicodeObject.TO, attach=unicode_attach, - dealloc=unicode_dealloc) + dealloc=unicode_dealloc, + realize=unicode_realize) # Buffer for the default encoding (used by PyUnicde_GetDefaultEncoding) DEFAULT_ENCODING_SIZE = 100 @@ -39,12 +41,39 @@ Py_UNICODE = lltype.UniChar +def new_empty_unicode(space, length): + """ + Allocatse a PyUnicodeObject and its buffer, but without a corresponding + interpreter object. 
The buffer may be mutated, until unicode_realize() is + called. + """ + typedescr = get_typedescr(space.w_unicode.instancetypedef) + py_obj = typedescr.allocate(space, space.w_unicode) + py_uni = rffi.cast(PyUnicodeObject, py_obj) + + buflen = length + 1 + py_uni.c_size = length + py_uni.c_buffer = lltype.malloc(rffi.CWCHARP.TO, buflen, + flavor='raw', zero=True) + return py_uni + def unicode_attach(space, py_obj, w_obj): "Fills a newly allocated PyUnicodeObject with a unicode string" py_unicode = rffi.cast(PyUnicodeObject, py_obj) py_unicode.c_size = len(space.unicode_w(w_obj)) py_unicode.c_buffer = lltype.nullptr(rffi.CWCHARP.TO) +def unicode_realize(space, py_obj): + """ + Creates the unicode in the interpreter. The PyUnicodeObject buffer must not + be modified after this call. + """ + py_uni = rffi.cast(PyUnicodeObject, py_obj) + s = rffi.wcharpsize2unicode(py_uni.c_buffer, py_uni.c_size) + w_obj = space.wrap(s) + track_reference(space, py_obj, w_obj) + return w_obj + @cpython_api([PyObject], lltype.Void, external=False) def unicode_dealloc(space, py_obj): py_unicode = rffi.cast(PyUnicodeObject, py_obj) @@ -128,7 +157,9 @@ def PyUnicode_AsUnicode(space, ref): """Return a read-only pointer to the Unicode object's internal Py_UNICODE buffer, NULL if unicode is not a Unicode object.""" - if not PyUnicode_Check(space, ref): + # Don't use PyUnicode_Check, it will realize the object :-( + w_type = from_ref(space, rffi.cast(PyObject, ref.c_ob_type)) + if not space.is_true(space.issubtype(w_type, space.w_unicode)): raise OperationError(space.w_TypeError, space.wrap("expected unicode object")) return PyUnicode_AS_UNICODE(space, ref) @@ -237,10 +268,11 @@ object. If the buffer is not NULL, the return value might be a shared object. 
Therefore, modification of the resulting Unicode object is only allowed when u is NULL.""" - if not wchar_p: - raise NotImplementedError - s = rffi.wcharpsize2unicode(wchar_p, length) - return space.wrap(s) + if wchar_p: + s = rffi.wcharpsize2unicode(wchar_p, length) + return make_ref(space, space.wrap(s)) + else: + return rffi.cast(PyObject, new_empty_unicode(space, length)) @cpython_api([CONST_WSTRING, Py_ssize_t], PyObject) def PyUnicode_FromWideChar(space, wchar_p, length): @@ -330,6 +362,29 @@ w_str = space.wrap(rffi.charpsize2str(s, size)) return space.call_method(w_str, 'decode', space.wrap("utf-8")) + at cpython_api([PyObjectP, Py_ssize_t], rffi.INT_real, error=-1) +def PyUnicode_Resize(space, ref, newsize): + # XXX always create a new string so far + py_uni = rffi.cast(PyUnicodeObject, ref[0]) + if not py_uni.c_buffer: + raise OperationError(space.w_SystemError, space.wrap( + "PyUnicode_Resize called on already created string")) + try: + py_newuni = new_empty_unicode(space, newsize) + except MemoryError: + Py_DecRef(space, ref[0]) + ref[0] = lltype.nullptr(PyObject.TO) + raise + to_cp = newsize + oldsize = py_uni.c_size + if oldsize < newsize: + to_cp = oldsize + for i in range(to_cp): + py_newuni.c_buffer[i] = py_uni.c_buffer[i] + Py_DecRef(space, ref[0]) + ref[0] = rffi.cast(PyObject, py_newuni) + return 0 + @cpython_api([PyObject], PyObject) def PyUnicode_AsUTF8String(space, w_unicode): """Encode a Unicode object using UTF-8 and return the result as Python string From commits-noreply at bitbucket.org Sat Mar 26 08:31:03 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 08:31:03 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: typos Message-ID: <20110326073103.E10A0282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42952:634efac749d4 Date: 2011-03-26 08:07 +0100 http://bitbucket.org/pypy/pypy/changeset/634efac749d4/ Log: typos diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py 
b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -57,7 +57,7 @@ new = self.reconstruct_for_next_iteration(optimizer) if new is None: if force_if_needed: - new = optimizer.OptValue(self.force_box()) + new = OptValue(self.force_box()) else: return None valuemap[self] = new @@ -241,7 +241,8 @@ def turned_constant(self, value): pass - def reconstruct_for_next_iteration(self, optimizer=None, valuemap=None): + def reconstruct_for_next_iteration(self, surviving_boxes=None, + optimizer=None, valuemap=None): #return self.__class__() raise NotImplementedError From commits-noreply at bitbucket.org Sat Mar 26 08:31:05 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 08:31:05 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: enabled string optimizations in the preamble Message-ID: <20110326073105.20D60282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42953:4e31e5901de6 Date: 2011-03-26 08:14 +0100 http://bitbucket.org/pypy/pypy/changeset/4e31e5901de6/ Log: enabled string optimizations in the preamble diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -35,10 +35,6 @@ if name in enable_opts: if opt is not None: o = opt() - if unroll and name == 'string': - o.enabled = False - # FIXME: Workaround to disable string optimisation - # during preamble but to keep it during the loop optimizations.append(o) if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -4921,9 +4921,7 @@ """ # ---------- - def optimize_strunicode_loop(self, ops, optops, 
preamble=None): - if not preamble: - preamble = ops # FIXME: Force proper testing of preamble + def optimize_strunicode_loop(self, ops, optops, preamble): # check with the arguments passed in self.optimize_loop(ops, optops, preamble) # check with replacing 'str' with 'unicode' everywhere @@ -4943,7 +4941,7 @@ [i0] jump(i0) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_newstr_2(self): ops = """ @@ -4959,7 +4957,7 @@ [i0, i1] jump(i1, i0) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_concat_1(self): ops = """ From commits-noreply at bitbucket.org Sat Mar 26 08:46:59 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 08:46:59 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: fixed a few test Message-ID: <20110326074659.6FA1D282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42954:731730deb79a Date: 2011-03-26 08:46 +0100 http://bitbucket.org/pypy/pypy/changeset/731730deb79a/ Log: fixed a few test diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -4978,7 +4978,7 @@ copystrcontent(p2, p3, 0, i4, i5) jump(p2, p3) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_concat_vstr2_str(self): ops = """ @@ -5001,7 +5001,7 @@ copystrcontent(p2, p3, 0, 2, i4) jump(i1, i0, p3) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_concat_str_vstr2(self): ops = """ @@ -5025,7 +5025,7 @@ i6 = int_add(i5, 1) # will be killed by the backend jump(i1, i0, p3) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def 
test_str_concat_str_str_str(self): ops = """ @@ -5052,7 +5052,7 @@ copystrcontent(p3, p5, 0, i12b, i3b) jump(p2, p3, p5) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_concat_str_cstr1(self): ops = """ @@ -5071,7 +5071,7 @@ i5 = int_add(i4, 1) # will be killed by the backend jump(p3) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_concat_consts(self): ops = """ @@ -5082,18 +5082,12 @@ escape(p3) jump() """ - preamble = """ - [] - p3 = call(0, s"ab", s"cde", descr=strconcatdescr) - escape(p3) - jump() - """ expected = """ [] escape(s"abcde") jump() """ - self.optimize_strunicode_loop(ops, expected, preamble) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_1(self): ops = """ @@ -5108,7 +5102,7 @@ copystrcontent(p1, p2, i1, 0, i3) jump(p2, i1, i2) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_2(self): ops = """ @@ -5122,7 +5116,7 @@ copystrcontent(p1, p2, 0, 0, i2) jump(p2, i2) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_3(self): ops = """ @@ -5140,7 +5134,7 @@ copystrcontent(p1, p3, i6, 0, i5) jump(p3, i1, i2, i3, i4) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_getitem1(self): ops = """ @@ -5158,7 +5152,7 @@ escape(i4) jump(p1, i1, i2, i3) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_plain(self): ops = """ @@ -5176,7 +5170,7 @@ escape(i4) jump(i3, i4) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_concat(self): ops = """ @@ -5197,7 +5191,7 @@ copystrcontent(p2, p4, 0, i3, i4b) jump(p4, i1, i2, 
p2) """ - self.optimize_strunicode_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_strgetitem_small(self): ops = """ @@ -5214,7 +5208,7 @@ i1 = strgetitem(p0, i0) jump(p0, i0) """ - self.optimize_loop(ops, expected) + self.optimize_loop(ops, expected, expected) def test_strlen_positive(self): ops = """ From commits-noreply at bitbucket.org Sat Mar 26 09:02:12 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 09:02:12 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: fixed str_equal tests Message-ID: <20110326080212.D1714282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42955:43999ed0c0e7 Date: 2011-03-26 09:01 +0100 http://bitbucket.org/pypy/pypy/changeset/43999ed0c0e7/ Log: fixed str_equal tests diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5242,7 +5242,7 @@ self.optimize_loop(ops, expected) # ---------- - def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble=None): + def optimize_strunicode_loop_extradescrs(self, ops, optops, preamble): from pypy.jit.metainterp.optimizeopt import string class FakeCallInfoCollection: def callinfo_for_oopspec(self, oopspecindex): @@ -5266,7 +5266,7 @@ escape(i0) jump(p1, p2) """ - self.optimize_strunicode_loop_extradescrs(ops, ops) + self.optimize_strunicode_loop_extradescrs(ops, ops, ops) def test_str_equal_noop2(self): ops = """ @@ -5291,7 +5291,7 @@ escape(i0) jump(p1, p2, p3) """ - self.optimize_strunicode_loop_extradescrs(ops, + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_slice1(self): @@ -5309,7 +5309,7 @@ escape(i0) jump(p1, i1, i2, p3) """ - self.optimize_strunicode_loop_extradescrs(ops, + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_slice2(self): @@ -5327,7 
+5327,7 @@ escape(i0) jump(p1, i1, i2, p3) """ - self.optimize_strunicode_loop_extradescrs(ops, + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_slice3(self): @@ -5346,8 +5346,16 @@ escape(i0) jump(p1, i1, i2, p3) """ + preamble = """ + [p1, i1, i2, p3] + guard_nonnull(p3) [] + i4 = int_sub(i2, i1) + i0 = call(0, p1, i1, i4, p3, descr=streq_slice_nonnull_descr) + escape(i0) + jump(p1, i1, i2, p3) + """ self.optimize_strunicode_loop_extradescrs(ops, - expected, ops) + expected, preamble) def test_str_equal_slice4(self): ops = """ @@ -5364,7 +5372,7 @@ escape(i0) jump(p1, i1, i2) """ - self.optimize_strunicode_loop_extradescrs(ops, + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_slice5(self): @@ -5384,7 +5392,7 @@ escape(i0) jump(p1, i1, i2, i3) """ - self.optimize_strunicode_loop_extradescrs(ops, + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_none1(self): @@ -5400,7 +5408,7 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_none2(self): ops = """ @@ -5415,7 +5423,7 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_nonnull1(self): ops = """ @@ -5431,7 +5439,14 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + preamble = """ + [p1] + guard_nonnull(p1) [] + i0 = call(0, p1, s"hello world", descr=streq_nonnull_descr) + escape(i0) + jump(p1) + """ + self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) def test_str_equal_nonnull2(self): ops = """ @@ -5448,7 +5463,15 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + preamble = """ + [p1] + guard_nonnull(p1) [] + i1 = strlen(p1) + i0 = int_eq(i1, 0) + escape(i0) + jump(p1) + 
""" + self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) def test_str_equal_nonnull3(self): ops = """ @@ -5464,7 +5487,14 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + preamble = """ + [p1] + guard_nonnull(p1) [] + i0 = call(0, p1, 120, descr=streq_nonnull_char_descr) + escape(i0) + jump(p1) + """ + self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) def test_str_equal_nonnull4(self): ops = """ @@ -5489,7 +5519,7 @@ escape(i0) jump(p1, p2) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_chars0(self): ops = """ @@ -5504,7 +5534,7 @@ escape(1) jump(i1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_chars1(self): ops = """ @@ -5515,13 +5545,18 @@ escape(i0) jump(i1) """ - expected = """ + preamble = """ [i1] i0 = int_eq(i1, 120) # ord('x') escape(i0) - jump(i1) - """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + jump(i1, i0) + """ + expected = """ + [i1, i0] + escape(i0) + jump(i1, i0) + """ + self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) def test_str_equal_chars2(self): ops = """ @@ -5542,7 +5577,7 @@ escape(i0) jump(i1, i2) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_chars3(self): ops = """ @@ -5557,7 +5592,7 @@ escape(i0) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str_equal_lengthmismatch1(self): ops = """ @@ -5573,7 +5608,7 @@ escape(0) jump(i1) """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str2unicode_constant(self): ops = """ From commits-noreply at 
bitbucket.org Sat Mar 26 09:21:53 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 09:21:53 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: all tests passing Message-ID: <20110326082153.CF28D282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42956:d18dc2bec7b8 Date: 2011-03-26 09:21 +0100 http://bitbucket.org/pypy/pypy/changeset/d18dc2bec7b8/ Log: all tests passing diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5622,7 +5622,7 @@ escape(u"xy") jump() """ - self.optimize_strunicode_loop_extradescrs(ops, expected) + self.optimize_strunicode_loop_extradescrs(ops, expected, expected) def test_str2unicode_nonconstant(self): ops = """ @@ -5631,7 +5631,7 @@ escape(p1) jump(p1) """ - self.optimize_strunicode_loop_extradescrs(ops, ops) + self.optimize_strunicode_loop_extradescrs(ops, ops, ops) # more generally, supporting non-constant but virtual cases is # not obvious, because of the exception UnicodeDecodeError that # can be raised by ll_str2unicode() From commits-noreply at bitbucket.org Sat Mar 26 10:15:22 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 10:15:22 +0100 (CET) Subject: [pypy-svn] pypy jit-usable_retrace: hg merge default Message-ID: <20110326091522.83A6A282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42957:772b7ad6a7a6 Date: 2011-03-26 09:34 +0100 http://bitbucket.org/pypy/pypy/changeset/772b7ad6a7a6/ Log: hg merge default diff --git a/pypy/doc/config/objspace.std.optimized_int_add.txt b/pypy/doc/config/objspace.std.optimized_int_add.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_int_add.txt +++ /dev/null @@ -1,2 +0,0 @@ -Optimize the addition of two integers a bit. Enabling this option gives small -speedups. 
diff --git a/pypy/doc/config/objspace.std.txt b/pypy/doc/config/objspace.std.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/jit/metainterp/history.py b/pypy/jit/metainterp/history.py --- a/pypy/jit/metainterp/history.py +++ b/pypy/jit/metainterp/history.py @@ -522,7 +522,7 @@ def forget_value(self): self.value = 0 - + def clonebox(self): return BoxInt(self.value) @@ -788,6 +788,7 @@ inputvalues = None operations = None token = None + call_pure_results = None def __init__(self, name): self.name = name @@ -939,6 +940,9 @@ def add_new_loop(self, loop): pass + def record_aborted(self, greenkey): + pass + def view(self, **kwds): pass @@ -953,6 +957,7 @@ def __init__(self): self.loops = [] self.locations = [] + self.aborted_keys = [] def set_history(self, history): self.history = history @@ -975,6 +980,9 @@ def add_new_loop(self, loop): self.loops.append(loop) + def record_aborted(self, greenkey): + self.aborted_keys.append(greenkey) + # test read interface def get_all_loops(self): diff --git a/pypy/doc/config/objspace.nofaking.txt b/pypy/doc/config/objspace.nofaking.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.nofaking.txt +++ /dev/null @@ -1,7 +0,0 @@ -This options prevents the automagic borrowing of implementations of -modules and types not present in PyPy from CPython. - -As such, it is required when translating, as then there is no CPython -to borrow from. For running py.py it is useful for testing the -implementation of modules like "posix", but it makes everything even -slower than it is already. 
diff --git a/pypy/doc/config/objspace.std.withprebuiltchar.txt b/pypy/doc/config/objspace.std.withprebuiltchar.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.pyexpat.txt b/pypy/doc/config/objspace.usemodules.pyexpat.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pyexpat.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use (experimental) pyexpat module written in RPython, instead of CTypes -version which is used by default. diff --git a/pypy/doc/config/translation.gcrootfinder.txt b/pypy/doc/config/translation.gcrootfinder.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcrootfinder.txt +++ /dev/null @@ -1,15 +0,0 @@ -Choose method how to find roots in the GC. Boehm and refcounting have their own -methods, this is mostly only interesting for framework GCs. For those you have -a choice of various alternatives: - - - use a shadow stack (XXX link to paper), e.g. explicitly maintaining a stack - of roots - - - use stackless to find roots by unwinding the stack. Requires - :config:`translation.stackless`. Note that this turned out to - be slower than just using a shadow stack. - - - use GCC and i386 specific assembler hackery to find the roots on the stack. - This is fastest but platform specific. - - - Use LLVM's GC facilities to find the roots. diff --git a/pypy/doc/dev_method.txt b/pypy/doc/dev_method.txt deleted file mode 100644 --- a/pypy/doc/dev_method.txt +++ /dev/null @@ -1,360 +0,0 @@ -Distributed and agile development in PyPy -========================================= - -PyPy isn't just about producing code - it's also about how we produce code. -The challenges of coordinating work within a community and making sure it is -fused together with the parts of the project that is EU funded are tricky -indeed. 
Our aim is of course to make sure that the communities way of working -is disturbed as little as possible and that contributing to PyPy still feels -fun and interesting (;-) but also to try to show to the EU as well as other -funded projects that open source ideas, tools and methods are really good ways -of running development projects. So the way PyPy as a project is being run - -distributed and agile - is something we think might be of use to other open -source development projects and commercial projects. - -Main methods for achieving this is: - - * Sprint driven development - * Sync meetings - -Main tools for achieving this is: - - * py.test - automated testing - * Subversion - version control - * Transparent communication and documentation (mailinglists, IRC, tutorials - etc etc) - - -Sprint driven development: --------------------------- - -What is a sprint and why are we sprinting? - -Originally the sprint methodology used in the Python community grew from -practices within Zope3 development. The definition of a sprint is "two-day or -three-day focused development session, in which developers pair off together -in a room and focus on building a particular subsystem". - -Other typical sprint factors: - - * no more than 10 people (although other projects as well as PyPy haven been - noted to have more than that. This is the recommendation and it is - probably based on the idea of having a critical mass of people who can - interact/communicate and work without adding the need for more than just - the absolute necessary coordination time. The sprints during 2005 and 2006 have - been having ca 13-14 people per sprint, the highest number of participants - during a PyPy sprint has been 24 developers) - - * a coach (the coach is the "manager" of the sprint, he/she sets the goals, - prepares, leads and coordinate the work and track progress and makes this - visible for the team. Important to note here - PyPy have never had coaches - in our sprints. 
Instead we hold short status meetings in the whole group, - decisions are made in the same way. So far this have worked well and we - still have been able to achieve tremendous results under stressed - conditions, releases and such like. What we do have is a local organizer, - often a developer living in the area and one more developer who prepares - and organizes sprint. They do not "manage" the sprint when its started - - their role is more of the logistic nature. This doesn't mean that we wont - have use for the coach technique or something similar in the future). - - * only coding (this is a tough one. There have been projects who have used - the sprinting method to just visionalize och gather input. PyPy have had a - similar brainstorming start up sprint. So far though this is the official - line although again, if you visit a PyPy sprint we are doing quite a lot - of other small activities in subgroups as well - planning sprints, - documentation, coordinating our EU deliverables and evaluation etc. But - don't worry - our main focus is programming ;-) - - * using XP techniques (mainly pairprogramming and unit testing - PyPy is - leaning heavily on these aspects). Pairing up core developers with people - with different levels of knowledge of the codebase have had the results - that people can quite quickly get started and join in the development. - Many of our participants (new to the project and the codebase) have - expressed how pairprogramming in combination with working on the automated - tests have been a great way of getting started. This is of course also a - dilemma because our core developers might have to pair up to solve some - extra hairy problems which affects the structure and effect of the other - pairs. 
- -It is a method that fits distributed teams well because it gets the team -focused around clear (and challenging) goals while working collaborative -(pairprogramming, status meeting, discussions etc) as well as accelerated -(short increments and tasks, "doing" and testing instead of long start ups of -planning and requirement gathering). This means that most of the time a sprint -is a great way of getting results, but also to get new people acquainted with -the codebase. It is also a great method for dissemination and learning within -the team because of the pairprogramming. - -If sprinting is combined with actually moving around and having the sprint -close to the different active developer groups in the community as well as -during conferences like PyCon and EuroPython, the team will have an easier -task of recruiting new talents to the team. It also vitalizes the community -and increases the contact between the different Python implementation -projects. - -As always with methodologies you have to adapt them to fit your project (and -not the other way around which is much too common). The PyPy team have been -sprinting since early 2003 and have done 22 sprints so far, 19 in Europe, 2 -in the USA and 1 in Asia. Certain practices have proven to be more successful within this -team and those are the one we are summarizing here. - - -How is it done? -+++++++++++++++ - -There are several aspects of a sprint. In the PyPy team we focus on: -1. Content (goal) -2. Venue -3. Information -4. Process - -1. Content (goal) is discussed on mailinglists (pypy-dev) and on IRC ca one - month before the event. Beforehand we have some rough plans called "between - sprints" and the sprintplan is based on the status of those issues but also - with a focus on upcoming releases and deliverables. Usually its the core - developers who does this but the transparency and participation have - increased since we started with our weekly "pypy-sync meetings" on IRC. 
The - sync meetings in combination with a rough in between planning makes it - easier for other developer to follow the progress and thus participating in - setting goals for the upcoming sprints. - - The goal needs to be challenging or it won't rally the full effort of the - team, but it must not be unrealistic as that tends to be very frustrating - and dissatisfying. It is also very important to take into account the - participants when you set the goal for the sprint. If the sprint takes place - connected to a conference (or similar open events) the goals for the actual - coding progress should be set lower (or handled in another way) and focus - should shift to dissemination and getting new/interested people to a - certain understanding of the PyPy codebase. Setting the right goal and - making sure this is a shared one is important because it helps the - participants coming in with somewhat similar expectations ;-) - -2. Venue - in the PyPy project we have a rough view on where we are sprinting - a few months ahead. No detailed plans have been made that far in - advance. Knowing the dates and the venue makes flight bookings easier ;-) - The venue is much more important than one would think. We need to have a - somewhat comfortable environment to work in (where up to 15 people can sit - and work), this means tables and chairs, light and electricity outlets. Is - it a venue needing access cards so that only one person is allowed to open? - How long can you stay - 24 hours per day or does the landlord want the team - evacuated by 23:00? These are important questions that can gravely affect - the "feel and atmosphere" of the sprint as well as the desired results! - - Also, somewhat close to low cost places to eat and accommodate - participants. Facilities for making tea/coffee as well as some kind of - refrigerator for storing food. 
A permanent Internet connection is a must - - has the venue were the sprint is planned to be weird rules for access to - their network etc etc? - - Whiteboards are useful tools and good to have. Beamers (PyPy jargon for a projector) - are very useful for the status meetings and should be available, at least 1. The - project also owns one beamer - specifically for sprint purposes. - - The person making sure that the requirements for a good sprint venue is - being met should therefore have very good local connections or, preferably - live there. - -3. Information - discussions about content and goals (pre announcements) are - usually carried out on pypy-dev (mailinglist/IRC). All other info is - distributed via email on pypy-sprint mailinglist and as web pages on - codespeak. When dates, venue and content is fully decided a sprint - announcement is being made and sent out to pypy-dev and pypy-sprint as well - as more general purpose mailing lists like comp.lang.python and updated on - codespeak - this happens 2-4 weeks before the sprint. It's important that - the sprint announcements points to information about local transportation - (to the country and to the city and to the venue), currency issues, food - and restaurants etc. There are also webpages in which people announce when - they will arrive and where they are accommodated. - - The planning text for the sprint is updated up till the sprint and is then - used during the status meetings and between to track work. After the sprint - (or even better: in between so that the memory is fresh) a sprint report is - written by one of the developers and updated to codespeak, this is a kind - of summary of the entire sprint and it tells of the work done and the - people involved. - - One very important strategy when planning the venue is cost - efficiency. Keeping accommodation and food/travel costs as low as possible - makes sure that more people can afford to visit or join the sprint - fully. 
The partially EU funded parts of the project do have a so called sprint budget - which we use to try to help developers to participate in our sprints - (travel expenses and accommodation) and because most of the funding is so - called matched funding we pay for most of our expenses in our own - organizations and companies anyway. - - -4. Process - a typical PyPy sprint is 7 days with a break day in the - middle. Usually sprinters show up the day before the sprint starts. The - first day has a start up meeting, with tutorials if there are participants - new to the project or if some new tool or feature have been implemented. A - short presentation of the participants and their background and - expectations is also good to do. Unfortunately there is always time spent - the first day, mostly in the morning when people arrive to get the internet - and server infrastructure up and running. That is why we are, through - documentation_, trying to get participants to set up the tools and - configurations needed before they arrive to the sprint. - - Approximate hours being held are 10-17, but people tend to stay longer to - code during the evenings. A short status meeting starts up the day and work - is "paired" out according to need and wishes. The PyPy sprints are - developer and group driven, because we have no "coach" our status meetings - are very much group discussion while notes are taken and our planning texts - are updated. Also - the sprint is done (planned and executed) within the - developer group together with someone acquainted with the local region - (often a developer living there). So within the team there is no one - formally responsible for the sprints. - - Suggestions for off hours activities and social events for the break day is - a good way of emphasizing how important it is to take breaks - some - pointers in that direction from the local organizer is good. 
- - At the end of the sprint we do a technical summary (did we achieve the - goals/content), what should be a rough focus for the work until the next - sprint and the sprint wheel starts rolling again ;-) An important aspect is - also to evaluate the sprint with the participants. Mostly this is done via - emailed questions after the sprint, it could also be done as a short group - evaluation as well. The reason for evaluating is of course to get feedback - and to make sure that we are not missing opportunities to make our sprints - even more efficient and enjoyable. - - The main challenge of our sprint process is the fact that people show up - at different dates and leave at different dates. That affects the shared - introduction (goals/content, tutorials, presentations etc) and also the - closure - the technical summary etc. Here we are still struggling to find - some middle ground - thus increases the importance of feedback. - - -.. _documentation: getting-started.html - -Can I join in? -++++++++++++++ - -Of course. Just follow the work on pypy-dev and if you specifically are -interested in information about our sprints - subscribe to -pypy-sprint at codespeak.net and read the news on codespeak for announcements etc. - -If you think we should sprint in your town - send us an email - we are very -interested in using sprints as away of making contact with active developers -(Python/compiler design etc)! - -If you have questions about our sprints and EU-funding - please send an email -to pypy-funding at codespeak.net, our mailinglist for project coordination. - -Previous sprints? 
-+++++++++++++++++ - -The PyPy team has been sprinting on the following occasions:: - - * Hildesheim Feb 2003 - * Gothenburg May 2003 - * Europython/Louvain-La-Neuve June 2003 - * Berlin Sept 2003 - * Amsterdam Dec 2003 - * Europython/Gothenburg June 2004 - * Vilnius Nov 2004 - * Leysin Jan 2005 - * PyCon/Washington March 2005 - * Europython/Gothenburg June 2005 - * Hildesheim July 2005 - * Heidelberg Aug 2005 - * Paris Oct 2005 - * Gothenburg Dec 2005 - * Mallorca Jan 2006 - * PyCon/Dallas Feb 2006 - * Louvain-La-Neuve March 2006 - * Leysin April 2006 - * Tokyo April 2006 - * Düsseldorf June 2006 - * Europython/Geneva July 2006 - * Limerick Aug 2006 - * Düsseldorf Oct 2006 - * Leysin Jan 2007 - * Hildesheim Feb 2007 - -People who have participated and contributed during our sprints and thus -contributing to PyPy (if we have missed someone here - please contact us -so we can correct it): - - Armin Rigo - Holger Krekel - Samuele Pedroni - Christian Tismer - Laura Creighton - Jacob Hallén - Michael Hudson - Richard Emslie - Anders Chrigström - Alex Martelli - Ludovic Aubry - Adrien DiMascio - Nicholas Chauvat - Niklaus Haldimann - Anders Lehmann - Carl Friedrich Bolz - Eric Van Riet Paap - Stephan Diel - Dinu Gherman - Jens-Uwe Mager - Marcus Denker - Bert Freudenberg - Gunther Jantzen - Henrion Benjamin - Godefroid Chapelle - Anna Ravenscroft - Tomek Meka - Jonathan David Riehl - Patrick Maupain - Etienne Posthumus - Nicola Paolucci - Albertas Agejevas - Marius Gedminas - Jesus Cea Avion - Olivier Dormond - Jacek Generowicz - Brian Dorsey - Guido van Rossum - Bob Ippolito - Alan McIntyre - Lutz Paelike - Michael Chermside - Beatrice Düring - Boris Feigin - Amaury Forgeot d'Arc - Andrew Thompson - Valentino Volonghi - Aurelien Campeas - Stephan Busemann - Johan Hahn - Gerald Klix - Gene Oden - Josh Gilbert - Geroge Paci - Martin Blais - Stuart Williams - Jiwon Seo - Michael Twomey - Wanja Saatkamp - Alexandre Fayolle - Raphaël Collet - Grégoire Dooms - Sanghyeon Seo 
- Yutaka Niibe - Yusei Tahara - George Toshida - Koichi Sasada - Guido Wesdorp - Maciej Fijalkowski - Antonio Cuni - Lawrence Oluyede - Fabrizio Milo - Alexander Schremmer - David Douard - Michele Frettoli - Simon Burton - Aaron Bingham - Pieter Zieschang - Sad Rejeb - Brian Sutherland - Georg Brandl - - diff --git a/pypy/doc/config/objspace.std.mutable_builtintypes.txt b/pypy/doc/config/objspace.std.mutable_builtintypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.mutable_builtintypes.txt +++ /dev/null @@ -1,1 +0,0 @@ -Allow modification of builtin types. Disabled by default. diff --git a/pypy/doc/config/objspace.usemodules.crypt.txt b/pypy/doc/config/objspace.usemodules.crypt.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.crypt.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'crypt' module. -This module is expected to be fully working. diff --git a/pypy/jit/metainterp/optimize.py b/pypy/jit/metainterp/optimize.py --- a/pypy/jit/metainterp/optimize.py +++ b/pypy/jit/metainterp/optimize.py @@ -4,42 +4,44 @@ from pypy.jit.metainterp.optimizeopt import optimize_loop_1, optimize_bridge_1 -def optimize_loop(metainterp_sd, old_loop_tokens, loop): +def optimize_loop(metainterp_sd, old_loop_tokens, loop, enable_opts): debug_start("jit-optimize") try: - return _optimize_loop(metainterp_sd, old_loop_tokens, loop) + return _optimize_loop(metainterp_sd, old_loop_tokens, loop, + enable_opts) finally: debug_stop("jit-optimize") -def _optimize_loop(metainterp_sd, old_loop_tokens, loop): +def _optimize_loop(metainterp_sd, old_loop_tokens, loop, enable_opts): cpu = metainterp_sd.cpu metainterp_sd.logger_noopt.log_loop(loop.inputargs, loop.operations) # XXX do we really still need a list? 
if old_loop_tokens: return old_loop_tokens[0] - optimize_loop_1(metainterp_sd, loop) + optimize_loop_1(metainterp_sd, loop, enable_opts) return None # ____________________________________________________________ -def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, +def optimize_bridge(metainterp_sd, old_loop_tokens, bridge, enable_opts, inline_short_preamble=True, retraced=None): debug_start("jit-optimize") try: return _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, + enable_opts, inline_short_preamble, retraced) finally: debug_stop("jit-optimize") -def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, +def _optimize_bridge(metainterp_sd, old_loop_tokens, bridge, enable_opts, inline_short_preamble, retraced=None): cpu = metainterp_sd.cpu metainterp_sd.logger_noopt.log_loop(bridge.inputargs, bridge.operations) if old_loop_tokens: old_loop_token = old_loop_tokens[0] bridge.operations[-1].setdescr(old_loop_token) # patch jump target - optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble, - retraced) + optimize_bridge_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble, retraced) return old_loop_tokens[0] #return bridge.operations[-1].getdescr() return None diff --git a/pypy/doc/discussion/testing-zope.txt b/pypy/doc/discussion/testing-zope.txt deleted file mode 100644 --- a/pypy/doc/discussion/testing-zope.txt +++ /dev/null @@ -1,45 +0,0 @@ -Testing Zope on top of pypy-c -============================= - -Getting Zope packages ---------------------- - -If you don't have a full Zope installation, you can pick a Zope package, -check it out via Subversion, and get all its dependencies (replace -``$PKG`` with, for example, ``zope.interface``):: - - svn co svn://svn.zope.org/repos/main/$PKG/trunk $PKG - cd $PKG - python bootstrap.py - bin/buildout - bin/test - -Required pypy-c version ------------------------ - -You probably need a pypy-c built with --allworkingmodules, at least:: - - cd pypy/translator/goal - ./translate.py 
targetpypystandalone.py --allworkingmodules - -Workarounds ------------ - -At the moment, our ``gc`` module is incomplete, making the Zope test -runner unhappy. Quick workaround: go to the -``lib-python/modified-2.4.1`` directory and create a -``sitecustomize.py`` with the following content:: - - print "" - import gc - gc.get_threshold = lambda : (0, 0, 0) - gc.get_debug = lambda : 0 - gc.garbage = [] - -Running the tests ------------------ - -To run the tests we need the --oldstyle option, as follows:: - - cd $PKG - pypy-c --oldstyle bin/test diff --git a/pypy/doc/config/objspace.honor__builtins__.txt b/pypy/doc/config/objspace.honor__builtins__.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.std.withrangelist.txt b/pypy/doc/config/objspace.std.withrangelist.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrangelist.txt +++ /dev/null @@ -1,11 +0,0 @@ -Enable "range list" objects. They are an additional implementation of the Python -``list`` type, indistinguishable for the normal user. Whenever the ``range`` -builtin is called, an range list is returned. As long as this list is not -mutated (and for example only iterated over), it uses only enough memory to -store the start, stop and step of the range. This makes using ``range`` as -efficient as ``xrange``, as long as the result is only used in a ``for``-loop. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#range-lists - diff --git a/pypy/doc/config/objspace.std.optimized_comparison_op.txt b/pypy/doc/config/objspace.std.optimized_comparison_op.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_comparison_op.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimize the comparison of two integers a bit. 
diff --git a/pypy/doc/config/objspace.soabi.txt b/pypy/doc/config/objspace.soabi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.soabi.txt +++ /dev/null @@ -1,14 +0,0 @@ -This option controls the tag included into extension module file names. The -default is something like `pypy-14`, which means that `import foo` will look for -a file named `foo.pypy-14.so` (or `foo.pypy-14.pyd` on Windows). - -This is an implementation of PEP3149_, with two differences: - - * the filename without tag `foo.so` is not considered. - * the feature is also available on Windows. - -When set to the empty string (with `--soabi=`), the interpreter will only look -for a file named `foo.so`, and will crash if this file was compiled for another -Python interpreter. - -.. _PEP3149: http://www.python.org/dev/peps/pep-3149/ diff --git a/pypy/doc/config/objspace.usemodules._collections.txt b/pypy/doc/config/objspace.usemodules._collections.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._collections.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_collections' module. -Used by the 'collections' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.micronumpy.txt b/pypy/doc/config/objspace.usemodules.micronumpy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.micronumpy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the micronumpy module. -This module provides a very basic numpy-like interface. Major use-case -is to show how jit scales for other code. diff --git a/pypy/doc/config/objspace.std.withropeunicode.txt b/pypy/doc/config/objspace.std.withropeunicode.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withropeunicode.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use ropes to implement unicode strings (and also normal strings). - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/externaltools.txt b/pypy/doc/externaltools.txt deleted file mode 100644 --- a/pypy/doc/externaltools.txt +++ /dev/null @@ -1,27 +0,0 @@ -External tools&programs needed by PyPy -====================================== - -Tools needed for testing ------------------------- - -These tools are used in various ways by PyPy tests; if they are not found, -some tests might be skipped, so they need to be installed on every buildbot -slave to be sure we actually run all tests: - - - Mono (versions 1.2.1.1 and 1.9.1 known to work) - - - Java/JVM (preferably sun-jdk; version 1.6.0 known to work) - - - Jasmin >= 2.2 (copy it from wyvern, /usr/local/bin/jasmin and /usr/local/share/jasmin.jar) - - - gcc - - - Some libraries (these are Debian package names, adapt as needed): - - * ``python-dev`` - * ``python-ctypes`` - * ``libffi-dev`` - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libgc-dev`` (only when translating with `--opt=0, 1` or `size`) diff --git a/pypy/doc/config/objspace.std.prebuiltintto.txt b/pypy/doc/config/objspace.std.prebuiltintto.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintto.txt +++ /dev/null @@ -1,1 +0,0 @@ -See :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.std.multimethods.txt b/pypy/doc/config/objspace.std.multimethods.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.multimethods.txt +++ /dev/null @@ -1,8 +0,0 @@ -Choose the multimethod implementation. - -* ``doubledispatch`` turns - a multimethod call into a sequence of normal method calls. - -* ``mrd`` uses a technique known as Multiple Row Displacement - which precomputes a few compact tables of numbers and - function pointers. 
diff --git a/pypy/doc/config/objspace.usemodules._ast.txt b/pypy/doc/config/objspace.usemodules._ast.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ast.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_ast' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._multiprocessing.txt b/pypy/doc/config/objspace.usemodules._multiprocessing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._multiprocessing.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_multiprocessing' module. -Used by the 'multiprocessing' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.make_jobs.txt b/pypy/doc/config/translation.make_jobs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.make_jobs.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify number of make jobs for make command. diff --git a/pypy/doc/config/objspace.disable_call_speedhacks.txt b/pypy/doc/config/objspace.disable_call_speedhacks.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.disable_call_speedhacks.txt +++ /dev/null @@ -1,2 +0,0 @@ -disable the speed hacks that the interpreter normally does. Usually you don't -want to set this to False, but some object spaces require it. diff --git a/pypy/doc/discussion/howtoimplementpickling.txt b/pypy/doc/discussion/howtoimplementpickling.txt deleted file mode 100644 --- a/pypy/doc/discussion/howtoimplementpickling.txt +++ /dev/null @@ -1,340 +0,0 @@ -Designing thread pickling or "the Essence of Stackless Python" --------------------------------------------------------------- - -Note from 2007-07-22: This document is slightly out of date -and should be turned into a description of pickling. -Some research is necessary to get rid of explicit resume points, etc... - -Thread pickling is a unique feature in Stackless Python -and should be implemented for PyPy pretty soon. 
- -What is meant by pickling? -.......................... - -I'd like to define thread pickling as a restartable subset -of a running program. The re-runnable part should be based -upon Python frame chains, represented by coroutines, tasklets -or any other application level switchable subcontext. -It is surely possible to support pickling of arbitrary -interplevel state, but this seems to be not mandatory as long -as we consider Stackless as the reference implementation. -Extensions of this might be considered when the basic task -is fulfilled. - -Pickling should create a re-startable coroutine-alike thing -that can run on a different machine, same Python version, -but not necessarily the same PyPy translation. This belongs -to the harder parts. - -What is not meant by pickling? -.............................. - -Saving the whole memory state and writing a loader that -reconstructs the whole binary with its state im memory -is not what I consider a real solution. In some sense, -this can be a fall-back if we fail in every other case, -but I consider it really nasty for the C backend. - -If we had a dynamic backend that supports direct creation -of the program and its state (example: a Forth backend), -I would see it as a valid solution, since it is -relocatable. It is of course a possible fall-back to write -such a backend of we fail otherwise. - -There are some simple steps and some more difficult ones. -Let's start with the simple. - -Basic necessities -................. - -Pickling of a running thread involves a bit more than normal -object pickling, because there exist many objects which -don't have a pickling interface, and people would not care -about pickling them at all. But with thread pickling, these -objects simply exist as local variables and are needed -to restore the current runtime environment, and the user -should not have to know what goes into the pickle. 
- -Examples are - -- generators -- frames -- cells -- iterators -- tracebacks - -to name just a few. Fortunately most of these objects already have -got a pickling implementation in Stackless Python, namely the -prickelpit.c file. - -It should be simple and straightforward to redo these implementations. -Nevertheless there is a complication. The most natural way to support -pickling is providing a __getstate__/__setstate__ method pair. -This is ok for extension types like coroutines/tasklets which we can -control, but it should be avoided for existing types. - -Consider for instance frames. We would have to add a __getstate__ -and a __setstate__ method, which is an interface change. Furthermore, -we would need to support creation of frames by calling the -frame type, which is not really intended. - -For other types with are already callable, things get more complicated -because we need to make sure that creating new instances does -not interfere with existing ways to call the type. - -Directly adding a pickling interface to existing types is quite -likely to produce overlaps in the calling interface. This happened -for instance, when the module type became callable, and the signature -was different from what Stackless added before. - -For Stackless, -I used the copyreg module, instead, and created special surrogate -objects as placeholders, which replace the type of the object -after unpickling with the right type pointer. For details, see -the prickelpit.c file in the Stackless distribution. - -As a conclusion, pickling of tasklets is an addition to Stackless, -but not meant to be an extension to Python. The need to support -pickling of certain objects should not change the interface. -It is better to decouple this and to use surrogate types for -pickling which cannot collide with future additions to Python. - -The real problem -................ 
- -There are currently some crucial differences between Stackless -Python (SLP for now) and the PyPy Stackless support (PyPy for now) -as far as it is grown. -When CPython does a call to a Python function, there are several -helper functions involved for adjusting parameters, unpacking -methods and some more. SLP takes a hard time to remove all these -C functions from the C stack before starting the Python interpreter -for the function. This change of behavior is done manually for -all the helper functions by figuring out, which variables are -still needed after the call. It turns out that in most cases, -it is possible to let all the helper functions finish their -work and return form the function call before the interpreter -is started at all. - -This is the major difference which needs to be tackled for PyPy. -Whenever we run a Python function, quite a number of functions -incarnate on the C stack, and they get *not* finished before -running the new frame. In case of a coroutine switch, we just -save the whole chain of activation records - c function -entrypoints with the saved block variables. This is ok for -coroutine switching, but in the sense of SLP, it is rather -incomplete and not stackless at all. The stack still exists, -we can unwind and rebuild it, but it is a problem. - -Why a problem? -.............. - -In an ideal world, thread pickling would just be building -chains of pickled frames and nothing else. For every different -extra activation record like mentioned above, we have the -problem of how to save this information. We need a representation -which is not machine or compiler dependent. Right now, PyPy -is quite unstable in terms of which blocks it will produce, -what gets inlined, etc. The best solution possible is to try -to get completely rid of these extra structures. - -Unfortunately this is not even possible with SLP, because -there are different flavors of state which make it hard -to go without extra information. 
- -SLP switching strategies -........................ - -SLP has undergone several rewrites. The first implementation was aiming -at complete collaboration. A new frame's execution was deferred until -all the preparational C function calls had left the C stack. There -was no extra state to be saved. - -Well, this is only partially true - there are a couple of situations -where a recursive call could not be avoided, since the necessary support -would require heavy rewriting of the implementation. - -Examples are - -- map is a stateful implementation of iterating over a sequence - of operations. It can be made non-recursive if the map operation - creates its own frame to keep state. - -- __init__ looks trivial, but the semantics is that the return value - of __init__ is supposed to be None, and CPy has a special check for this - after the call. This might simply be ignored, but it is a simple example - for a case that cannot be handled automatically. - -- things like operator.__add__ can theoretically generate a wild pattern - of recursive calls while CPy tries to figure out if it is a numeric - add or a sequence add, and other callbacks may occur when methods - like __coerce__ get involved. This will never be solved for SLP, but - might get a solution by the strategy outlined below. - -The second implementation took a radically different approach. Context -switches were done by hijacking parts of the C stack, storing them -away and replacing them by the stack fragment that the target needs. -This is very powerful and allows to switch even in the context of -foreign code. With a little risk, I was even able to add concurrency -to foreign Fortran code. - -The above concept is called Hard (switching), the collaborative Soft (switching). -Note that an improved version of Hard is still the building block -for greenlets, which makes them not really green - I'd name it yellow. 
- -The latest SLP rewrites combine both ideas, trying to use Soft whenever -possible, but using Hard when nested interpreters are in the way. - -Notabene, it was never tried to pickle tasklets when Hard -was involved. In SLP, pickling works with Soft. To gather more -pickleable situations, you need to invent new frame types -or write replacement Python code and switch it using Soft. - -Analogies between SLP and PyPy -.............................. - -Right now, PyPy saves C state of functions in tiny activation records: -the alive variables of a block, together with the entry point of -the function that was left. -This is an improvement over storing raw stack slices, but the pattern -is similar: The C stack state gets restored when we switch. - -In this sense, it was the astonishing resume when Richard and I discussed -this last week: PyPy essentially does a variant of Hard switching! At least it -does a compromise that does not really help with pickling. - -On the other hand, this approach is half the way. It turns out to -be an improvement over SLP not to have to avoid recursions in the -first place. Instead, it seems to be even more elegant and efficient -to get rid of unnecessary state right in the context of a switch -and no earlier! - -Ways to handle the problem in a minimalistic way -................................................ - -Comparing the different approaches of SLP and PyPy, it appears to be -not necessary to change the interpreter in the first place. PyPy does -not need to change its calling behavior in order to be cooperative. -The key point is to find out which activation records need to -be stored at all. This should be possible to identify as a part -of the stackless transform. - -Consider the simple most common case of calling a normal Python function. -There are several calls to functions involved, which do preparational -steps. 
Without trying to be exact (this is part of the work to be done), -involved steps are - -- decode the arguments of the function - -- prepare a new frame - -- store the arguments in the frame - -- execute the frame - -- return the result - -Now assume that we do not execute the frame, but do a context switch instead, -then right now a sequence of activation records is stored on the heap. -If we want to re-activate this chain of activation records, what do -we really need to restore before we can do the function call? - -- the argument decoding is done, already, and the fact that we could have done - the function call shows, that no exception occurred. We can ignore the rest - of this activation record and do the housekeeping. - -- the frame is prepared, and arguments are stored in it. The operation - succeeded, and we have the frame. We can ignore exception handling - and just do housekeeping by getting rid of references. - -- for executing the frame, we need a special function that executes frames. It - is possible that we need different flavors due to contexts. SLP does this - by using different registered functions which operate on a frame, depending - on the frame's state (first entry, reentry after call, returning, yielding etc) - -- after executing the frame, exceptions need to be handled in the usual way, - and we should return to the issuer of the call. - -Some deeper analysis is needed to get these things correct. -But it should have become quite clear, that after all the preparational -steps have been done, there is no other state necessary than what we -have in the Python frames: bound arguments, instruction pointer, that's it. - -My proposal is now to do such an analysis by hand, identify the different -cases to be handled, and then trying to find an algorithm that automatically -identifies the blocks in the whole program, where the restoring of the -C stack can be avoided, and we can jump back to the previous caller, directly. 
- -A rough sketch of the necessary analysis: - -for every block in an RPython function that can reach unwind: -Analyze control flow. It should be immediately leading to -the return block with only one output variable. All other alive variables -should have ended their liveness in this block. - -I think this will not work in the first place. For the bound frame -arguments for instance, I think we need some notation that these are -held by the frame, and we can drop their liveness before doing the call, -hence we don't need to save these variables in the activation record, -and hence the whole activation record can be removed. - -As a conclusion of this incomplete first analysis, it seems to be necessary -to identify useless activation records in order to support pickling. -The remaining, irreducible activation records should then be those -which hold a reference to a Python frame. -Such a chain is pickleable if its root points back to the context switching code -of the interp-level implementation of coroutines. - -As an observation, this transform not only enables pickling, but -also is an optimization, if we can avoid saving many activation records. - -Another possible observation which I hope to be able to prove is this: -The remaining irreducible activation records which don't just hold -a Python frame are those which should be considered special. -They should be turned into something like special frames, and they would -be the key to make PyPy completely stackless, a goal which is practically -impossible for SLP! These activation records would need to become -part of the official interface and need to get naming support for -their necessary functions. - -I wish to stop this paper here. I believe everything else -needs to be tried in an implementation, and this is so far -all I can do just with imagination. - -best - chris - -Just an addition after some more thinking -......................................... 
- -Actually it struck me after checking this in, that the problem of -determining which blocks need to save state and which do not is not -really a Stackless problem. It is a system-immanent problem -of a missing optimization that we still did not try to solve. - -Speaking in terms of GC transform, and especially the refcounting, -it is probably easy to understand what I mean. Our current refcounting -implementation is naive, in the sense that we do not try to do the -optimizations which every extension writer does by hand: -We do not try to save references. - -This is also why I'm always arguing that refcounting can be and -effectively *is* efficient, because CPython does it very well. - -Our refcounting is not aware of variable liveness, it does not -track references which are known to be held by other objects. -Optimizing that would do two things: The refcounting would become -very efficient, since we would save some 80 % of it. -The second part, which is relevant to the pickling problem is this: -By doing a proper analysis, we already would have lost references to -all the variables which we don't need to save any longer, because -we know that they are held in, for instance, frames. - -I hope you understand that: If we improve the life-time analysis -of variables, the problem sketched above about which blocks -need to save state and which don't, should become trivial and should -just vanish. Doing this correctly will solve the pickling problem quasi -automatically, leading to a more efficient implementation at the same time. - -I hope I told the truth and will try to prove it. - -ciao - chris diff --git a/pypy/doc/config/objspace.usemodules.__builtin__.txt b/pypy/doc/config/objspace.usemodules.__builtin__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__builtin__.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '__builtin__' module. -This module is essential, included by default and should not be removed. 
diff --git a/pypy/doc/config/objspace.usemodules.signal.txt b/pypy/doc/config/objspace.usemodules.signal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.signal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'signal' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules._io.txt b/pypy/doc/config/objspace.usemodules._io.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._io.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_io module. -Used by the 'io' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules._warnings.txt b/pypy/doc/config/objspace.usemodules._warnings.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._warnings.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_warning' module. This module is expected to be working and is included by default. diff --git a/pypy/doc/docindex.txt b/pypy/doc/docindex.txt deleted file mode 100644 --- a/pypy/doc/docindex.txt +++ /dev/null @@ -1,314 +0,0 @@ -================================================= -PyPy - a Python_ implementation written in Python -================================================= - -.. _Python: http://www.python.org/doc/2.5.2/ - -.. sectnum:: -.. contents:: :depth: 1 - - -PyPy User Documentation -=============================================== - -`getting started`_ provides hands-on instructions -including a two-liner to run the PyPy Python interpreter -on your system, examples on advanced features and -entry points for using PyPy's translation tool chain. - -`FAQ`_ contains some frequently asked questions. - -New features of PyPy's Python Interpreter and -Translation Framework: - - * `Differences between PyPy and CPython`_ - * `What PyPy can do for your objects`_ - * `Stackless and coroutines`_ - * `JIT Generation in PyPy`_ - * `Sandboxing Python code`_ - -Status_ of the project. 
- - -Project Documentation -===================================== - -PyPy was funded by the EU for several years. See the `web site of the EU -project`_ for more details. - -.. _`web site of the EU project`: http://pypy.org - -architecture_ gives a complete view of PyPy's basic design. - -`coding guide`_ helps you to write code for PyPy (especially also describes -coding in RPython a bit). - -`sprint reports`_ lists reports written at most of our sprints, from -2003 to the present. - -`papers, talks and related projects`_ lists presentations -and related projects as well as our published papers. - -`ideas for PyPy related projects`_ which might be a good way to get -into PyPy. - -`PyPy video documentation`_ is a page linking to the videos (e.g. of talks and -introductions) that are available. - -`Technical reports`_ is a page that contains links to the -reports that we submitted to the European Union. - -`development methodology`_ describes our sprint-driven approach. - -`license`_ contains licensing details (basically a straight MIT-license). - -`Glossary`_ of PyPy words to help you align your inner self with -the PyPy universe. - - -Status -=================================== - -PyPy can be used to run Python programs on Linux, OS/X, -Windows, on top of .NET, and on top of Java. -To dig into PyPy it is recommended to try out the current -Subversion HEAD, which is always working or mostly working, -instead of the latest release, which is `1.2.0`__. - -.. __: release-1.2.0.html - -PyPy is mainly developed on Linux and Mac OS X. Windows is supported, -but platform-specific bugs tend to take longer before we notice and fix -them. Linux 64-bit machines are supported (though it may also take some -time before we notice and fix bugs). - -PyPy's own tests `summary`_, daily updated, run through BuildBot infrastructure. -You can also find CPython's compliance tests run with compiled ``pypy-c`` -executables there. 
- -information dating from early 2007: - -`PyPy LOC statistics`_ shows LOC statistics about PyPy. - -`PyPy statistics`_ is a page with various statistics about the PyPy project. - -`compatibility matrix`_ is a diagram that shows which of the various features -of the PyPy interpreter work together with which other features. - - -Source Code Documentation -=============================================== - -`object spaces`_ discusses the object space interface -and several implementations. - -`bytecode interpreter`_ explains the basic mechanisms -of the bytecode interpreter and virtual machine. - -`interpreter optimizations`_ describes our various strategies for -improving the performance of our interpreter, including alternative -object implementations (for strings, dictionaries and lists) in the -standard object space. - -`translation`_ is a detailed overview of our translation process. The -rtyper_ is the largest component of our translation process. - -`dynamic-language translation`_ is a paper that describes -the translation process, especially the flow object space -and the annotator in detail. (This document is one -of the `EU reports`_.) - -`low-level encapsulation`_ describes how our approach hides -away a lot of low level details. This document is also part -of the `EU reports`_. - -`translation aspects`_ describes how we weave different -properties into our interpreter during the translation -process. This document is also part of the `EU reports`_. - -`garbage collector`_ strategies that can be used by the virtual -machines produced by the translation process. - -`parser`_ contains (outdated, unfinished) documentation about -the parser. - -`rlib`_ describes some modules that can be used when implementing programs in -RPython. - -`configuration documentation`_ describes the various configuration options that -allow you to customize PyPy. - -`CLI backend`_ describes the details of the .NET backend. 
- -`JIT Generation in PyPy`_ describes how we produce the Python Just-in-time Compiler -from our Python interpreter. - - - -.. _`FAQ`: faq.html -.. _Glossary: glossary.html -.. _`PyPy video documentation`: video-index.html -.. _parser: parser.html -.. _`development methodology`: dev_method.html -.. _`sprint reports`: sprint-reports.html -.. _`papers, talks and related projects`: extradoc.html -.. _`license`: ../../LICENSE -.. _`PyPy LOC statistics`: http://codespeak.net/~hpk/pypy-stat/ -.. _`PyPy statistics`: http://codespeak.net/pypy/trunk/pypy/doc/statistic -.. _`object spaces`: objspace.html -.. _`interpreter optimizations`: interpreter-optimizations.html -.. _`translation`: translation.html -.. _`dynamic-language translation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`low-level encapsulation`: low-level-encapsulation.html -.. _`translation aspects`: translation-aspects.html -.. _`configuration documentation`: config/ -.. _`coding guide`: coding-guide.html -.. _`architecture`: architecture.html -.. _`getting started`: getting-started.html -.. _`theory`: theory.html -.. _`bytecode interpreter`: interpreter.html -.. _`EU reports`: index-report.html -.. _`Technical reports`: index-report.html -.. _`summary`: http://codespeak.net:8099/summary -.. _`ideas for PyPy related projects`: project-ideas.html -.. _`Nightly builds and benchmarks`: http://tuatara.cs.uni-duesseldorf.de/benchmark.html -.. _`directory reference`: -.. _`rlib`: rlib.html -.. 
_`Sandboxing Python code`: sandbox.html - -PyPy directory cross-reference ------------------------------- - -Here is a fully referenced alphabetical two-level deep -directory overview of PyPy: - -============================ =========================================== -Directory explanation/links -============================ =========================================== -`annotation/`_ `type inferencing code`_ for `RPython`_ programs - -`bin/`_ command-line scripts, mainly `py.py`_ and `translatorshell.py`_ - -`config/`_ handles the numerous options for building and running PyPy - -`doc/`_ text versions of PyPy developer documentation - -`doc/config/`_ documentation for the numerous translation options - -`doc/discussion/`_ drafts of ideas and documentation - -``doc/*/`` other specific documentation topics or tools - -`interpreter/`_ `bytecode interpreter`_ and related objects - (frames, functions, modules,...) - -`interpreter/pyparser/`_ interpreter-level Python source parser - -`interpreter/astcompiler/`_ interpreter-level bytecode compiler, via an AST - representation - -`module/`_ contains `mixed modules`_ implementing core modules with - both application and interpreter level code. - Not all are finished and working. Use the ``--withmod-xxx`` - or ``--allworkingmodules`` translation options. 
- -`objspace/`_ `object space`_ implementations - -`objspace/trace.py`_ the `trace object space`_ monitoring bytecode and space operations - -`objspace/dump.py`_ the dump object space saves a large, searchable log file - with all operations - -`objspace/taint.py`_ the `taint object space`_, providing object tainting - -`objspace/thunk.py`_ the `thunk object space`_, providing unique object features - -`objspace/flow/`_ the FlowObjSpace_ implementing `abstract interpretation` - -`objspace/std/`_ the StdObjSpace_ implementing CPython's objects and types - -`rlib/`_ a `"standard library"`_ for RPython_ programs - -`rpython/`_ the `RPython Typer`_ - -`rpython/lltypesystem/`_ the `low-level type system`_ for C-like backends - -`rpython/ootypesystem/`_ the `object-oriented type system`_ for OO backends - -`rpython/memory/`_ the `garbage collector`_ construction framework - -`tool/`_ various utilities and hacks used from various places - -`tool/algo/`_ general-purpose algorithmic and mathematic - tools - -`tool/pytest/`_ support code for our `testing methods`_ - -`translator/`_ translation_ backends and support code - -`translator/backendopt/`_ general optimizations that run before a backend generates code - -`translator/c/`_ the `GenC backend`_, producing C code from an - RPython program (generally via the rtyper_) - -`translator/cli/`_ the `CLI backend`_ for `.NET`_ (Microsoft CLR or Mono_) - -`translator/goal/`_ our `main PyPy-translation scripts`_ live here - -`translator/jvm/`_ the Java backend - -`translator/stackless/`_ the `Stackless Transform`_ - -`translator/tool/`_ helper tools for translation, including the Pygame - `graph viewer`_ - -``*/test/`` many directories have a test subdirectory containing test - modules (see `Testing in PyPy`_) - -``_cache/`` holds cache files from internally `translating application - level to interpreterlevel`_ code. -============================ =========================================== - -.. 
_`bytecode interpreter`: interpreter.html -.. _`translating application level to interpreterlevel`: geninterp.html -.. _`Testing in PyPy`: coding-guide.html#testing-in-pypy -.. _`mixed modules`: coding-guide.html#mixed-modules -.. _`modules`: coding-guide.html#modules -.. _`basil`: http://people.cs.uchicago.edu/~jriehl/BasilTalk.pdf -.. _`object space`: objspace.html -.. _FlowObjSpace: objspace.html#the-flow-object-space -.. _`trace object space`: objspace.html#the-trace-object-space -.. _`taint object space`: objspace-proxies.html#taint -.. _`thunk object space`: objspace-proxies.html#thunk -.. _`transparent proxies`: objspace-proxies.html#tproxy -.. _`Differences between PyPy and CPython`: cpython_differences.html -.. _`What PyPy can do for your objects`: objspace-proxies.html -.. _`Stackless and coroutines`: stackless.html -.. _StdObjSpace: objspace.html#the-standard-object-space -.. _`abstract interpretation`: theory.html#abstract-interpretation -.. _`rpython`: coding-guide.html#rpython -.. _`type inferencing code`: translation.html#the-annotation-pass -.. _`RPython Typer`: translation.html#rpython-typer -.. _`testing methods`: coding-guide.html#testing-in-pypy -.. _`translation`: translation.html -.. _`GenC backend`: translation.html#genc -.. _`CLI backend`: cli-backend.html -.. _`py.py`: getting-started-python.html#the-py.py-interpreter -.. _`translatorshell.py`: getting-started-dev.html#try-out-the-translator -.. _JIT: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html -.. _`just-in-time compiler generator`: jit/index.html -.. _rtyper: rtyper.html -.. _`low-level type system`: rtyper.html#low-level-type -.. _`object-oriented type system`: rtyper.html#oo-type -.. _`garbage collector`: garbage_collection.html -.. _`Stackless Transform`: translation.html#the-stackless-transform -.. _`main PyPy-translation scripts`: getting-started-python.html#translating-the-pypy-python-interpreter -.. _`.NET`: http://www.microsoft.com/net/ -.. 
_Mono: http://www.mono-project.com/ -.. _`"standard library"`: rlib.html -.. _`graph viewer`: getting-started-dev.html#try-out-the-translator -.. _`compatibility matrix`: image/compat-matrix.png - -.. include:: _ref.txt - diff --git a/pypy/doc/config/objspace.usemodules.parser.txt b/pypy/doc/config/objspace.usemodules.parser.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.parser.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'parser' module. -This is PyPy implementation of the standard library 'parser' module (e.g. if -this option is enabled and you say ``import parser`` you get this module). -It is enabled by default. diff --git a/pypy/doc/cli-backend.txt b/pypy/doc/cli-backend.txt deleted file mode 100644 --- a/pypy/doc/cli-backend.txt +++ /dev/null @@ -1,455 +0,0 @@ -=============== -The CLI backend -=============== - -The goal of GenCLI is to compile RPython programs to the CLI virtual -machine. - - -Target environment and language -=============================== - -The target of GenCLI is the Common Language Infrastructure environment -as defined by the `Standard Ecma 335`_. - -While in an ideal world we might suppose GenCLI to run fine with -every implementation conforming to that standard, we know the world we -live in is far from ideal, so extra efforts can be needed to maintain -compatibility with more than one implementation. - -At the moment of writing the two most popular implementations of the -standard are supported: Microsoft Common Language Runtime (CLR) and -Mono. - -Then we have to choose how to generate the real executables. There are -two main alternatives: generating source files in some high level -language (such as C#) or generating assembly level code in -Intermediate Language (IL). - -The IL approach is much faster during the code generation -phase, because it doesn't need to call a compiler. 
By contrast the -high level approach has two main advantages: - - - the code generation part could be easier because the target - language supports high level control structures such as - structured loops; - - - the generated executables take advantage of compiler's - optimizations. - -In reality the first point is not an advantage in the PyPy context, -because the `flow graph`_ we start from is quite low level and Python -loops are already expressed in terms of branches (i.e., gotos). - -About the compiler optimizations we must remember that the flow graph -we receive from earlier stages is already optimized: PyPy implements -a number of optimizations such a constant propagation and -dead code removal, so it's not obvious if the compiler could -do more. - -Moreover by emitting IL instruction we are not constrained to rely on -compiler choices but can directly choose how to map CLI opcodes: since -the backend often know more than the compiler about the context, we -might expect to produce more efficient code by selecting the most -appropriate instruction; e.g., we can check for arithmetic overflow -only when strictly necessary. - -The last but not least reason for choosing the low level approach is -flexibility in how to get an executable starting from the IL code we -generate: - - - write IL code to a file, then call the ilasm assembler; - - - directly generate code on the fly by accessing the facilities - exposed by the System.Reflection.Emit API. - - -Handling platform differences -============================= - -Since our goal is to support both Microsoft CLR we have to handle the -differences between the twos; in particular the main differences are -in the name of the helper tools we need to call: - -=============== ======== ====== -Tool CLR Mono -=============== ======== ====== -IL assembler ilasm ilasm2 -C# compiler csc gmcs -Runtime ... 
mono -=============== ======== ====== - -The code that handles these differences is located in the sdk.py -module: it defines an abstract class which exposes some methods -returning the name of the helpers and one subclass for each of the two -supported platforms. - -Since Microsoft ``ilasm`` is not capable of compiling the PyPy -standard interpreter due to its size, on Windows machines we also look -for an existing Mono installation: if present, we use CLR for -everything except the assembling phase, for which we use Mono's -``ilasm2``. - - -Targeting the CLI Virtual Machine -================================= - -In order to write a CLI backend we have to take a number of decisions. -First, we have to choose the typesystem to use: given that CLI -natively supports primitives like classes and instances, -ootypesystem is the most natural choice. - -Once the typesystem has been chosen there is a number of steps we have -to do for completing the backend: - - - map ootypesystem's types to CLI Common Type System's - types; - - - map ootypesystem's low level operation to CLI instructions; - - - map Python exceptions to CLI exceptions; - - - write a code generator that translates a flow graph - into a list of CLI instructions; - - - write a class generator that translates ootypesystem - classes into CLI classes. - - -Mapping primitive types ------------------------ - -The `rtyper`_ give us a flow graph annotated with types belonging to -ootypesystem: in order to produce CLI code we need to translate these -types into their Common Type System equivalents. - -For numeric types the conversion is straightforward, since -there is a one-to-one mapping between the two typesystems, so that -e.g. Float maps to float64. - -For character types the choice is more difficult: RPython has two -distinct types for plain ASCII and Unicode characters (named UniChar), -while .NET only supports Unicode with the char type. 
There are at -least two ways to map plain Char to CTS: - - - map UniChar to char, thus maintaining the original distinction - between the two types: this has the advantage of being a - one-to-one translation, but has the disadvantage that RPython - strings will not be recognized as .NET strings, since they only - would be sequences of bytes; - - - map both char, so that Python strings will be treated as strings - also by .NET: in this case there could be problems with existing - Python modules that use strings as sequences of byte, such as the - built-in struct module, so we need to pay special attention. - -We think that mapping Python strings to .NET strings is -fundamental, so we chose the second option. - -Mapping built-in types ----------------------- - -As we saw in section ootypesystem defines a set of types that take -advantage of built-in types offered by the platform. - -For the sake of simplicity we decided to write wrappers -around .NET classes in order to match the signatures required by -pypylib.dll: - -=================== =========================================== -ootype CLI -=================== =========================================== -String System.String -StringBuilder System.Text.StringBuilder -List System.Collections.Generic.List -Dict System.Collections.Generic.Dictionary -CustomDict pypy.runtime.Dict -DictItemsIterator pypy.runtime.DictItemsIterator -=================== =========================================== - -Wrappers exploit inheritance for wrapping the original classes, so, -for example, pypy.runtime.List is a subclass of -System.Collections.Generic.List that provides methods whose names -match those found in the _GENERIC_METHODS of ootype.List - -The only exception to this rule is the String class, which is not -wrapped since in .NET we can not subclass System.String. 
Instead, we -provide a bunch of static methods in pypylib.dll that implement the -methods declared by ootype.String._GENERIC_METHODS, then we call them -by explicitly passing the string object in the argument list. - - -Mapping instructions --------------------- - -PyPy's low level operations are expressed in Static Single Information -(SSI) form, such as this:: - - v2 = int_add(v0, v1) - -By contrast the CLI virtual machine is stack based, which means the -each operation pops its arguments from the top of the stacks and -pushes its result there. The most straightforward way to translate SSI -operations into stack based operations is to explicitly load the -arguments and store the result into the appropriate places:: - - LOAD v0 - LOAD v1 - int_add - STORE v2 - -The code produced works correctly but has some inefficiency issue that -can be addressed during the optimization phase. - -The CLI Virtual Machine is fairly expressive, so the conversion -between PyPy's low level operations and CLI instruction is relatively -simple: many operations maps directly to the correspondent -instruction, e.g int_add and sub. - -By contrast some instructions do not have a direct correspondent and -have to be rendered as a sequence of CLI instructions: this is the -case of the "less-equal" and "greater-equal" family of instructions, -that are rendered as "greater" or "less" followed by a boolean "not", -respectively. - -Finally, there are some instructions that cannot be rendered directly -without increasing the complexity of the code generator, such as -int_abs (which returns the absolute value of its argument). These -operations are translated by calling some helper function written in -C#. - -The code that implements the mapping is in the modules opcodes.py. 
- -Mapping exceptions ------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -At the moment we've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by built-in operations. The currently -implemented solution is to do an exception translation on-the-fly. - -As an example consider the RPython int_add_ovf operation, that sums -two integers and raises an OverflowError exception in case of -overflow. For implementing it we can use the built-in add.ovf CLI -instruction that raises System.OverflowException when the result -overflows, catch that exception and throw a new one:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class OverflowError::.ctor() - throw - } - - -Translating flow graphs ------------------------ - -As we saw previously in PyPy function and method bodies are -represented by flow graphs that we need to translate CLI IL code. Flow -graphs are expressed in a format that is very suitable for being -translated to low level code, so that phase is quite straightforward, -though the code is a bit involved because we need to take care of three -different types of blocks. - -The code doing this work is located in the Function.render -method in the file function.py. 
- -First of all it searches for variable names and types used by -each block; once they are collected it emits a .local IL -statement used for indicating the virtual machine the number and type -of local variables used. - -Then it sequentially renders all blocks in the graph, starting from the -start block; special care is taken for the return block which is -always rendered at last to meet CLI requirements. - -Each block starts with an unique label that is used for jumping -across, followed by the low level instructions the block is composed -of; finally there is some code that jumps to the appropriate next -block. - -Conditional and unconditional jumps are rendered with their -corresponding IL instructions: brtrue, brfalse. - -Blocks that needs to catch exceptions use the native facilities -offered by the CLI virtual machine: the entire block is surrounded by -a .try statement followed by as many catch as needed: each catching -sub-block then branches to the appropriate block:: - - - # RPython - try: - # block0 - ... - except ValueError: - # block1 - ... - except TypeError: - # block2 - ... - - // IL - block0: - .try { - ... - leave block3 - } - catch ValueError { - ... - leave block1 - } - catch TypeError { - ... - leave block2 - } - block1: - ... - br block3 - block2: - ... - br block3 - block3: - ... - -There is also an experimental feature that makes GenCLI to use its own -exception handling mechanism instead of relying on the .NET -one. Surprisingly enough, benchmarks are about 40% faster with our own -exception handling machinery. - - -Translating classes -------------------- - -As we saw previously, the semantic of ootypesystem classes -is very similar to the .NET one, so the translation is mostly -straightforward. - -The related code is located in the module class\_.py. 
Rendered classes -are composed of four parts: - - - fields; - - user defined methods; - - default constructor; - - the ToString method, mainly for testing purposes - -Since ootype implicitly assumes all method calls to be late bound, as -an optimization before rendering the classes we search for methods -that are not overridden in subclasses, and declare as "virtual" only -the one that needs to. - -The constructor does nothing more than calling the base class -constructor and initializing class fields to their default value. - -Inheritance is straightforward too, as it is natively supported by -CLI. The only noticeable thing is that we map ootypesystem's ROOT -class to the CLI equivalent System.Object. - -The Runtime Environment ------------------------ - -The runtime environment is a collection of helper classes and -functions used and referenced by many of the GenCLI submodules. It is -written in C#, compiled to a DLL (Dynamic Link Library), then linked -to generated code at compile-time. - -The DLL is called pypylib and is composed of three parts: - - - a set of helper functions used to implements complex RPython - low-level instructions such as runtimenew and ooparse_int; - - - a set of helper classes wrapping built-in types - - - a set of helpers used by the test framework - - -The first two parts are contained in the pypy.runtime namespace, while -the third is in the pypy.test one. - - -Testing GenCLI -============== - -As the rest of PyPy, GenCLI is a test-driven project: there is at -least one unit test for almost each single feature of the -backend. This development methodology allowed us to early discover -many subtle bugs and to do some big refactoring of the code with the -confidence not to break anything. 
- -The core of the testing framework is in the module -pypy.translator.cli.test.runtest; one of the most important function -of this module is compile_function(): it takes a Python function, -compiles it to CLI and returns a Python object that runs the just -created executable when called. - -This way we can test GenCLI generated code just as if it were a simple -Python function; we can also directly run the generated executable, -whose default name is main.exe, from a shell: the function parameters -are passed as command line arguments, and the return value is printed -on the standard output:: - - # Python source: foo.py - from pypy.translator.cli.test.runtest import compile_function - - def foo(x, y): - return x+y, x*y - - f = compile_function(foo, [int, int]) - assert f(3, 4) == (7, 12) - - - # shell - $ mono main.exe 3 4 - (7, 12) - -GenCLI supports only few RPython types as parameters: int, r_uint, -r_longlong, r_ulonglong, bool, float and one-length strings (i.e., -chars). By contrast, most types are fine for being returned: these -include all primitive types, list, tuples and instances. - -Installing Python for .NET on Linux -=================================== - -With the CLI backend, you can access .NET libraries from RPython; -programs using .NET libraries will always run when translated, but you -might also want to test them on top of CPython. - -To do so, you can install `Python for .NET`_. Unfortunately, it does -not work out of the box under Linux. - -To make it working, download and unpack the source package of Python -for .NET; the only version tested with PyPy is the 1.0-rc2, but it -might work also with others. Then, you need to create a file named -Python.Runtime.dll.config at the root of the unpacked archive; put the -following lines inside the file (assuming you are using Python 2.4):: - - - - - -The installation should be complete now. To run Python for .NET, -simply type ``mono python.exe``. - - -.. 
_`Standard Ecma 335`: http://www.ecma-international.org/publications/standards/Ecma-335.htm -.. _`flow graph`: translation.html#the-flow-model -.. _`rtyper`: rtyper.html -.. _`Python for .NET`: http://pythonnet.sourceforge.net/ diff --git a/pypy/doc/config/translation.backendopt.none.txt b/pypy/doc/config/translation.backendopt.none.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.none.txt +++ /dev/null @@ -1,1 +0,0 @@ -Do not run any backend optimizations. diff --git a/pypy/doc/config/objspace.usemodules.txt b/pypy/doc/config/objspace.usemodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.clr.txt b/pypy/doc/config/objspace.usemodules.clr.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.clr.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'clr' module. diff --git a/pypy/doc/configuration.txt b/pypy/doc/configuration.txt deleted file mode 100644 --- a/pypy/doc/configuration.txt +++ /dev/null @@ -1,194 +0,0 @@ -============================= -PyPy's Configuration Handling -============================= - -Due to more and more available configuration options it became quite annoying to -hand the necessary options to where they are actually used and even more -annoying to add new options. To circumvent these problems the configuration -management was introduced. There all the necessary options are stored into an -configuration object, which is available nearly everywhere in the translation -toolchain and in the standard interpreter so that adding new options becomes -trivial. Options are organized into a tree. Configuration objects can be -created in different ways, there is support for creating an optparse command -line parser automatically. - - -Main Assumption -=============== - -Configuration objects are produced at the entry points and handed down to -where they are actually used. 
This keeps configuration local but available -everywhere and consistent. The configuration values can be created using the -command line (already implemented) or a file (still to be done). - - -API Details -=========== - -The handling of options is split into two parts: the description of which -options are available, what their possible values and defaults are and how they -are organized into a tree. A specific choice of options is bundled into a -configuration object which has a reference to its option description (and -therefore makes sure that the configuration values adhere to the option -description). -This splitting is remotely similar to the distinction between types and -instances in the type systems of the rtyper: the types describe what sort of -fields the instances have. - -The Options are organized in a tree. Every option has a name, as does every -option group. The parts of the full name of the option are separated by dots: -e.g. ``config.translation.thread``. - -Description of Options ----------------------- - -All the constructors take a ``name`` and a ``doc`` argument as first arguments -to give the option or option group a name and to document it. Most constructors -take a ``default`` argument that specifies the default value of the option. If -this argument is not supplied the default value is assumed to be ``None``. -Most constructors -also take a ``cmdline`` argument where you can specify what the command line -option should look like (for example cmdline="-v --version"). If ``cmdline`` is -not specified a default cmdline option is created that uses the name of the -option together with its full path. If ``None`` is passed in as ``cmdline`` then -no command line option is created at all. - -Some options types can specify requirements to specify that a particular choice -for one option works only if a certain choice for another option is used. A -requirement is specified using a list of pairs. 
The first element of the pair -gives the path of the option that is required to be set and the second element -gives the required value. - - -``OptionDescription`` -+++++++++++++++++++++ - -This class is used to group suboptions. - - ``__init__(self, name, doc, children)`` - ``children`` is a list of option descriptions (including - ``OptionDescription`` instances for nested namespaces). - -``ChoiceOption`` -++++++++++++++++ - -Represents a choice out of several objects. The option can also have the value -``None``. - - ``__init__(self, name, doc, values, default=None, requires=None, cmdline=DEFAULT)`` - ``values`` is a list of values the option can possibly take, - ``requires`` is a dictionary mapping values to lists of of two-element - tuples. - -``BoolOption`` -++++++++++++++ - -Represents a choice between ``True`` and ``False``. - - ``__init__(self, name, doc, default=None, requires=None, suggests=None, cmdline=DEFAULT, negation=True)`` - ``default`` specifies the default value of the option. ``requires`` is - a list of two-element tuples describing the requirements when the - option is set to true, ``suggests`` is a list of the same structure but - the options in there are only suggested, not absolutely necessary. The - difference is small: if the current option is set to True, both the - required and the suggested options are set. The required options cannot - be changed later, though. ``negation`` specifies whether the negative - commandline option should be generated. - - -``IntOption`` -+++++++++++++ - -Represents a choice of an integer. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``FloatOption`` -+++++++++++++++ - -Represents a choice of a floating point number. - - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - -``StrOption`` -+++++++++++++ - -Represents the choice of a string. 
- - ``__init__(self, name, doc, default=None, cmdline=DEFAULT)`` - - - - -Configuration Objects ---------------------- - -``Config`` objects hold the chosen values for the options (of the default, -if no choice was made). A ``Config`` object is described by an -``OptionDescription`` instance. The attributes of the ``Config`` objects are the -names of the children of the ``OptionDescription``. Example:: - - >>> from pypy.config.config import OptionDescription, Config, BoolOption - >>> descr = OptionDescription("options", "", [ - ... BoolOption("bool", "", default=False)]) - >>> - >>> config = Config(descr) - >>> config.bool - False - >>> config.bool = True - >>> config.bool - True - - -Description of the (useful) methods on ``Config``: - - ``__init__(self, descr, **overrides)``: - ``descr`` is an instance of ``OptionDescription`` that describes the - configuration object. ``overrides`` can be used to set different default - values (see method ``override``). - - ``override(self, overrides)``: - override default values. This marks the overridden values as defaults, - which makes it possible to change them (you can usually change values - only once). ``overrides`` is a dictionary of path strings to values. - - ``set(self, **kwargs)``: - "do what I mean"-interface to option setting. Searches all paths - starting from that config for matches of the optional arguments and sets - the found option if the match is not ambiguous. - - -Production of optparse Parsers ------------------------------- - -To produce an optparse parser use the function ``to_optparse``. It will create -an option parser using callbacks in such a way that the config object used for -creating the parser is updated automatically. - - ``to_optparse(config, useoptions=None, parser=None)``: - Returns an optparse parser. ``config`` is the configuration object for - which to create the parser. ``useoptions`` is a list of options for - which to create command line options. 
It can contain full paths to - options or also paths to an option description plus an additional ".*" - to produce command line options for all sub-options of that description. - If ``useoptions`` is ``None``, then all sub-options are turned into - cmdline options. ``parser`` can be an existing parser object, if - ``None`` is passed in, then a new one is created. - - -The usage of config objects in PyPy -=================================== - -The two large parts of PyPy, the standard interpreter and the translation -toolchain, have two separate sets of options. The translation toolchain options -can be found on the ``config`` attribute of all ``TranslationContext`` -instances and are described in translationoption.py_. The interpreter options -are attached to the object space, also under the name ``config`` and are -described in pypyoption.py_. - -.. _translationoption.py: ../config/translationoption.py -.. _pypyoption.py: ../config/pypyoption.py diff --git a/pypy/doc/config/objspace.usemodules._demo.txt b/pypy/doc/config/objspace.usemodules._demo.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._demo.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_demo' module. - -This is the demo module for mixed modules. Not enabled by default. diff --git a/pypy/doc/config/objspace.std.withcelldict.txt b/pypy/doc/config/objspace.std.withcelldict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withcelldict.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable cell-dicts. This optimization is not helpful without the JIT. In the -presence of the JIT, it greatly helps looking up globals. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. 
-This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._pickle_support.txt b/pypy/doc/config/objspace.usemodules._pickle_support.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._pickle_support.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_pickle_support' module. -Internal helpers for pickling runtime builtin types (frames, cells, etc) -for `stackless`_ tasklet pickling support. -.. _`stackless`: ../stackless.html - -.. internal diff --git a/pypy/doc/clr-module.txt b/pypy/doc/clr-module.txt deleted file mode 100644 --- a/pypy/doc/clr-module.txt +++ /dev/null @@ -1,143 +0,0 @@ -=============================== -The ``clr`` module for PyPy.NET -=============================== - -PyPy.NET give you access to the surrounding .NET environment via the -``clr`` module. This module is still experimental: some features are -still missing and its interface might change in next versions, but -it's still useful to experiment a bit with PyPy.NET. - -PyPy.NET provides an import hook that lets you to import .NET namespaces -seamlessly as they were normal Python modules. Then, - -PyPY.NET native classes try to behave as much as possible in the -"expected" way both for the developers used to .NET and for the ones -used to Python. - -In particular, the following features are mapped one to one because -they exist in both worlds: - - - .NET constructors are mapped to the Python __init__ method; - - - .NET instance methods are mapped to Python methods; - - - .NET static methods are mapped to Python static methods (belonging - to the class); - - - .NET properties are mapped to property-like Python objects (very - similar to the Python ``property`` built-in); - - - .NET indexers are mapped to Python __getitem__ and __setitem__; - - - .NET enumerators are mapped to Python iterators. 
- -Moreover, all the usual Python features such as bound and unbound -methods are available as well. - -Example of usage -================ - -Here is an example of interactive session using the ``clr`` module:: - - >>>> from System.Collections import ArrayList - >>>> obj = ArrayList() - >>>> obj.Add(1) - 0 - >>>> obj.Add(2) - 1 - >>>> obj.Add("foo") - 2 - >>>> print obj[0], obj[1], obj[2] - 1 2 foo - >>>> print obj.Count - 3 - -Conversion of parameters -======================== - -When calling a .NET method Python objects are converted to .NET -objects. Lots of effort have been taken to make the conversion as -much transparent as possible; in particular, all the primitive types -such as int, float and string are converted to the corresponding .NET -types (e.g., ``System.Int32``, ``System.Float64`` and -``System.String``). - -Python objects without a corresponding .NET types (e.g., instances of -user classes) are passed as "black boxes", for example to be stored in -some sort of collection. - -The opposite .NET to Python conversions happens for the values returned -by the methods. Again, primitive types are converted in a -straightforward way; non-primitive types are wrapped in a Python object, -so that they can be treated as usual. - -Overload resolution -=================== - -When calling an overloaded method, PyPy.NET tries to find the best -overload for the given arguments; for example, consider the -``System.Math.Abs`` method:: - - - >>>> from System import Math - >>>> Math.Abs(-42) - 42 - >>>> Math.Abs(-42.0) - 42.0 - -``System.Math.Abs`` has got overloadings both for integers and floats: -in the first case we call the method ``System.Math.Abs(int32)``, while -in the second one we call the method ``System.Math.Abs(float64)``. - -If the system can't find a best overload for the given parameters, a -TypeError exception is raised. - - -Generic classes -================ - -Generic classes are fully supported. 
To instantiate a generic class, you need -to use the ``[]`` notation:: - - >>>> from System.Collections.Generic import List - >>>> mylist = List[int]() - >>>> mylist.Add(42) - >>>> mylist.Add(43) - >>>> mylist.Add("foo") - Traceback (most recent call last): - File "", line 1, in - TypeError: No overloads for Add could match - >>>> mylist[0] - 42 - >>>> for item in mylist: print item - 42 - 43 - - -External assemblies and Windows Forms -===================================== - -By default, you can only import .NET namespaces that belongs to already loaded -assemblies. To load additional .NET assemblies, you can use -``clr.AddReferenceByPartialName``. The following example loads -``System.Windows.Forms`` and ``System.Drawing`` to display a simple Windows -Form displaying the usual "Hello World" message:: - - >>>> import clr - >>>> clr.AddReferenceByPartialName("System.Windows.Forms") - >>>> clr.AddReferenceByPartialName("System.Drawing") - >>>> from System.Windows.Forms import Application, Form, Label - >>>> from System.Drawing import Point - >>>> - >>>> frm = Form() - >>>> frm.Text = "The first pypy-cli Windows Forms app ever" - >>>> lbl = Label() - >>>> lbl.Text = "Hello World!" - >>>> lbl.AutoSize = True - >>>> lbl.Location = Point(100, 100) - >>>> frm.Controls.Add(lbl) - >>>> Application.Run(frm) - -Unfortunately at the moment you can't do much more than this with Windows -Forms, because we still miss support for delegates and so it's not possible -to handle events. diff --git a/pypy/doc/config/objspace.allworkingmodules.txt b/pypy/doc/config/objspace.allworkingmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.allworkingmodules.txt +++ /dev/null @@ -1,6 +0,0 @@ -This option enables the usage of all modules that are known to be working well -and that translate without problems. - -Note that this option defaults to True (except when running -``py.py`` because it takes a long time to start). To force it -to False, use ``--no-allworkingmodules``. 
diff --git a/pypy/doc/config/translation.noprofopt.txt b/pypy/doc/config/translation.noprofopt.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules.fcntl.txt b/pypy/doc/config/objspace.usemodules.fcntl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.fcntl.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'fcntl' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/objspace.usemodules.math.txt b/pypy/doc/config/objspace.usemodules.math.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.math.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'math' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.txt b/pypy/doc/config/objspace.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/objspace.usemodules.array.txt b/pypy/doc/config/objspace.usemodules.array.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.array.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use interpreter-level version of array module (on by default). diff --git a/pypy/doc/config/translation.cli.exception_transformer.txt b/pypy/doc/config/translation.cli.exception_transformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.exception_transformer.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the exception transformer instead of the native .NET exceptions to -implement RPython exceptions. Enable this option only if you know what -you are doing. diff --git a/pypy/doc/getting-started-python.txt b/pypy/doc/getting-started-python.txt deleted file mode 100644 --- a/pypy/doc/getting-started-python.txt +++ /dev/null @@ -1,302 +0,0 @@ -============================================== -Getting Started with PyPy's Python Interpreter -============================================== - -.. contents:: -.. 
sectnum:: - -PyPy's Python interpreter is a very compliant Python -interpreter implemented in Python. When translated to C, it passes most of -`CPythons core language regression tests`_ and comes with many of the extension -modules included in the standard library including ``ctypes``. It can run large -libraries such as Django_ and Twisted_. There are some small behavioral -differences to CPython and some missing extensions, for details see `CPython -differences`_. - -.. _Django: http://djangoproject.org -.. _Twisted: http://twistedmatrix.com - -.. _`CPython differences`: cpython_differences.html - -To actually use PyPy's Python interpreter, the first thing you typically do is -translate it to get a reasonably performing interpreter. This is described in -the next section. If you just want to play around a bit, you can also try -untranslated `py.py interpreter`_ (which is extremely slow, but still fast -enough for tiny examples). - -Translating the PyPy Python interpreter ---------------------------------------- - -(**Note**: for some hints on how to translate the Python interpreter under -Windows, see the `windows document`_) - -.. _`windows document`: windows.html - -You can translate the whole of PyPy's Python interpreter to low level C code, -`CLI code`_, or `JVM code`_. - -1. Install dependencies. You need (these are Debian package names, - adapt as needed): - - * ``gcc`` - * ``python-dev`` - * ``python-ctypes`` if you are still using Python2.4 - * ``libffi-dev`` - * ``pkg-config`` (to help us locate libffi files) - * ``libz-dev`` (for the optional ``zlib`` module) - * ``libbz2-dev`` (for the optional ``bz2`` module) - * ``libncurses-dev`` (for the optional ``_minimal_curses`` module) - * ``libexpat1-dev`` (for the optional ``pyexpat`` module) - * ``libssl-dev`` (for the optional ``_ssl`` module) - * ``libgc-dev`` (Boehm: only when translating with `--opt=0, 1` or `size`) - -2. Translation is somewhat time-consuming (30 min to - over one hour) and RAM-hungry. 
If you have less than 1.5 GB of - RAM (or a slow machine) you might want to pick the - `optimization level`_ `1` in the next step. A level of - `2` or `3` or `jit` gives much better results, though. - - Let me stress this another time: at ``--opt=1`` you get the Boehm - GC, which is here mostly for historical and for testing reasons. - You really do not want to pick it. The resulting ``pypy-c`` is - slow. - -3. Run:: - - cd pypy/translator/goal - python translate.py --opt=jit targetpypystandalone.py - - possibly replacing ``--opt=jit`` with another `optimization level`_ - of your choice like ``--opt=2`` if you do not want the included JIT - compiler. (As of March 2010, the default level is ``--opt=2``, and - ``--opt=jit`` requires an Intel **32-bit** environment.) - -.. _`optimization level`: config/opt.html - -If everything works correctly this will create an executable -``pypy-c`` in the current directory. Type ``pypy-c --help`` -to see the options it supports - mainly the same basic -options as CPython. In addition, ``pypy-c --info`` prints the -translation options that where used to produce this particular -executable. The executable behaves mostly like a normal Python interpreter:: - - $ ./pypy-c - Python 2.5.2 (64177, Apr 16 2009, 16:33:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``this sentence is false'' - >>>> 46 - 4 - 42 - >>>> from test import pystone - >>>> pystone.main() - Pystone(1.1) time for 50000 passes = 2.57 - This machine benchmarks at 19455.3 pystones/second - >>>> - -This executable can be moved around or copied on other machines; see -Installation_ below. For now a JIT-enabled ``pypy-c`` always produces -debugging output to stderr when it exits, unless translated with -``--jit-debug=off``. - -The ``translate.py`` script takes a very large number of options controlling -what to translate and how. See ``translate.py -h``. 
Some of the more -interesting options (but for now incompatible with the JIT) are: - - * ``--stackless``: this produces a pypy-c that includes features - inspired by `Stackless Python `__. - - * ``--gc=boehm|ref|marknsweep|semispace|generation|hybrid``: - choose between using - the `Boehm-Demers-Weiser garbage collector`_, our reference - counting implementation or four of own collector implementations - (the default depends on the optimization level). - -Find a more detailed description of the various options in our `configuration -sections`_. - -.. _`configuration sections`: config/index.html - -.. _`translate PyPy with the thunk object space`: - -Translating with non-standard options -++++++++++++++++++++++++++++++++++++++++ - -It is possible to have non-standard features enabled for translation, -but they are not really tested any more. Look for example at the -`objspace proxies`_ document. - -.. _`objspace proxies`: objspace-proxies.html - -.. _`CLI code`: - -Translating using the CLI backend -+++++++++++++++++++++++++++++++++ - -To create a standalone .NET executable using the `CLI backend`_:: - - ./translate.py --backend=cli targetpypystandalone.py - -Or better, try out the experimental `branch/cli-jit`_ described by -Antonio Cuni's `Ph.D. thesis`_ and translate with the JIT:: - - ./translate.py -Ojit --backend=cli targetpypystandalone.py - -.. _`branch/cli-jit`: http://codespeak.net/svn/pypy/branch/cli-jit/ -.. _`Ph.D. thesis`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf - -The executable and all its dependencies will be stored in the -./pypy-cli-data directory. To run pypy.NET, you can run -./pypy-cli-data/main.exe. If you are using Linux or Mac, you can use -the convenience ./pypy-cli script:: - - $ ./pypy-cli - Python 2.5.2 (64219, Apr 17 2009, 13:54:38) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. 
- And now for something completely different: ``distopian and utopian chairs'' - >>>> - -Moreover, at the moment it's not possible to do the full translation -using only the tools provided by the Microsoft .NET SDK, since -``ilasm`` crashes when trying to assemble the pypy-cli code due to its -size. Microsoft .NET SDK 2.0.50727.42 is affected by this bug; other -version could be affected as well: if you find a version of the SDK -that works, please tell us. - -Windows users that want to compile their own pypy-cli can install -Mono_: if a Mono installation is detected the translation toolchain -will automatically use its ``ilasm2`` tool to assemble the -executables. - -To try out the experimental .NET integration, check the documentation of the -clr_ module. - -.. _`JVM code`: - -Translating using the JVM backend -+++++++++++++++++++++++++++++++++ - -To create a standalone JVM executable:: - - ./translate.py --backend=jvm targetpypystandalone.py - -This will create a jar file ``pypy-jvm.jar`` as well as a convenience -script ``pypy-jvm`` for executing it. To try it out, simply run -``./pypy-jvm``:: - - $ ./pypy-jvm - Python 2.5.2 (64214, Apr 17 2009, 08:11:23) - [PyPy 1.1.0] on darwin - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``# assert did not crash'' - >>>> - -Alternatively, you can run it using ``java -jar pypy-jvm.jar``. At the moment -the executable does not provide any interesting features, like integration with -Java. - -Installation -++++++++++++ - -A prebuilt ``pypy-c`` can be installed in a standard location like -``/usr/local/bin``, although some details of this process are still in -flux. It can also be copied to other machines as long as their system -is "similar enough": some details of the system on which the translation -occurred might be hard-coded in the executable. 
- -For installation purposes, note that the executable needs to be able to -find its version of the Python standard library in the following three -directories: ``lib-python/2.5.2``, ``lib-python/modified-2.5.2`` and -``lib_pypy``. They are located by "looking around" starting from the -directory in which the executable resides. The current logic is to try -to find a ``PREFIX`` from which the directories -``PREFIX/lib-python/2.5.2`` and ``PREFIX/lib-python/modified.2.5.2`` and -``PREFIX/lib_pypy`` can all be found. The prefixes that are tried are:: - - . - ./lib/pypy1.2 - .. - ../lib/pypy1.2 - ../.. - ../../lib/pypy-1.2 - ../../.. - etc. - -In order to use ``distutils`` or ``setuptools`` a directory ``PREFIX/site-packages`` needs to be created. Here's an example session setting up and using ``easy_install``:: - - $ cd PREFIX - $ mkdir site-packages - $ curl -sO http://peak.telecommunity.com/dist/ez_setup.py - $ bin/pypy-c ez_setup.py - ... - $ bin/easy_install WebOb - $ bin/pypy-c - Python 2.5.2 (64714, Apr 27 2009, 08:16:13) - [PyPy 1.1.0] on linux2 - Type "help", "copyright", "credits" or "license" for more information. - And now for something completely different: ``PyPy doesn't have copolyvariadic dependently-monomorphed hyperfluxads'' - >>>> import webob - >>>> - -.. _`py.py interpreter`: - -Running the Python Interpreter Without Translation ---------------------------------------------------- - -The py.py interpreter -+++++++++++++++++++++ - -To start interpreting Python with PyPy, install a C compiler that is -supported by distutils and use Python 2.4 or greater to run PyPy:: - - cd pypy - python bin/py.py - -After a few seconds (remember: this is running on top of CPython), -you should be at the PyPy prompt, which is the same as the Python -prompt, but with an extra ">". - -Now you are ready to start running Python code. Most Python -modules should work if they don't involve CPython extension -modules. 
**This is slow, and most C modules are not present by -default even if they are standard!** Here is an example of -determining PyPy's performance in pystones:: - - >>>> from test import pystone - >>>> pystone.main(10) - -The parameter is the number of loops to run through the test. The -default is 50000, which is far too many to run in a non-translated -PyPy version (i.e. when PyPy's interpreter itself is being interpreted -by CPython). - -py.py options -+++++++++++++ - -To list the PyPy interpreter command line options, type:: - - cd pypy - python bin/py.py --help - -py.py supports most of the options that CPython supports too (in addition to a -large amount of options that can be used to customize py.py). -As an example of using PyPy from the command line, you could type:: - - python py.py -c "from test import pystone; pystone.main(10)" - -Alternatively, as with regular Python, you can simply give a -script name on the command line:: - - python py.py ../../lib-python/2.5.2/test/pystone.py 10 - -See our `configuration sections`_ for details about what all the commandline -options do. - - -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _clr: clr-module.html -.. _`CPythons core language regression tests`: http://codespeak.net:8099/summary?category=applevel&branch=%3Ctrunk%3E - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt b/pypy/doc/config/translation.builtins_can_raise_exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/translation.builtins_can_raise_exceptions.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. 
internal diff --git a/pypy/doc/discussion/summer-of-pypy-pytest.txt b/pypy/doc/discussion/summer-of-pypy-pytest.txt deleted file mode 100644 --- a/pypy/doc/discussion/summer-of-pypy-pytest.txt +++ /dev/null @@ -1,56 +0,0 @@ -============================================ -Summer of PyPy proposal: Distributed py.test -============================================ - - -Purpose: -======== - -The main purpose of distributing py.test is to speedup tests -of actual applications (running all pypy tests already takes -ages). - -Method: -======= - -Remote imports: ---------------- - -On the beginning of communication, master server sends to client -import hook code, which then can import all needed libraries. - -Libraries are uploaded server -> client if they're needed (when -__import__ is called). Possible extension is to add some kind of -checksum (md5?) and store files in some directory. - -Previous experiments: ---------------------- - -Previous experiments tried to run on the lowest level - when function/ -method is called. This is pretty clear (you run as few code on client -side as possible), but has got some drawbacks: - -- You must simulate *everything* and transform it to server side in - case of need of absolutely anything (tracebacks, short and long, - source code etc.) -- It's sometimes hard to catch exceptions. -- Top level code in testing module does not work at all. - -Possible approach: ------------------- - -On client side (side really running tests) run some kind of cut-down -session, which is imported by remote import at the very beginning and -after that, we run desired tests (probably by importing whole test -file which allows us to have top-level imports). - -Then we transfer output data to server as string, possibly tweaking -file names (which is quite easy). 
- -Deliverables: -============= - -- better use of testing machines -- cut down test time -- possible extension to run distributed code testing, by running and - controlling several distributed parts on different machines. diff --git a/pypy/doc/config/translation.sandbox.txt b/pypy/doc/config/translation.sandbox.txt deleted file mode 100644 --- a/pypy/doc/config/translation.sandbox.txt +++ /dev/null @@ -1,15 +0,0 @@ -Generate a special fully-sandboxed executable. - -The fully-sandboxed executable cannot be run directly, but -only as a subprocess of an outer "controlling" process. The -sandboxed process is "safe" in the sense that it doesn't do -any library or system call - instead, whenever it would like -to perform such an operation, it marshals the operation name -and the arguments to its stdout and it waits for the -marshalled result on its stdin. This controller process must -handle these operation requests, in any way it likes, allowing -full virtualization. - -For examples of controller processes, see -``pypy/translator/sandbox/interact.py`` and -``pypy/translator/sandbox/pypy_interact.py``. diff --git a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt b/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.raisingop2direct_call.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. Transformation required by the LLVM backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._winreg.txt b/pypy/doc/config/objspace.usemodules._winreg.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._winreg.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the built-in '_winreg' module, provides access to the Windows registry. -This module is expected to be working and is included by default on Windows. 
diff --git a/pypy/doc/config/objspace.usemodules._minimal_curses.txt b/pypy/doc/config/objspace.usemodules._minimal_curses.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._minimal_curses.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_curses' module. -This module is just a stub. It only implements a few functions. diff --git a/pypy/doc/glossary.txt b/pypy/doc/glossary.txt deleted file mode 100644 --- a/pypy/doc/glossary.txt +++ /dev/null @@ -1,237 +0,0 @@ -PyPy, like any large project, has developed a jargon of its own. This -document gives brief definition of some of these terms and provides -links to more information. - -**abstract interpretation** - The technique of interpreting the bytecode of a user program with - an interpreter that handles abstract objects instead of concrete ones. - It can be used to check the bytecode or see what it does, without - actually executing it with concrete values. See Theory_. - -.. _annotator: - -**annotator** - The component of the translator_\ 's toolchain_ that performs a form - of `type inference`_ on the flow graph. See the `annotator pass`_ - in the documentation. - -.. _`application level`: - -**application level** - applevel_ code is normal Python code running on top of the PyPy or - CPython_ interpreter (see `interpreter level`_) - -.. _backend: - -**backend** - Code generator that converts an `RPython - `__ program to a `target - language`_ using the PyPy toolchain_. A backend uses either the - lltypesystem_ or the ootypesystem_. - -.. _`compile-time`: - -**compile-time** - In the context of the JIT_, compile time is when the JIT is - generating machine code "just in time". - -.. _CPython: - -**CPython** - The "default" implementation of Python, written in C and - distributed by the PSF_ on http://www.python.org. - -.. _`external function`: - -**external function** - Functions that we don't want to implement in Python for various - reasons (e.g. 
they need to make calls into the OS) and whose - implementation will be provided by the backend. - -.. _`garbage collection framework`: - -**garbage collection framework** - Code that makes it possible to write `PyPy's garbage collectors`_ - in Python itself. - -.. _`interpreter level`: - -**interpreter level** - Code running at this level is part of the implementation of the - PyPy interpreter and cannot interact normally with `application - level`_ code; it typically provides implementation for an object - space and its builtins. - -.. _`jit`: - -**jit** - `just in time compiler`_. - -.. _llinterpreter: - -**llinterpreter** - Piece of code that is able to interpret flow graphs. This is very - useful for testing purposes, especially if you work on the RPython_ - Typer. - -.. _lltypesystem: - -**lltypesystem** - A `C-like type model `__ that contains - structs and pointers. A backend_ that uses this type system is also - called a low-level backend. The C backend uses this - typesystem. - -.. _`low-level helper`: - -**low-level helper** - A function that the RTyper_ can use a call to as part of implementing - some operation in terms of the target `type system`_. - -.. _`mixed module`: - -**mixed module** - a module that accesses PyPy's `interpreter level`_. The name comes - from the fact that the module's implementation can be a mixture of - `application level`_ and `interpreter level`_ code. - -.. _`object space`: - -**multimethod** - A callable object that invokes a different Python function based - on the type of all its arguments (instead of just the class of the - first argument, as with normal methods). See Theory_. - -**object space** - The `object space `__ (often abbreviated to - "objspace") creates all objects and knows how to perform operations - on the objects. 
You may think of an object space as being a library - offering a fixed API, a set of operations, with implementations - that a) correspond to the known semantics of Python objects, b) - extend or twist these semantics, or c) serve whole-program analysis - purposes. - -.. _ootypesystem: - -**ootypesystem** - An `object oriented type model `__ - containing classes and instances. A backend_ that uses this type system - is also called a high-level backend. The JVM and CLI backends - all use this typesystem. - -.. _`prebuilt constant`: - -**prebuilt constant** - In RPython_ module globals are considered constants. Moreover, - global (i.e. prebuilt) lists and dictionaries are supposed to be - immutable ("prebuilt constant" is sometimes abbreviated to "pbc"). - -.. _`rpython`: - -.. _`promotion`: - -**promotion** - JIT_ terminology. *promotion* is a way of "using" a `run-time`_ - value at `compile-time`_, essentially by deferring compilation - until the run-time value is known. See if `the jit docs`_ help. - -**rpython** - `Restricted Python`_, a limited subset of the Python_ language. - The limitations make `type inference`_ possible. - It is also the language that the PyPy interpreter itself is written - in. - -.. _`rtyper`: - -**rtyper** - Based on the type annotations, the `RPython Typer`_ turns the flow - graph into one that fits the model of the target platform/backend_ - using either the lltypesystem_ or the ootypesystem_. - -.. _`run-time`: - -**run-time** - In the context of the JIT_, run time is when the code the JIT has - generated is executing. - -.. _`specialization`: - -**specialization** - A way of controlling how a specific function is handled by the - annotator_. One specialization is to treat calls to a function - with different argument types as if they were calls to different - functions with identical source. - -.. 
_`stackless`: - -**stackless** - Technology that enables various forms of non conventional control - flow, such as coroutines, greenlets and tasklets. Inspired by - Christian Tismer's `Stackless Python `__. - -.. _`standard interpreter`: - -**standard interpreter** - It is the `subsystem implementing the Python language`_, composed - of the bytecode interpreter and of the standard objectspace. - -.. _toolchain: - -**timeshifting** - JIT_ terminology. *timeshifting* is to do with moving from the - world where there are only `run-time`_ operations to a world where - there are both `run-time`_ and `compile-time`_ operations. - -**toolchain** - The `annotator pass`_, `The RPython Typer`_, and various - `backends`_. - -.. _`transformation`: - -**transformation** - Code that modifies flowgraphs to weave in `translation-aspects`_ - -.. _`translation-time`: - -**translation-time** - In the context of the JIT_, translation time is when the PyPy - source is being analyzed and the JIT itself is being created. - -.. _`translator`: - -**translator** - Tool_ based on the PyPy interpreter which can translate - sufficiently static Python programs into low-level code. - -.. _`type system`: - -**type system** - The RTyper can target either the lltypesystem_ or the ootypesystem_. - -.. _`type inference`: - -**type inference** - Deduces either partially or fully the type of expressions as - described in this `type inference article on Wikipedia`_. - PyPy's tool-chain own flavour of type inference is described - in the `annotator pass`_ section. - -.. _applevel: coding-guide.html#application-level -.. _`target language`: getting-started-dev.html#trying-out-the-translator -.. _`just in time compiler`: jit/index.html -.. _`the jit docs`: jit/index.html -.. _`type inference article on Wikipedia`: http://en.wikipedia.org/wiki/Type_inference -.. _`annotator pass`: translation.html#the-annotation-pass -.. _`The RPython Typer`: translation.html#the-rpython-typer -.. 
_`backends`: getting-started-dev.html#trying-out-the-translator -.. _Tool: getting-started-dev.html#trying-out-the-translator -.. _`translation-aspects`: translation-aspects.html -.. _`PyPy's garbage collectors`: garbage_collection.html -.. _`Restricted Python`: coding-guide.html#restricted-python -.. _PSF: http://www.python.org/psf/ -.. _Python: http://www.python.org -.. _`RPython Typer`: rtyper.html -.. _`subsystem implementing the Python language`: architecture.html#standard-interpreter -.. _Theory: theory.html - -.. include:: _ref.txt diff --git a/pypy/doc/config/translation.ootype.mangle.txt b/pypy/doc/config/translation.ootype.mangle.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.mangle.txt +++ /dev/null @@ -1,3 +0,0 @@ -Mangle the names of user defined attributes of the classes, in order -to ensure that every name is unique. Default is true, and it should -not be turned off unless you know what you are doing. diff --git a/pypy/doc/discussion/security-ideas.txt b/pypy/doc/discussion/security-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/security-ideas.txt +++ /dev/null @@ -1,312 +0,0 @@ -============== -Security ideas -============== - -These are some notes I (Armin) took after a talk at Chalmers by Steve -Zdancewic: "Encoding Information Flow in Haskell". That talk was -presenting a pure Haskell approach with monad-like constructions; I -think that the approach translates well to PyPy at the level of RPython. - - -The problem ------------ - -The problem that we try to solve here is: how to give the programmer a -way to write programs that are easily checked to be "secure", in the -sense that bugs shouldn't allow confidential information to be -unexpectedly leaked. This is not security as in defeating actively -malicious attackers. - - -Example -------- - -Let's suppose that we want to write a telnet-based application for a -bidding system. 
We want normal users to be able to log in with their -username and password, and place bids (i.e. type in an amount of money). -The server should record the highest bid so far but not allow users to -see that number. Additionally, the administrator should be able to log -in with his own password and see the highest bid. The basic program:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin() - - def serve_guest(): - global highest_bid - print "Enter your bid:" - n = int(raw_input()) - if n > highest_bid: # - highest_bid = n # - print "Thank you" - - def serve_admin(): - print "Highest big is:", highest_bid - -The goal is to make this program more secure by declaring and enforcing -the following properties: first, the guest code is allowed to manipulate -the highest_bid, as in the lines marked with ``#``, but these lines must -not leak back the highest_bid in a form visible to the guest user; -second, the printing in serve_admin() must only be allowed if the user -that logged in is really the administrator (e.g. catch bugs like -accidentally swapping the serve_guest() and serve_admin() calls in -mainloop()). - - -Preventing leak of information in guest code: 1st try ------------------------------------------------------ - -The basic technique to prevent leaks is to attach "confidentiality -level" tags to objects. In this example, the highest_bid int object -would be tagged with label="secret", e.g. by being initialized as:: - - highest_bid = tag(0, label="secret") - -At first, we can think about an object space where all objects have such -a label, and the label propagates to operations between objects: for -example, code like ``highest_bid += 1`` would produce a new int object -with again label="secret". - -Where this approach doesn't work is with if/else or loops. 
In the above -example, we do:: - - if n > highest_bid: - ... - -However, by the object space rules introduced above, the result of the -comparison is a "secret" bool object. This means that the guest code -cannot know if it is True or False, and so the PyPy interpreter has no -clue if it must follow the ``then`` or ``else`` branch of the ``if``. -So the guest code could do ``highest_bid += 1`` and probably even -``highest_bid = max(highest_bid, n)`` if max() is a clever enough -built-in function, but clearly this approach doesn't work well for more -complicated computations that we would like to perform at this point. - -There might be very cool possible ideas to solve this by doing some -kind of just-in-time flow object space analysis. However, here is a -possibly more practical approach. Let's forget about the object space -tricks and start again. (See `Related work`_ for why the object space -approach doesn't work too well.) - - -Preventing leak of information in guest code with the annotator instead ------------------------------------------------------------------------ - -Suppose that the program runs on top of CPython and not necessarily -PyPy. We will only need PyPy's annotator. The idea is to mark the code -that manipulates highest_bid explicitly, and make it RPython in the -sense that we can take its flow space and follow the calls (we don't -care about the precise types here -- we will use different annotations). -Note that only the bits that manipulate the secret values need to be -RPython.
Example:: - - # on top of CPython, 'hidden' is a type that hides a value without - # giving any way to normal programs to access it, so the program - # cannot do anything with 'highest_bid' - - highest_bid = hidden(0, label="secure") - - def enter_bid(n): - if n > highest_bid.value: - highest_bid.value = n - - enter_bid = secure(enter_bid) - - def serve_guest(): - print "Enter your bid:" - n = int(raw_input()) - enter_bid(n) - print "Thank you" - -The point is that the expression ``highest_bid.value`` raises a -SecurityException when run normally: it is not allowed to read this -value. The secure() decorator uses the annotator on the enter_bid() -function, with special annotations that I will describe shortly. Then -secure() returns a "compiled" version of enter_bid. The compiled -version is checked to satisfy the security constraints, and it contains -special code that then enables the ``highest_bid.value`` to work. - -The annotations propagated by secure() are ``SomeSecurityLevel`` -annotations. Normal constants are propagated as -SomeSecurityLevel("public"). The ``highest_bid.value`` returns the -annotation SomeSecurityLevel("secret"), which is the label of the -constant ``highest_bid`` hidden object. We define operations between -two SomeSecurityLevels to return a SomeSecurityLevel which is the max of -the secret levels of the operands. - -The key point is that secure() checks that the return value is -SomeSecurityLevel("public"). It also checks that only -SomeSecurityLevel("public") values are stored e.g. in global data -structures. - -In this way, any CPython code like serve_guest() can safely call -``enter_bid(n)``. There is no way to leak information about the current -highest bid back out of the compiled enter_bid(). - - -Declassification ----------------- - -Now there must be a controlled way to leak the highest_bid value, -otherwise it is impossible even for the admin to read it.
Note that -serve_admin(), which prints highest_bid, is considered to "leak" this -value because it is an input-output, i.e. it escapes the program. This -is a leak that we actually want -- the terminology is that serve_admin() -must "declassify" the value. - -To do this, there is a capability-like model that is easy to implement -for us. Let us modify the main loop as follows:: - - def mainloop(): - while True: - username = raw_input() - password = raw_input() - user, priviledge_token = authenticate(username, password) - if user == 'guest': - serve_guest() - elif user == 'admin': - serve_admin(priviledge_token) - del priviledge_token # make sure nobody else uses it - -The idea is that the authenticate() function (shown later) also returns -a "token" object. This is a normal Python object, but it should not be -possible for normal Python code to instantiate such an object manually. -In this example, authenticate() returns a ``priviledge("public")`` for -guests, and a ``priviledge("secret")`` for admins. Now -- and this is -the insecure part of this scheme, but it is relatively easy to control --- the programmer must make sure that these priviledge_token objects -don't go to unexpected places, particularly the "secret" one. They work -like capabilities: having a reference to them allows parts of the -program to see secret information, of a confidentiality level up to the -one corresponding to the token. - -Now we modify serve_admin() as follows: - - def serve_admin(token): - print "Highest big is:", declassify(highest_bid, token=token) - -The declassify() function reads the value if the "token" is privileged -enough, and raises an exception otherwise. - -What are we protecting here? The fact that we need the administrator -token in order to see the highest bid. If by mistake we swap the -serve_guest() and serve_admin() lines in mainloop(), then what occurs is -that serve_admin() would be called with the guest token. Then -declassify() would fail. 
If we assume that authenticate() is not buggy, -then the rest of the program is safe from leak bugs. - -There are other variants of declassify() that are convenient. For -example, in the RPython parts of the code, declassify() can be used to -control more precisely at which confidentiality levels we want which -values, if there are more than just two such levels. The "token" -argument could also be implicit in RPython parts, meaning "use the -current level"; normal non-RPython code always runs at "public" level, -but RPython functions could run with higher current levels, e.g. if they -are called with a "token=..." argument. - -(Do not confuse this with what enter_bid() does: enter_bid() runs at the -public level all along. It is ok for it to compute with, and even -modify, the highest_bid.value. The point of enter_bid() was that by -being an RPython function the annotator can make sure that the value, or -even anything that gives a hint about the value, cannot possibly escape -from the function.) - -It is also useful to have "globally trusted" administrator-level RPython -functions that always run at a higher level than the caller, a bit like -Unix programs with the "suid" bit. If we set aside the consideration -that it should not be possible to make new "suid" functions too easily, -then we could define the authenticate() function of our server example -as follows:: - - def authenticate(username, password): - database = {('guest', 'abc'): priviledge("public"), - ('admin', '123'): priviledge("secret")} - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public") - - authenticate = secure(authenticate, suid="secret") - -The "suid" argument makes the compiled function run on level "secret" -even if the caller is "public" or plain CPython code. The declassify() -in the function is allowed because of the current level of "secret".
-Note that the function returns a "public" tuple -- the username is -public, and the token_obj is declassified to public. This is the -property that allows CPython code to call it. - -Of course, like a Unix suid program the authenticate() function could be -buggy and leak information, but like suid programs it is small enough -for us to feel that it is secure just by staring at the code. - -An alternative to the suid approach is to play with closures, e.g.:: - - def setup(): - #initialize new levels -- this cannot be used to access existing levels - public_level = create_new_priviledge("public") - secret_level = create_new_priviledge("secret") - - database = {('guest', 'abc'): public_level, - ('admin', '123'): secret_level} - - def authenticate(username, password): - token_obj = database[username, password] - return username, declassify(token_obj, target_level="public", - token=secret_level) - - return secure(authenticate) - - authenticate = setup() - -In this approach, declassify() works because it has access to the -secret_level token. We still need to make authenticate() a secure() -compiled function to hide the database and the secret_level more -carefully; otherwise, code could accidentally find them by inspecting -the traceback of the KeyError exception if the username or password is -invalid. Also, secure() will check for us that authenticate() indeed -returns a "public" tuple. - -This basic model is easy to extend in various directions. For example -secure() RPython functions should be allowed to return non-public -results -- but then they have to be called either with an appropriate -"token=..." keyword, or else they return hidden objects again. They -could also be used directly from other RPython functions, in which the -level of what they return is propagated. - - -Related work ------------- - -What I'm describing here is nothing more than an adaptation of existing -techniques to RPython. 
- -It is noteworthy to mention at this point why the object space approach -doesn't work as well as we could first expect. The distinction between -static checking and dynamic checking (with labels only attached to -values) seems to be well known; also, it seems to be well known that the -latter is too coarse in practice. The problem is about branching and -looping. From the object space' point of view it is quite hard to know -what a newly computed value really depends on. Basically, it is -difficult to do better than: after is_true() has been called on a secret -object, then we must assume that all objects created are also secret -because they could depend in some way on the truth-value of the previous -secret object. - -The idea to dynamically use static analysis is the key new idea -presented by Steve Zdancewic in his talk. You can have small controlled -RPython parts of the program that must pass through a static analysis, -and we only need to check dynamically that some input conditions are -satisfied when other parts of the program call the RPython parts. -Previous research was mostly about designing languages that are -completely statically checked at compile-time. The delicate part is to -get the static/dynamic mixture right so that even indirect leaks are not -possible -- e.g. leaks that would occur from calling functions with -strange arguments to provoke exceptions, and where the presence of the -exception or not would be information in itself. This approach seems to -do that reliably. 
(Of course, at the talk many people including the -speaker were wondering about ways to move more of the checking at -compile-time, but Python people won't have such worries :-) diff --git a/pypy/doc/discussion/ctypes_modules.txt b/pypy/doc/discussion/ctypes_modules.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_modules.txt +++ /dev/null @@ -1,65 +0,0 @@ -what is needed for various ctypes-based modules and how feasible they are -========================================================================== - -Quick recap for module evaluation: - -1. does the module use callbacks? - -2. how sophisticated ctypes usage is (accessing of _objects?) - -3. any specific tricks - -4. does it have tests? - -5. dependencies - -6. does it depend on cpython c-api over ctypes? - -Pygame -====== - -1. yes, for various things, but basic functionality can be achieved without - -2. probably not - -3. not that I know of - -4. yes for tests, no for unittests - -5. numpy, but can live without, besides only C-level dependencies. On OS/X - it requires PyObjC. - -6. no - - -PyOpenGL -======== - -1. yes, for GLX, but not for the core functionality - -2. probably not - -3. all the code is auto-generated - -4. it has example programs, no tests - -5. numpy, but can live without it. can use various surfaces (including pygame) to draw on - -6. no - - -Sqlite -====== - -1. yes, but I think it's not necessary - -2. no - -3. no - -4. yes - -5. datetime - -6. it passes py_object around in few places, not sure why (probably as an - opaque argument). diff --git a/pypy/doc/index.txt b/pypy/doc/index.txt deleted file mode 100644 --- a/pypy/doc/index.txt +++ /dev/null @@ -1,59 +0,0 @@ - -The PyPy project aims at producing a flexible and fast Python_ -implementation. The guiding idea is to translate a Python-level -description of the Python language itself to lower level languages. -Rumors have it that the secret goal is being faster-than-C which is -nonsense, isn't it? 
`more...`_ - -Getting into PyPy ... -============================================= - -* `Release 1.4`_: the latest official release - -* `PyPy Blog`_: news and status info about PyPy - -* `Documentation`_: extensive documentation and papers_ about PyPy. - -* `Getting Started`_: Getting started and playing with PyPy. - -Mailing lists, bug tracker, IRC channel -============================================= - -* `Development mailing list`_: development and conceptual - discussions. - -* `Subversion commit mailing list`_: updates to code and - documentation. - -* `Development bug/feature tracker`_: filing bugs and feature requests. - -* `Sprint mailing list`_: mailing list for organizing upcoming sprints. - -* **IRC channel #pypy on freenode**: Many of the core developers are hanging out - at #pypy on irc.freenode.net. You are welcome to join and ask questions - (if they are not already developed in the FAQ_). - You can find logs of the channel here_. - -.. XXX play1? - -Meeting PyPy developers -======================= - -The PyPy developers are organizing sprints and presenting results at -conferences all year round. They will be happy to meet in person with -anyone interested in the project. Watch out for sprint announcements -on the `development mailing list`_. - -.. _Python: http://docs.python.org/index.html -.. _`more...`: architecture.html#mission-statement -.. _`PyPy blog`: http://morepypy.blogspot.com/ -.. _`development bug/feature tracker`: https://codespeak.net/issue/pypy-dev/ -.. _here: http://tismerysoft.de/pypy/irc-logs/pypy -.. _`sprint mailing list`: http://codespeak.net/mailman/listinfo/pypy-sprint -.. _`subversion commit mailing list`: http://codespeak.net/mailman/listinfo/pypy-svn -.. _`development mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`FAQ`: faq.html -.. _`Documentation`: docindex.html -.. _`Getting Started`: getting-started.html -.. _papers: extradoc.html -.. 
_`Release 1.4`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.usemodules.zipimport.txt b/pypy/doc/config/objspace.usemodules.zipimport.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zipimport.txt +++ /dev/null @@ -1,3 +0,0 @@ -This module implements zipimport mechanism described -in PEP 302. It's supposed to work and translate, so it's included -by default \ No newline at end of file diff --git a/pypy/doc/config/objspace.usemodules._stackless.txt b/pypy/doc/config/objspace.usemodules._stackless.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._stackless.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_stackless' module. - -Exposes the `stackless` primitives, and also implies a stackless build. -See also :config:`translation.stackless`. - -.. _`stackless`: ../stackless.html diff --git a/pypy/doc/config/translation.jit_ffi.txt b/pypy/doc/config/translation.jit_ffi.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Internal option: enable OptFfiCall in the jit optimizations. 
diff --git a/pypy/doc/config/objspace.usemodules.cpyext.txt b/pypy/doc/config/objspace.usemodules.cpyext.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cpyext.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use (experimental) cpyext module, that tries to load and run CPython extension modules diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -1,7 +1,7 @@ import py import sys from pypy.rlib.jit import JitDriver, we_are_jitted, hint, dont_look_inside -from pypy.rlib.jit import OPTIMIZER_FULL, OPTIMIZER_SIMPLE, loop_invariant +from pypy.rlib.jit import loop_invariant from pypy.rlib.jit import jit_debug, assert_green, AssertGreenFailed from pypy.rlib.jit import unroll_safe, current_trace_length from pypy.jit.metainterp.warmspot import ll_meta_interp, get_stats @@ -15,11 +15,11 @@ from pypy.jit.metainterp.typesystem import LLTypeHelper, OOTypeHelper from pypy.rpython.lltypesystem import lltype, llmemory from pypy.rpython.ootypesystem import ootype +from pypy.jit.metainterp.optimizeopt import ALL_OPTS_DICT def _get_jitcodes(testself, CPUClass, func, values, type_system, supports_longlong=False, **kwds): from pypy.jit.codewriter import support, codewriter - from pypy.jit.metainterp import simple_optimize class FakeJitCell: __compiled_merge_points = [] @@ -37,11 +37,8 @@ return self._cell _cell = FakeJitCell() - # pick the optimizer this way - optimize_loop = staticmethod(simple_optimize.optimize_loop) - optimize_bridge = staticmethod(simple_optimize.optimize_bridge) - trace_limit = sys.maxint + enable_opts = ALL_OPTS_DICT func._jit_unroll_safe_ = True rtyper = support.annotate(func, values, type_system=type_system) @@ -1250,7 +1247,7 @@ x += inst.foo n -= 1 return x - res = self.meta_interp(f, [20], optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [20], enable_opts='') assert res == f(20) self.check_loops(call=0) @@ 
-1453,8 +1450,7 @@ m = m >> 1 return x - res = self.meta_interp(f, [50, 1], - optimizer=OPTIMIZER_SIMPLE) + res = self.meta_interp(f, [50, 1], enable_opts='') assert res == 42 def test_set_param(self): @@ -2419,12 +2415,12 @@ res = self.meta_interp(f, [1, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert not res res = self.meta_interp(f, [0, 100], policy=StopAtXPolicy(getcls), - optimizer=OPTIMIZER_SIMPLE) + enable_opts='') assert res class BaseLLtypeTests(BasicTests): @@ -2504,5 +2500,25 @@ self.meta_interp(main, []) + def test_enable_opts(self): + jitdriver = JitDriver(greens = [], reds = ['a']) + + class A(object): + def __init__(self, i): + self.i = i + + def f(): + a = A(0) + + while a.i < 10: + jitdriver.jit_merge_point(a=a) + jitdriver.can_enter_jit(a=a) + a = A(a.i + 1) + + self.meta_interp(f, []) + self.check_loops(new_with_vtable=0) + self.meta_interp(f, [], enable_opts='') + self.check_loops(new_with_vtable=1) + class TestLLtype(BaseLLtypeTests, LLJitMixin): pass diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -278,15 +278,14 @@ v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) + optimize_STRLEN = optimize_UNICODELEN = optimize_ARRAYLEN_GC + def optimize_STRGETITEM(self, op): self.emit_operation(op) v1 = self.getvalue(op.result) v1.intbound.make_ge(IntLowerBound(0)) v1.intbound.make_lt(IntUpperBound(256)) - optimize_STRLEN = optimize_ARRAYLEN_GC - optimize_UNICODELEN = optimize_ARRAYLEN_GC - def make_int_lt(self, box1, box2): v1 = self.getvalue(box1) v2 = self.getvalue(box2) diff --git a/pypy/doc/discussion/VM-integration.txt b/pypy/doc/discussion/VM-integration.txt deleted file mode 100644 --- a/pypy/doc/discussion/VM-integration.txt +++ /dev/null @@ -1,263 +0,0 @@ -============================================== -Integration of PyPy 
with host Virtual Machines -============================================== - -This document is based on the discussion I had with Samuele during the -Duesseldorf sprint. It's not much more than random thoughts -- to be -reviewed! - -Terminology disclaimer: both PyPy and .NET have the concept of -"wrapped" or "boxed" objects. To avoid confusion I will use "wrapping" -on the PyPy side and "boxing" on the .NET side. - -General idea -============ - -The goal is to find a way to efficiently integrate the PyPy -interpreter with the hosting environment such as .NET. What we would -like to do includes but it's not limited to: - - - calling .NET methods and instantiate .NET classes from Python - - - subclass a .NET class from Python - - - handle native .NET objects as transparently as possible - - - automatically apply obvious Python <--> .NET conversions when - crossing the borders (e.g. integers, string, etc.) - -One possible solution is the "proxy" approach, in which we manually -(un)wrap/(un)box all the objects when they cross the border. - -Example -------- - - :: - - public static int foo(int x) { return x} - - >>>> from somewhere import foo - >>>> print foo(42) - -In this case we need to take the intval field of W_IntObject, box it -to .NET System.Int32, call foo using reflection, then unbox the return -value and reconstruct a new (or reuse an existing one) W_IntObject. - -The other approach ------------------- - -The general idea to solve handle this problem is to split the -"stateful" and "behavioral" parts of wrapped objects, and use already -boxed values for storing the state. - -This way when we cross the Python --> .NET border we can just throw -away the behavioral part; when crossing .NET --> Python we have to -find the correct behavioral part for that kind of boxed object and -reconstruct the pair. 
- - -Split state and behaviour in the flowgraphs -=========================================== - -The idea is to write a graph transformation that takes an usual -ootyped flowgraph and split the classes and objects we want into a -stateful part and a behavioral part. - -We need to introduce the new ootypesystem type ``Pair``: it acts like -a Record but it hasn't its own identity: the id of the Pair is the id -of its first member. - - XXX about ``Pair``: I'm not sure this is totally right. It means - that an object can change identity simply by changing the value of a - field??? Maybe we could add the constraint that the "id" field - can't be modified after initialization (but it's not easy to - enforce). - - XXX-2 about ``Pair``: how to implement it in the backends? One - possibility is to use "struct-like" types if available (as in - .NET). But in this case it's hard to implement methods/functions - that modify the state of the object (such as __init__, usually). The - other possibility is to use a reference type (i.e., a class), but in - this case there will be a gap between the RPython identity (in which - two Pairs with the same state are indistinguishable) and the .NET - identity (in which the two objects will have a different identity, - of course). - -Step 1: RPython source code ---------------------------- - - :: - - class W_IntObject: - def __init__(self, intval): - self.intval = intval - - def foo(self, x): - return self.intval + x - - def bar(): - x = W_IntObject(41) - return x.foo(1) - - -Step 2: RTyping ---------------- - -Sometimes the following examples are not 100% accurate for the sake of -simplicity (e.g: we directly list the type of methods instead of the -ootype._meth instances that contains it). 
- -Low level types - - :: - - W_IntObject = Instance( - "W_IntObject", # name - ootype.OBJECT, # base class - {"intval": (Signed, 0)}, # attributes - {"foo": Meth([Signed], Signed)} # methods - ) - - -Prebuilt constants (referred by name in the flowgraphs) - - :: - - W_IntObject_meta_pbc = (...) - W_IntObject.__init__ = (static method pbc - see below for the graph) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject) - 2. oosetfield(x, "meta", W_IntObject_meta_pbc) - 3. direct_call(W_IntObject.__init__, x, 41) - 4. result = oosend("foo", x, 1) - 5. return result - } - - W_IntObject.__init__(W_IntObject self, Signed intval) { - 1. oosetfield(self, "intval", intval) - } - - W_IntObject.foo(W_IntObject self, Signed x) { - 1. value = oogetfield(self, "value") - 2. result = int_add(value, x) - 3. return result - } - -Step 3: Transformation ----------------------- - -This step is done before the backend plays any role, but it's still -driven by its need, because at this time we want a mapping that tell -us what classes to split and how (i.e., which boxed value we want to -use). - -Let's suppose we want to map W_IntObject.intvalue to the .NET boxed -``System.Int32``. This is possible just because W_IntObject contains -only one field. Note that the "meta" field inherited from -ootype.OBJECT is special-cased because we know that it will never -change, so we can store it in the behaviour. - - -Low level types - - :: - - W_IntObject_bhvr = Instance( - "W_IntObject_bhvr", - ootype.OBJECT, - {}, # no more fields! - {"foo": Meth([W_IntObject_pair, Signed], Signed)} # the Pair is also explicitly passed - ) - - W_IntObject_pair = Pair( - ("value", (System.Int32, 0)), # (name, (TYPE, default)) - ("behaviour", (W_IntObject_bhvr, W_IntObject_bhvr_pbc)) - ) - - -Prebuilt constants - - :: - - W_IntObject_meta_pbc = (...) 
- W_IntObject.__init__ = (static method pbc - see below for the graph) - W_IntObject_bhvr_pbc = new(W_IntObject_bhvr); W_IntObject_bhvr_pbc.meta = W_IntObject_meta_pbc - W_IntObject_value_default = new System.Int32(0) - - -Flowgraphs - - :: - - bar() { - 1. x = new(W_IntObject_pair) # the behaviour has been already set because - # it's the default value of the field - - 2. # skipped (meta is already set in the W_IntObject_bhvr_pbc) - - 3. direct_call(W_IntObject.__init__, x, 41) - - 4. bhvr = oogetfield(x, "behaviour") - result = oosend("foo", bhvr, x, 1) # note that "x" is explicitly passed to foo - - 5. return result - } - - W_IntObject.__init__(W_IntObjectPair self, Signed value) { - 1. boxed = clibox(value) # boxed is of type System.Int32 - oosetfield(self, "value", boxed) - } - - W_IntObject.foo(W_IntObject_bhvr bhvr, W_IntObject_pair self, Signed x) { - 1. boxed = oogetfield(self, "value") - value = unbox(boxed, Signed) - - 2. result = int_add(value, x) - - 3. return result - } - - -Inheritance ------------ - -Apply the transformation to a whole class (sub)hierarchy is a bit more -complex. Basically we want to mimic the same hierarchy also on the -``Pair``\s, but we have to fight the VM limitations. In .NET for -example, we can't have "covariant fields":: - - class Base { - public Base field; - } - - class Derived: Base { - public Derived field; - } - -A solution is to use only kind of ``Pair``, whose ``value`` and -``behaviour`` type are of the most precise type that can hold all the -values needed by the subclasses:: - - class W_Object: pass - class W_IntObject(W_Object): ... - class W_StringObject(W_Object): ... - - ... - - W_Object_pair = Pair(System.Object, W_Object_bhvr) - -Where ``System.Object`` is of course the most precise type that can -hold both ``System.Int32`` and ``System.String``. 
- -This means that the low level type of all the ``W_Object`` subclasses -will be ``W_Object_pair``, but it also means that we will need to -insert the appropriate downcasts every time we want to access its -fields. I'm not sure how much this can impact performances. - - diff --git a/pypy/doc/eventhistory.txt b/pypy/doc/eventhistory.txt deleted file mode 100644 --- a/pypy/doc/eventhistory.txt +++ /dev/null @@ -1,313 +0,0 @@ - - - The PyPy project is a worldwide collaborative effort and its - members are organizing sprints and presenting results at conferences - all year round. **This page is no longer maintained!** See `our blog`_ - for upcoming events. - -.. _`our blog`: http://morepypy.blogspot.com/ - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint took place right after the EuroPython -Conference from the *6th to the 9th of July*. - -Read more in the `EuroPython 2006 sprint report`_. - -.. _`EuroPython 2006 sprint report`: http://codespeak.net/pypy/extradoc/sprintinfo/post-ep2006/report.txt - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy presented experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. 
_`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. _`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - -PyPy sprint at Akihabara (Tokyo, Japan) -================================================================== - -*April 23rd - 29th 2006.* This sprint was in Akihabara, Tokyo, Japan, -our hosts was FSIJ (Free Software Initiative of Japan) and we aimed -for the sprint to promote Python and introduce people to PyPy. Good -progress was also made on PyPy's ootypesystem for the more high level -backends. For more details, read the last `sprint status`_ page and -enjoy the pictures_. - -.. _`sprint status`: http://codespeak.net/pypy/extradoc/sprintinfo/tokyo/tokyo-planning.html -.. _`pictures`: http://www.flickr.com/photos/19046555 at N00/sets/72057594116388174/ - -PyPy at Python UK/ACCU Conference (United Kingdom) -=================================================================== - -*April 19th - April 22nd 2006.* Several talks about PyPy were hold at -this year's Python UK/ACCU conference. Read more at the `ACCU site`_. - -.. _`ACCU site`: http://www.accu.org/ - -PyPy at XPDay France 2006 in Paris March 23rd - March 24th 2006 -================================================================== - -Logilab presented PyPy at the first `french XP Day`_ that it was -sponsoring and which was held in Paris. There was over a hundred -attendants. Interesting talks included Python as an agile language and -Tools for continuous integration. - -.. _`french XP Day`: http://www.xpday.fr/ - -Logic Sprint at Louvain-la-Neuve University (Louvain-la-Neuve, Belgium) -======================================================================== - -*March 6th - March 10th 2006.* PyPy developers focusing on adding -logic programming to PyPy will met with the team that developed the Oz -programming language and the Mozart interpreter. - -Read the report_ and the original announcement_. - -.. 
_report: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/report.html -.. _announcement: http://codespeak.net/pypy/extradoc/sprintinfo/louvain-la-neuve-2006/sprint-announcement.html - -PyCon Sprint 2006 (Dallas, Texas, USA) -================================================================== - -*Feb 27th - March 2nd 2006.* The Post-PyCon PyPy Sprint took place -right after PyCon 2006. - -A report is coming up. - - -Talks at PyCon 2006 (Dallas, Texas, USA) -=================================================================== - -*Feb 24th - Feb 26th 2006.* PyPy developers spoke at `PyCon 2006`_. - -.. _`PyCon 2006`: http://us.pycon.org/TX2006/HomePage - - -PyPy at Solutions Linux in Paris January 31st - February 2nd 2006 -=================================================================== - -PyPy developers from Logilab presented the intermediate results of the -project during the Solutions Linux tradeshow in Paris. A lot of -enthusiasts already knew about the project and were eager to learn -about the details. Many people discovered PyPy on this occasion and -said they were interested in the outcome and would keep an eye on its -progress. Read the `talk slides`_. - -.. _`talk slides`: http://codespeak.net/pypy/extradoc/talk/solutions-linux-paris-2006.html - - -PyPy Sprint in Palma De Mallorca 23rd - 29th January 2006 -=================================================================== - -The Mallorca sprint that took place in Palma de Mallorca is over. -Topics included progressing with the JIT work started in Göteborg -and Paris, GC and optimization work, stackless, and -improving our way to write glue code for C libraries. - -Read more in `the announcement`_, there is a `sprint report`_ -for the first three days and `one for the rest of the sprint`_. - - -.. _`the announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/mallorca/sprint-announcement.html -.. _`sprint report`: http://codespeak.net/pipermail/pypy-dev/2006q1/002746.html -.. 
_`one for the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2006q1/002749.html - -Preliminary EU reports released -=============================== - -After many hours of writing and typo-hunting we finally finished the -`reports for the EU`_. They contain most of the material found on our regular -documentation page but also a lot of new material not covered there. Note that -all these documents are not approved by the European Union and therefore only -preliminary. *(01/06/2006)* - -.. _`reports for the EU`: index-report.html - - -PyPy Sprint in Göteborg 7th - 11th December 2005 -================================================= - -The Gothenburg sprint is over. It was a very productive sprint: work has -been started on a JIT prototype, we added support for __del__ in PyPy, -the socket module had some progress, PyPy got faster and work was started to -expose the internals of our parser and bytecode compiler to the user. -Michael and Carl have written a `report about the first half`_ and `one about -the second half`_ of the sprint. *(12/18/2005)* - -.. _`report about the first half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002656.html -.. _`one about the second half`: http://codespeak.net/pipermail/pypy-dev/2005q4/002660.html - -PyPy release 0.8.0 -=================== - -The third PyPy release is out, with an integrated and translatable -compiler, speed progress, and now the possibility to translate our -experimental "Thunk" object space (supporting lazy computed objects) -with its features preserved. - -See the `release 0.8 announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. There is also a short FAQ_. *(11/03/2005)* - -.. _`release 0.8 announcement`: release-0.8.0.html - -PyPy Sprint in Paris 10th-16th October 2005 -======================================================== - -The Paris sprint is over. 
We are all at home again and more or less exhausted. -The sprint attracted 18 participants and took place in -`Logilab offices in Paris`_. We were happy to have five new -developers to the PyPy Community! The focus was on implementing -`continuation-passing`_ style (stackless), making the translation process -work for target languages with more powerful object systems and some tiny -steps into the JIT_ direction. Michael and Carl have written -a `report about day one`_ and `one about day two and three`_. -Together with Armin they wrote one about `the rest of the sprint`_ on the -way back. -*(10/18/2005)* - -.. _`Logilab offices in Paris`: http://codespeak.net/pypy/extradoc/sprintinfo/paris-2005-sprint.html -.. _JIT: http://en.wikipedia.org/wiki/Just-in-time_compilation -.. _`continuation-passing`: http://en.wikipedia.org/wiki/Continuation_passing_style -.. _`report about day one`: http://codespeak.net/pipermail/pypy-dev/2005q4/002510.html -.. _`one about day two and three`: http://codespeak.net/pipermail/pypy-dev/2005q4/002512.html -.. _`the rest of the sprint`: http://codespeak.net/pipermail/pypy-dev/2005q4/002514.html - -PyPy release 0.7.0 -=================== - -The first implementation of Python in Python is now also the second -implementation of Python in C :-) - -See the `release announcement`_ for further details about the release and -the `getting started`_ document for instructions about downloading it and -trying it out. We also have the beginning of a FAQ_. *(08/28/2005)* - -.. _`pypy-0.7.0`: -.. _`release announcement`: release-0.7.0.html -.. _`getting started`: getting-started.html -.. _FAQ: faq.html - -PyPy Sprint in Heidelberg 22nd-29th August 2005 -========================================================== - -The last `PyPy sprint`_ took place at the Heidelberg University -in Germany from 22nd August to 29th August (both days included). 
-Its main focus is translation of the whole PyPy interpreter -to a low level language and reaching 2.4.1 Python compliance. -The goal of the sprint is to release a first self-contained -PyPy-0.7 version. Carl has written a report about `day 1 - 3`_, -there are `some pictures`_ online and a `heidelberg summary report`_ -detailing some of the works that led to the successful release -of `pypy-0.7.0`_! - -.. _`heidelberg summary report`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-report.html -.. _`PyPy sprint`: http://codespeak.net/pypy/extradoc/sprintinfo/Heidelberg-sprint.html -.. _`day 1 - 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002287.html -.. _`some pictures`: http://codespeak.net/~hpk/heidelberg-sprint/ - -PyPy Hildesheim2 finished: first self-contained PyPy run! -=========================================================== - -Up until 31st August we were in a PyPy sprint at `Trillke-Gut`_. -Carl has written a `report about day 1`_, Holger -about `day 2 and day 3`_ and Carl again about `day 4 and day 5`_, -On `day 6`_ Holger reports the `breakthrough`_: PyPy runs -on its own! Hurray_!. And Carl finally reports about the winding -down of `day 7`_ which saw us relaxing, discussing and generally -having a good time. You might want to look at the selected -`pictures from the sprint`_. - -.. _`report about day 1`: http://codespeak.net/pipermail/pypy-dev/2005q3/002217.html -.. _`day 2 and day 3`: http://codespeak.net/pipermail/pypy-dev/2005q3/002220.html -.. _`day 4 and day 5`: http://codespeak.net/pipermail/pypy-dev/2005q3/002234.html -.. _`day 6`: http://codespeak.net/pipermail/pypy-dev/2005q3/002239.html -.. _`day 7`: http://codespeak.net/pipermail/pypy-dev/2005q3/002245.html -.. _`breakthrough`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Thumbnails/36.jpg -.. _`hurray`: http://codespeak.net/~hpk/hildesheim2-sprint-www/hildesheim2-sprint-www-Pages/Image37.html -.. 
_`pictures from the sprint`: http://codespeak.net/~hpk/hildesheim2-sprint-www/ -.. _`Trillke-Gut`: http://www.trillke.net/images/HomePagePictureSmall.jpg - -EuroPython 2005 sprints finished -====================================================== - -We had two sprints around EuroPython, one more internal core -developer one and a public one. Both sprints were quite -successful. Regarding the Pre-EuroPython sprint Michael Hudson -has posted summaries of `day 1`_, `day 2`_ and `day 3`_ on -the `pypy-dev`_ mailing list. The larger public sprint -has not been summarized yet but it went very well. We had -20 people initially attending to hear the tutorials and -work a bit. Later with around 13-14 people we made the -move to Python-2.4.1, integrated the parser, improved -the LLVM backends and type inference in general. -*(07/13/2005)* - -.. _`day 1`: http://codespeak.net/pipermail/pypy-dev/2005q2/002169.html -.. _`day 2`: http://codespeak.net/pipermail/pypy-dev/2005q2/002171.html -.. _`day 3`: http://codespeak.net/pipermail/pypy-dev/2005q2/002172.html -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - -.. _EuroPython: http://europython.org -.. _`translation`: translation.html -.. _`sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-announcement.html -.. _`list of people coming`: http://codespeak.net/pypy/extradoc/sprintinfo/EP2005-people.html - -Duesseldorf PyPy sprint 2-9 June 2006 -================================================================== - -The next PyPy sprint will be held in the Computer Science department of -Heinrich-Heine Universitaet Duesseldorf from the *2nd to the 9th of June*. -Main focus of the sprint will be on the goals of the upcoming June 0.9 -release. - -Read more in `the sprint announcement`_, see who is planning to attend -on the `people page`_. - -.. _`the sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/announce.html -.. 
_`people page`: http://codespeak.net/pypy/extradoc/sprintinfo/ddorf2006/people.html - - -PyPy at XP 2006 and Agile 2006 -================================================================== - -PyPy will present experience reports at the two main agile conferences -this year, `XP 2006`_ and `Agile 2006`_. -Both experience reports focus on aspects of the sprint-driven -development method that is being used in PyPy. - -.. _`XP 2006`: http://virtual.vtt.fi/virtual/xp2006/ -.. _`Agile 2006`: http://www.agile2006.org/ - - -EuroPython PyPy sprint 6-9 July 2006 -================================================================== - -Once again a PyPy sprint will take place right after the EuroPython -Conference. This year it will be from the *6th to the 9th of July*. - -Read more in `EuroPython sprint announcement`_, see who is planning to attend -on `the people page`_. There is also a page_ in the python wiki. - -.. _`EuroPython sprint announcement`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/announce.html -.. _`the people page`: http://codespeak.net/pypy/extradoc/sprintinfo/europython-2006/people.html -.. 
_page: http://wiki.python.org/moin/EuroPython2006 diff --git a/pypy/doc/discussion/GC-performance.txt b/pypy/doc/discussion/GC-performance.txt deleted file mode 100644 --- a/pypy/doc/discussion/GC-performance.txt +++ /dev/null @@ -1,118 +0,0 @@ -StartHeapsize# is the framework GC as of revision 31586 with initial -bytes_malloced_threshold of 2-512 MB - -NewHeuristics is the framework GC with a new heuristics for adjusting -the bytes_malloced_threshold - -:: - - Pystone - StartHeapsize2: - This machine benchmarks at 5426.92 pystones/second - This machine benchmarks at 5193.91 pystones/second - This machine benchmarks at 5403.46 pystones/second - StartHeapsize8: - This machine benchmarks at 6075.33 pystones/second - This machine benchmarks at 6007.21 pystones/second - This machine benchmarks at 6122.45 pystones/second - StartHeapsize32: - This machine benchmarks at 6643.05 pystones/second - This machine benchmarks at 6590.51 pystones/second - This machine benchmarks at 6593.41 pystones/second - StartHeapsize128: - This machine benchmarks at 7065.47 pystones/second - This machine benchmarks at 7102.27 pystones/second - This machine benchmarks at 7082.15 pystones/second - StartHeapsize512: - This machine benchmarks at 7208.07 pystones/second - This machine benchmarks at 7197.7 pystones/second - This machine benchmarks at 7246.38 pystones/second - NewHeuristics: - This machine benchmarks at 6821.28 pystones/second - This machine benchmarks at 6858.71 pystones/second - This machine benchmarks at 6902.9 pystones/second - - - Richards - StartHeapSize2: - Average time per iteration: 5456.21 ms - Average time per iteration: 5529.31 ms - Average time per iteration: 5398.82 ms - StartHeapsize8: - Average time per iteration: 4775.43 ms - Average time per iteration: 4753.25 ms - Average time per iteration: 4781.37 ms - StartHeapsize32: - Average time per iteration: 4554.84 ms - Average time per iteration: 4501.86 ms - Average time per iteration: 4531.59 ms - StartHeapsize128: - 
Average time per iteration: 4329.42 ms - Average time per iteration: 4360.87 ms - Average time per iteration: 4392.81 ms - StartHeapsize512: - Average time per iteration: 4371.72 ms - Average time per iteration: 4399.70 ms - Average time per iteration: 4354.66 ms - NewHeuristics: - Average time per iteration: 4763.56 ms - Average time per iteration: 4803.49 ms - Average time per iteration: 4840.68 ms - - - translate rpystone - time pypy-c translate --text --batch --backendopt --no-compile targetrpystonedalone.py - StartHeapSize2: - real 1m38.459s - user 1m35.582s - sys 0m0.440s - StartHeapsize8: - real 1m35.398s - user 1m33.878s - sys 0m0.376s - StartHeapsize32: - real 1m5.475s - user 1m5.108s - sys 0m0.180s - StartHeapsize128: - real 0m52.941s - user 0m52.395s - sys 0m0.328s - StartHeapsize512: - real 1m3.727s - user 0m50.031s - sys 0m1.240s - NewHeuristics: - real 0m53.449s - user 0m52.771s - sys 0m0.356s - - - docutils - time pypy-c rst2html doc/coding-guide.txt - StartHeapSize2: - real 0m36.125s - user 0m35.562s - sys 0m0.088s - StartHeapsize8: - real 0m32.678s - user 0m31.106s - sys 0m0.084s - StartHeapsize32: - real 0m22.041s - user 0m21.085s - sys 0m0.132s - StartHeapsize128: - real 0m19.350s - user 0m18.653s - sys 0m0.324s - StartHeapsize512: - real 0m19.116s - user 0m17.517s - sys 0m0.620s - NewHeuristics: - real 0m20.990s - user 0m20.109s - sys 0m0.196s - - diff --git a/pypy/doc/config/translation.instrumentctl.txt b/pypy/doc/config/translation.instrumentctl.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrumentctl.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.cc.txt b/pypy/doc/config/translation.cc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cc.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify which C compiler to use. 
diff --git a/pypy/doc/config/translation.backendopt.stack_optimization.txt b/pypy/doc/config/translation.backendopt.stack_optimization.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.stack_optimization.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable the optimized code generation for stack based machine, if the backend support it diff --git a/pypy/doc/config/objspace.std.prebuiltintfrom.txt b/pypy/doc/config/objspace.std.prebuiltintfrom.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.prebuiltintfrom.txt +++ /dev/null @@ -1,1 +0,0 @@ -see :config:`objspace.std.withprebuiltint`. diff --git a/pypy/doc/config/objspace.usemodules.operator.txt b/pypy/doc/config/objspace.usemodules.operator.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.operator.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'operator' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/paper-wishlist.txt b/pypy/doc/discussion/paper-wishlist.txt deleted file mode 100644 --- a/pypy/doc/discussion/paper-wishlist.txt +++ /dev/null @@ -1,27 +0,0 @@ -Things we would like to write papers about -========================================== - -- object space architecture + reflective space -- stackless transformation -- composable coroutines -- jit: - - overview paper - - putting our jit into the context of classical partial evaluation - - a jit technical paper too, probably - -- sandboxing - -Things about which writing a paper would be nice, which need more work first -============================================================================ - -- taint object space -- logic object space - -- jit - - - with some more work: how to deal in a JIT backend with less-that- - full-function compilation unit - - - work in progress (Anto?): our JIT on the JVM - - (later) removing the overhead of features not used, e.g. 
thunk space or - another special space diff --git a/pypy/doc/config/objspace.usemodules.__pypy__.txt b/pypy/doc/config/objspace.usemodules.__pypy__.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.__pypy__.txt +++ /dev/null @@ -1,9 +0,0 @@ -Use the '__pypy__' module. -This module is expected to be working and is included by default. -It contains special PyPy-specific functionality. -For example most of the special functions described in the `object space proxies` -document are in the module. -See the `__pypy__ module documentation`_ for more details. - -.. _`object space proxy`: ../objspace-proxies.html -.. _`__pypy__ module documentation`: ../__pypy__-module.html diff --git a/pypy/doc/config/objspace.std.withmethodcachecounter.txt b/pypy/doc/config/objspace.std.withmethodcachecounter.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcachecounter.txt +++ /dev/null @@ -1,1 +0,0 @@ -Testing/debug option for :config:`objspace.std.withmethodcache`. diff --git a/.hgsubstate b/.hgsubstate deleted file mode 100644 --- a/.hgsubstate +++ /dev/null @@ -1,3 +0,0 @@ -80037 greenlet -80348 lib_pypy/pyrepl -80409 testrunner diff --git a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt b/pypy/doc/config/translation.backendopt.merge_if_blocks.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.merge_if_blocks.txt +++ /dev/null @@ -1,26 +0,0 @@ -This optimization converts parts of flow graphs that result from -chains of ifs and elifs like this into merged blocks. - -By default flow graphing this kind of code:: - - if x == 0: - f() - elif x == 1: - g() - elif x == 4: - h() - else: - j() - -will result in a chain of blocks with two exits, somewhat like this: - -.. image:: unmergedblocks.png - -(reflecting how Python would interpret this code). Running this -optimization will transform the block structure to contain a single -"choice block" with four exits: - -.. 
image:: mergedblocks.png - -This can then be turned into a switch by the C backend, allowing the C -compiler to produce more efficient code. diff --git a/pypy/doc/config/objspace.opcodes.txt b/pypy/doc/config/objspace.opcodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.fork_before.txt b/pypy/doc/config/translation.fork_before.txt deleted file mode 100644 --- a/pypy/doc/config/translation.fork_before.txt +++ /dev/null @@ -1,4 +0,0 @@ -This is an option mostly useful when working on the PyPy toolchain. If you use -it, translate.py will fork before the specified phase. If the translation -crashes after that fork, you can fix the bug in the toolchain, and continue -translation at the fork-point. diff --git a/pypy/doc/discussion/parsing-ideas.txt b/pypy/doc/discussion/parsing-ideas.txt deleted file mode 100644 --- a/pypy/doc/discussion/parsing-ideas.txt +++ /dev/null @@ -1,5 +0,0 @@ -add a way to modularize regular expressions: - -_HEXNUM = "..."; -_DECNUM = "..."; -NUM = "{_HEXNUM}|{_DECNUM}"; diff --git a/pypy/doc/config/objspace.std.withstrbuf.txt b/pypy/doc/config/objspace.std.withstrbuf.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrbuf.txt +++ /dev/null @@ -1,4 +0,0 @@ -Enable "string buffer" objects. - -Similar to "string join" objects, but using a StringBuilder to represent -a string built by repeated application of ``+=``. diff --git a/pypy/doc/config/objspace.usemodules._rawffi.txt b/pypy/doc/config/objspace.usemodules._rawffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._rawffi.txt +++ /dev/null @@ -1,3 +0,0 @@ -An experimental module providing very low-level interface to -C-level libraries, for use when implementing ctypes, not -intended for a direct use at all. 
\ No newline at end of file diff --git a/pypy/doc/getting-started.txt b/pypy/doc/getting-started.txt deleted file mode 100644 --- a/pypy/doc/getting-started.txt +++ /dev/null @@ -1,123 +0,0 @@ -================================== -PyPy - Getting Started -================================== - -.. contents:: -.. sectnum:: - -.. _howtopypy: - -What is PyPy ? -============== - -PyPy is an implementation of the Python_ programming language written in -Python itself, flexible and easy to experiment with. -We target a large variety of platforms, small and large, by providing a -compiler toolsuite that can produce custom Python versions. Platform, memory -and threading models, as well as the JIT compiler itself, are aspects of the -translation process - as opposed to encoding low level details into the -language implementation itself. `more...`_ - - -.. _Python: http://docs.python.org/ref -.. _`more...`: architecture.html - -Just the facts -============== - -Clone the repository --------------------- - -Before you can play with PyPy, you will need to obtain a copy -of the sources. This can be done either by `downloading them -from the download page`_ or by checking them out from the -repository using mercurial. We suggest using mercurial if one -wants to access the current development. - -.. _`downloading them from the download page`: download.html - -If you choose to use mercurial, you must issue the following command on your -command line, DOS box, or terminal:: - - hg clone http://bitbucket.org/pypy/pypy pypy - -If you get an error like this:: - - abort: repository [svn]http://codespeak.net/svn/pypy/build/testrunner not found! - -it probably means that your mercurial version is too old. You need at least -Mercurial 1.6 to clone the PyPy repository. - -This will clone the repository and place it into a directory -named ``pypy``, and will get you the PyPy source in -``pypy/pypy`` and documentation files in ``pypy/pypy/doc``. 
-We try to ensure that the tip is always stable, but it might -occasionally be broken. You may want to check out `our nightly tests:`_ -find a revision (12-chars alphanumeric string, e.g. "963e808156b3") -that passed at least the -``{linux32}`` tests (corresponding to a ``+`` sign on the -line ``success``) and then, in your cloned repository, switch to this revision -using:: - - hg up -r XXXXX - -where XXXXX is the revision id. - -.. _`our nightly tests:`: http://buildbot.pypy.org/summary?branch= - -If you want to commit to our repository on bitbucket, you will have to -install subversion in addition to mercurial. - -Installing using virtualenv ---------------------------- - -It is often convenient to run pypy inside a virtualenv. To do this -you need a recent version of virtualenv -- 1.5 or greater. You can -then install PyPy both from a precompiled tarball or from a mercurial -checkout:: - - # from a tarball - $ virtualenv -p /opt/pypy-c-jit-41718-3fb486695f20-linux/bin/pypy my-pypy-env - - # from the mercurial checkout - $ virtualenv -p /path/to/pypy/pypy/translator/goal/pypy-c my-pypy-env - -Note that bin/python is now a symlink to bin/pypy. - - -Where to go from here ----------------------- - -After you successfully manage to get PyPy's source you can read more about: - - - `Building and using PyPy's Python interpreter`_ - - `Learning more about the translation toolchain and how to develop (with) PyPy`_ - -.. _`Building and using PyPy's Python interpreter`: getting-started-python.html -.. _`Learning more about the translation toolchain and how to develop (with) PyPy`: getting-started-dev.html - - -Understanding PyPy's architecture ---------------------------------- - -For in-depth information about architecture and coding documentation -head over to the `documentation section`_ where you'll find lots of -interesting information. Additionally, in true hacker spirit, you -may just `start reading sources`_ . - -.. _`documentation section`: docindex.html -.. 
_`start reading sources`: getting-started-dev.html#start-reading-sources - -Filing bugs or feature requests -------------------------------- - -You may file `bug reports`_ on our issue tracker which is -also accessible through the 'issues' top menu of -the PyPy website. `Using the development tracker`_ has -more detailed information on specific features of the tracker. - -.. _`Using the development tracker`: coding-guide.html#using-development-tracker -.. _bug reports: https://codespeak.net/issue/pypy-dev/ - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.std.withmethodcache.txt b/pypy/doc/config/objspace.std.withmethodcache.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmethodcache.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable method caching. See the section "Method Caching" in `Standard -Interpreter Optimizations <../interpreter-optimizations.html#method-caching>`__. diff --git a/pypy/doc/config/objspace.usemodules._random.txt b/pypy/doc/config/objspace.usemodules._random.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._random.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_random' module. It is necessary to use the module "random" from the standard library. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/removing-stable-compiler.txt b/pypy/doc/discussion/removing-stable-compiler.txt deleted file mode 100644 --- a/pypy/doc/discussion/removing-stable-compiler.txt +++ /dev/null @@ -1,22 +0,0 @@ -February 28th, 2006 - -While implementing conditional expressions from 2.5 we had to change -the stable compiler in order to keep tests from breaking. While using -stable compiler as a baseline made sense when the ast compiler was -new, it is less and less true as new grammar changes are introduced. - -Options include - -1. Freezing the stable compiler at grammar 2.4. - -2. 
Capture AST output from the stable compiler and use that explicitly -in current tests instead of regenerating them every time, primarily -because it allows us to change the grammar without changing the stable -compiler. - - -In either case, AST production tests for new grammar changes could be -written manually, which is less effort than fixing the stable -compiler (which itself isn't really tested anyway). - -Discussion by Arre, Anders L., Stuart Williams diff --git a/pypy/doc/config/translation.backendopt.txt b/pypy/doc/config/translation.backendopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -This group contains options about various backend optimization passes. Most of -them are described in the `EU report about optimization`_ - -.. _`EU report about optimization`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - diff --git a/pypy/doc/config/index.txt b/pypy/doc/config/index.txt deleted file mode 100644 --- a/pypy/doc/config/index.txt +++ /dev/null @@ -1,52 +0,0 @@ -============================== -Configuration Options for PyPy -============================== - -This directory contains documentation for the many `configuration`_ -options that can be used to affect PyPy's behaviour. There are two -main classes of option, `object space options`_ and `translation -options`_. - -There are two main entry points that accept options: ``py.py``, which -implements Python on top of another Python interpreter and accepts all -the `object space options`_: - -.. parsed-literal:: - - ./py.py <`objspace options`_> - -and the ``translate.py`` translation entry -point which takes arguments of this form: - -.. parsed-literal:: - - ./translate.py <`translation options`_> - -For the common case of ```` being ``targetpypystandalone.py``, -you can then pass the `object space options`_ after -``targetpypystandalone.py``, i.e. like this: - -.. 
parsed-literal:: - - ./translate.py <`translation options`_> targetpypystandalone.py <`objspace options`_> - -There is an `overview`_ of all command line arguments that can be -passed in either position. - -Many of the more interesting object space options enable optimizations, -which are described in `Standard Interpreter Optimizations`_, or allow -the creation of objects that can barely be imagined in CPython, which -are documented in `What PyPy can do for your objects`_. - -The following diagram gives some hints about which PyPy features work together -with which other PyPy features: - -.. image:: ../image/compat-matrix.png - -.. _`configuration`: ../configuration.html -.. _`objspace options`: commandline.html#objspace -.. _`object space options`: commandline.html#objspace -.. _`translation options`: commandline.html#translation -.. _`overview`: commandline.html -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html -.. _`What PyPy can do for your objects`: ../objspace-proxies.html diff --git a/pypy/doc/config/translation.jit_profiler.txt b/pypy/doc/config/translation.jit_profiler.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_profiler.txt +++ /dev/null @@ -1,1 +0,0 @@ -Integrate profiler support into the JIT diff --git a/pypy/doc/conftest.py b/pypy/doc/conftest.py deleted file mode 100644 --- a/pypy/doc/conftest.py +++ /dev/null @@ -1,29 +0,0 @@ -import py - -from pypy.config.makerestdoc import register_config_role -docdir = py.path.local(__file__).dirpath() - -pytest_plugins = "pypy.doc.pytest_restdoc" - -def pytest_addoption(parser): - group = parser.getgroup("pypy-doc options") - group.addoption('--pypy-doctests', action="store_true", - dest="pypy_doctests", default=False, - help="enable doctests in .txt files") - group.addoption('--generate-redirections', - action="store_true", dest="generateredirections", - default=True, help="Generate redirecting HTML files") - -def pytest_configure(config): - 
register_config_role(docdir) - -def pytest_doctest_prepare_content(content): - if not py.test.config.getvalue("pypy_doctests"): - py.test.skip("specify --pypy-doctests to run doctests") - l = [] - for line in content.split("\n"): - if line.find('>>>>') != -1: - line = "" - l.append(line) - return "\n".join(l) - diff --git a/pypy/doc/config/objspace.usemodules.cmath.txt b/pypy/doc/config/objspace.usemodules.cmath.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cmath.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'cmath' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.mmap.txt b/pypy/doc/config/objspace.usemodules.mmap.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.mmap.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'mmap' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.simplifying.txt b/pypy/doc/config/translation.simplifying.txt deleted file mode 100644 --- a/pypy/doc/config/translation.simplifying.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._socket.txt b/pypy/doc/config/objspace.usemodules._socket.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._socket.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the '_socket' module. - -This is our implementation of '_socket', the Python builtin module -exposing socket primitives, which is wrapped and used by the standard -library 'socket.py' module. It is based on `rffi`_. - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.backend.txt b/pypy/doc/config/translation.backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backend.txt +++ /dev/null @@ -1,3 +0,0 @@ -Which backend to use when translating, see `translation documentation`_. - -.. 
_`translation documentation`: ../translation.html diff --git a/pypy/doc/config/translation.force_make.txt b/pypy/doc/config/translation.force_make.txt deleted file mode 100644 --- a/pypy/doc/config/translation.force_make.txt +++ /dev/null @@ -1,1 +0,0 @@ -Force executing makefile instead of using platform. diff --git a/pypy/doc/config/translation.vanilla.txt b/pypy/doc/config/translation.vanilla.txt deleted file mode 100644 --- a/pypy/doc/config/translation.vanilla.txt +++ /dev/null @@ -1,2 +0,0 @@ -Try to make the resulting compiled program as portable (=movable to another -machine) as possible. Which is not much. diff --git a/pypy/doc/config/objspace.usemodules._bisect.txt b/pypy/doc/config/objspace.usemodules._bisect.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._bisect.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_bisect' module. -Used, optionally, by the 'bisect' standard lib module. This module is expected to be working and is included by default. - - diff --git a/pypy/doc/config/translation.jit_backend.txt b/pypy/doc/config/translation.jit_backend.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit_backend.txt +++ /dev/null @@ -1,2 +0,0 @@ -Choose the backend to use for the JIT. -By default, this is the best backend for the current platform. diff --git a/pypy/doc/config/commandline.txt b/pypy/doc/config/commandline.txt deleted file mode 100644 --- a/pypy/doc/config/commandline.txt +++ /dev/null @@ -1,33 +0,0 @@ - -.. contents:: - - -.. _objspace: -.. _`overview-of-command-line-options-for-objspace`: - -------------------------------- -PyPy Python interpreter options -------------------------------- - -The following options can be used after ``translate.py -targetpypystandalone`` or as options to ``py.py``. - -.. GENERATE: objspace - - -.. _translation: -.. 
_`overview-of-command-line-options-for-translation`: - ---------------------------- -General translation options ---------------------------- - -The following are options of ``translate.py``. They must be -given before the ``targetxxx`` on the command line. - -* `--opt -O:`__ set the optimization level `[0, 1, size, mem, 2, 3]` - -.. __: opt.html - -.. GENERATE: translation - diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). - -.. internal diff --git a/pypy/doc/config/objspace.usemodules._sha.txt b/pypy/doc/config/objspace.usemodules._sha.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sha.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in _'sha' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/objspace.usemodules.time.txt b/pypy/doc/config/objspace.usemodules.time.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.time.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the 'time' module. - -Obsolete; use :config:`objspace.usemodules.rctime` for our up-to-date version -of the application-level 'time' module. diff --git a/pypy/doc/config/objspace.translationmodules.txt b/pypy/doc/config/objspace.translationmodules.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.translationmodules.txt +++ /dev/null @@ -1,1 +0,0 @@ -This option enables all modules which are needed to translate PyPy using PyPy. 
diff --git a/pypy/doc/discussion/cmd-prompt-translation.txt b/pypy/doc/discussion/cmd-prompt-translation.txt deleted file mode 100644 --- a/pypy/doc/discussion/cmd-prompt-translation.txt +++ /dev/null @@ -1,18 +0,0 @@ - -t = Translation(entry_point[,]) -t.annotate([]) -t.rtype([]) -t.backendopt[_]([]) -t.source[_]([]) -f = t.compile[_]([]) - -and t.view(), t.viewcg() - - = c|llvm (for now) -you can skip steps - - = argtypes (for annotation) plus - keyword args: gc=...|policy= etc - - - diff --git a/pypy/doc/config/objspace.usemodules._hashlib.txt b/pypy/doc/config/objspace.usemodules._hashlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._hashlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_hashlib' module. -Used by the 'hashlib' standard lib module, and indirectly by the various cryptographic libs. This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/use_case_of_logic.txt b/pypy/doc/discussion/use_case_of_logic.txt deleted file mode 100644 --- a/pypy/doc/discussion/use_case_of_logic.txt +++ /dev/null @@ -1,75 +0,0 @@ -Use cases for a combination of Logic and Object Oriented programming approach -------------------------------------------------------------------------------- - -Workflows -========= - -Defining the next state by solving certain constraints. The more -general term might be State machines. - -Business Logic -============== - -We define Business Logic as expressing consistency (as an example) on -a set of objects in a business application. - -For example checking the consistency of a calculation before -committing the changes. - -The domain is quite rich in example of uses of Business Logic. - -Datamining -=========== - -An example is Genetic sequence matching. - -Databases -========= - -Validity constraints for the data can be expressed as constraints. - -Constraints can be used to perform type inference when querying the -database. 
- -Semantic web -============= - -The use case is like the database case, except the ontology language -it self is born out of Descriptive Logic - - -User Interfaces -=============== - -We use rules to describe the layout and visibility constraints of -elements that are to be displayed on screen. The rule can also help -describing how an element is to be displayed depending on its state -(for instance, out of bound values can be displayed in a different -colour). - -Configuration -============== - -User configuration can use information inferred from : the current -user, current platforms , version requirements, ... - -The validity of the configuration can be checked with the constraints. - - -Scheduling and planning -======================== - -Timetables, process scheduling, task scheduling. - -Use rules to determine when to execute tasks (only start batch, if load -is low, and previous batch is finished. - -Load sharing. - -Route optimization. Planning the routes of a technician based on tools -needed and such - -An example is scheduling a conference like Europython see: - -http://lists.logilab.org/pipermail/python-logic/2005-May/000107.html - diff --git a/pypy/doc/config/objspace.usemodules.gc.txt b/pypy/doc/config/objspace.usemodules.gc.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.gc.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the 'gc' module. -This module is expected to be working and is included by default. -Note that since the gc module is highly implementation specific, it contains -only the ``collect`` function in PyPy, which forces a collection when compiled -with the framework or with Boehm. diff --git a/pypy/doc/discussion/ctypes_todo.txt b/pypy/doc/discussion/ctypes_todo.txt deleted file mode 100644 --- a/pypy/doc/discussion/ctypes_todo.txt +++ /dev/null @@ -1,34 +0,0 @@ -Few ctypes-related todo points: - -* Write down missing parts and port all tests, eventually adding - additional tests. 
- - - for unions and structs, late assignment of _fields_ is somewhat buggy. - Tests about behavior of getattr working properly on instances - are missing or not comprehensive. Some tests are skipped because I didn't - understand the details. - - - _fields_ can be tuples too as well as lists - - - restype being a function is not working. - - - there are features, which we don't support like buffer() and - array() protocols. - - - are the _CData_value return lifetime/gc semantics correct? - - - for some ABIs we will need completely filled ffitypes to do the - right thing for passing structures by value, we are now passing enough - information to rawffi that it should be possible to construct such precise - ffitypes in most cases - - - bitfields are not implemented - - - byteorder is not implemented - -* as all stuff is applevel, we cannot have it really fast right now. - -* we shall at least try to approach ctypes from the point of the jit - backends (at least on platforms that we support). The thing is that - we need a lot broader support of jit backends for different argument - passing in order to do it. diff --git a/pypy/doc/config/objspace.std.withsmalllong.txt b/pypy/doc/config/objspace.std.withsmalllong.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmalllong.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable "small longs", an additional implementation of the Python -type "long", implemented with a C long long. It is mostly useful -on 32-bit; on 64-bit, a C long long is the same as a C long, so -its usefulness is limited to Python objects of type "long" that -would anyway fit in an "int". diff --git a/pypy/doc/config/objspace.usemodules._weakref.txt b/pypy/doc/config/objspace.usemodules._weakref.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._weakref.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use the '_weakref' module, necessary for the standard lib 'weakref' module. -PyPy's weakref implementation is not completely stable yet. 
The first -difference to CPython is that weak references only go away after the next -garbage collection, not immediately. The other problem seems to be that under -certain circumstances (that we have not determined) weak references keep the -object alive. diff --git a/pypy/doc/config/objspace.usemodules.posix.txt b/pypy/doc/config/objspace.usemodules.posix.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.posix.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the essential 'posix' module. -This module is essential, included by default and cannot be removed (even when -specified explicitly, the option gets overridden later). diff --git a/pypy/doc/config/translation.backendopt.really_remove_asserts.txt b/pypy/doc/config/translation.backendopt.really_remove_asserts.txt deleted file mode 100644 diff --git a/pypy/doc/discussion/thoughts_string_interning.txt b/pypy/doc/discussion/thoughts_string_interning.txt deleted file mode 100644 --- a/pypy/doc/discussion/thoughts_string_interning.txt +++ /dev/null @@ -1,211 +0,0 @@ -String Interning in PyPy -======================== - -A few thoughts about string interning. CPython gets a remarkable -speed-up by interning strings. Interned are all builtin string -objects and all strings used as names. The effect is that when -a string lookup is done during instance attribute access, -the dict lookup method will find the string always by identity, -saving the need to do a string comparison. - -Interned Strings in CPython ---------------------------- - -CPython keeps an internal dictionary named ``interned`` for all of these -strings. It contains the string both as key and as value, which means -there are two extra references in principle. Upto Version 2.2, interned -strings were considered immortal. Once they entered the ``interned`` dict, -nothing could revert this memory usage. - -Starting with Python 2.3, interned strings became mortal by default. 
-The reason was less memory usage for strings that have no external -reference any longer. This seems to be a worthwhile enhancement. -Interned strings that are really needed always have a real reference. -Strings which are interned for temporary reasons get a big speed up -and can be freed after they are no longer in use. - -This was implemented by making the ``interned`` dictionary a weak dict, -by lowering the refcount of interned strings by 2. The string deallocator -got extra handling to look into the ``interned`` dict when a string is deallocated. -This is supported by the state variable on string objects which tells -whether the string is not interned, immortal or mortal. - -Implementation problems for PyPy --------------------------------- - -- The CPython implementation makes explicit use of the refcount to handle - the weak-dict behavior of ``interned``. PyPy does not expose the implementation - of object aliveness. Special handling would be needed to simulate mortal - behavior. A possible but expensive solution would be to use a real - weak dictionary. Another way is to add a special interface to the backend - that allows either the two extra references to be reset, or for the - boehm collector to exclude the ``interned`` dict from reference tracking. - -- PyPy implements quite complete internal strings, as opposed to CPython - which always uses its "applevel" strings. It also supports low-level - dictionaries. This adds some complication to the issue of interning. - Additionally, the interpreter currently handles attribute access - by calling wrap(str) on the low-level attribute string when executing - frames. This implies that we have to primarily intern low-level strings - and cache the created string objects on top of them. - A possible implementation would use a dict with ll string keys and the - string objects as values. 
In order to save the extra dict lookup, we also - could consider to cache the string object directly on a field of the rstr, - which of course adds some extra cost. Alternatively, a fast id-indexed - extra dictionary can provide the mapping from rstr to interned string object. - But for efficiency reasons, it is anyway necessary to put an extra flag about - interning on the strings. Flagging this by putting the string object itself - as the flag might be acceptable. A dummyobject can be used if the interned - rstr is not exposed as an interned string object. - -Update: a reasonably simple implementation -------------------------------------------- - -Instead of the complications using the stringobject as a property of an rstr -instance, I propose to special case this kind of dictionary (mapping rstr -to stringobject) and to put an integer ``interned`` field into the rstr. The -default is -1 for not interned. Non-negative values are the direct index -of this string into the interning dict. That is, we grow an extra function -that indexes the dict by slot number of the dict table and gives direct -access to its value. The dictionary gets special handling on dict_resize, -to recompute the slot numbers of the interned strings. ATM I'd say we leave -the strings immortal and support mortality later when we have a cheap -way to express this (less refcount, exclusion from Boehm, whatever). - -A prototype brute-force patch ------------------------------ - -In order to get some idea how efficient string interning is at the moment, -I implemented a quite crude version of interning. 
I patched space.wrap -to call this intern_string instead of W_StringObject:: - - def intern_string(space, str): - if we_are_translated(): - _intern_ids = W_StringObject._intern_ids - str_id = id(str) - w_ret = _intern_ids.get(str_id, None) - if w_ret is not None: - return w_ret - _intern = W_StringObject._intern - if str not in _intern: - _intern[str] = W_StringObject(space, str) - W_StringObject._intern_keep[str_id] = str - _intern_ids[str_id] = w_ret = _intern[str] - return w_ret - else: - return W_StringObject(space, str) - -This is no general solution at all, since it a) does not provide -interning of rstr and b) interns every app-level string. The -implementation is also by far not as efficient as it could be, -because it utilizes an extra dict _intern_ids which maps the -id of the rstr to the string object, and a dict _intern_keep to -keep these ids alive. - -With just a single _intern dict from rstr to string object, the -overall performance degraded slightly instead of an advantage. -The triple dict patch accelerates richards by about 12 percent. -Since it still has the overhead of handling the extra dicts, -I guess we can expect twice the acceleration if we add proper -interning support. - -The resulting estimated 24 % acceleration is still not enough -to justify an implementation right now. - -Here the results of the richards benchmark:: - - D:\pypy\dist\pypy\translator\goal>pypy-c-17516.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c-17516.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. 
- Total time for 1 iterations: 38 secs - Average time for iterations: 38885 ms - - D:\pypy\dist\pypy\translator\goal>pypy-c.exe -c "from richards import *;Richards.iterations=1;main()" - debug: entry point starting - debug: argv -> pypy-c.exe - debug: argv -> -c - debug: argv -> from richards import *;Richards.iterations=1;main() - Richards benchmark (Python) starting... [] - finished. - Total time for 1 iterations: 34 secs - Average time for iterations: 34388 ms - - D:\pypy\dist\pypy\translator\goal> - - -This was just an exercise to get an idea. For sure this is not to be checked in. -Instead, I'm attaching the simple patch here for reference. -:: - - Index: objspace/std/objspace.py - =================================================================== - --- objspace/std/objspace.py (revision 17526) - +++ objspace/std/objspace.py (working copy) - @@ -243,6 +243,9 @@ - return self.newbool(x) - return W_IntObject(self, x) - if isinstance(x, str): - + # XXX quick speed testing hack - + from pypy.objspace.std.stringobject import intern_string - + return intern_string(self, x) - return W_StringObject(self, x) - if isinstance(x, unicode): - return W_UnicodeObject(self, [unichr(ord(u)) for u in x]) # xxx - Index: objspace/std/stringobject.py - =================================================================== - --- objspace/std/stringobject.py (revision 17526) - +++ objspace/std/stringobject.py (working copy) - @@ -18,6 +18,10 @@ - class W_StringObject(W_Object): - from pypy.objspace.std.stringtype import str_typedef as typedef - - + _intern_ids = {} - + _intern_keep = {} - + _intern = {} - + - def __init__(w_self, space, str): - W_Object.__init__(w_self, space) - w_self._value = str - @@ -32,6 +36,21 @@ - - registerimplementation(W_StringObject) - - +def intern_string(space, str): - + if we_are_translated(): - + _intern_ids = W_StringObject._intern_ids - + str_id = id(str) - + w_ret = _intern_ids.get(str_id, None) - + if w_ret is not None: - + return w_ret - + _intern 
= W_StringObject._intern - + if str not in _intern: - + _intern[str] = W_StringObject(space, str) - + W_StringObject._intern_keep[str_id] = str - + _intern_ids[str_id] = w_ret = _intern[str] - + return w_ret - + else: - + return W_StringObject(space, str) - - def _isspace(ch): - return ord(ch) in (9, 10, 11, 12, 13, 32) - Index: objspace/std/stringtype.py - =================================================================== - --- objspace/std/stringtype.py (revision 17526) - +++ objspace/std/stringtype.py (working copy) - @@ -47,6 +47,10 @@ - if space.is_true(space.is_(w_stringtype, space.w_str)): - return w_obj # XXX might be reworked when space.str() typechecks - value = space.str_w(w_obj) - + # XXX quick hack to check interning effect - + w_obj = W_StringObject._intern.get(value, None) - + if w_obj is not None: - + return w_obj - w_obj = space.allocate_instance(W_StringObject, w_stringtype) - W_StringObject.__init__(w_obj, space, value) - return w_obj - -ciao - chris diff --git a/pypy/doc/discussion/compiled-swamp.txt b/pypy/doc/discussion/compiled-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/compiled-swamp.txt +++ /dev/null @@ -1,14 +0,0 @@ - -We've got huge swamp of compiled pypy-c's used for: - -* benchmarks -* tests -* compliance tests -* play1 -* downloads -* ... - -We've got build tool, which we don't use, etc. etc. - -Idea is to formalize it more or less, so we'll have single script -to make all of this work, upload builds to the web page etc. diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal.txt +++ /dev/null @@ -1,10 +0,0 @@ -Try to inline flowgraphs based on whether doing so would enable malloc -removal (:config:`translation.backendopt.mallocs`.) by eliminating -calls that result in escaping. 
This is an experimental optimization, -also right now some eager inlining is necessary for helpers doing -malloc itself to be inlined first for this to be effective. -This option enable also an extra subsequent malloc removal phase. - -Callee flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.clever_malloc_removal_threshold` ). diff --git a/pypy/doc/config/objspace.usemodules.token.txt b/pypy/doc/config/objspace.usemodules.token.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.token.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'token' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.secondaryentrypoints.txt b/pypy/doc/config/translation.secondaryentrypoints.txt deleted file mode 100644 --- a/pypy/doc/config/translation.secondaryentrypoints.txt +++ /dev/null @@ -1,1 +0,0 @@ -Enable secondary entrypoints support list. Needed for cpyext module. diff --git a/pypy/doc/config/objspace.lonepycfiles.txt b/pypy/doc/config/objspace.lonepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.lonepycfiles.txt +++ /dev/null @@ -1,16 +0,0 @@ -If turned on, PyPy accepts to import a module ``x`` if it finds a -file ``x.pyc`` even if there is no file ``x.py``. - -This is the way that CPython behaves, but it is disabled by -default for PyPy because it is a common cause of issues: most -typically, the ``x.py`` file is removed (manually or by a -version control system) but the ``x`` module remains -accidentally importable because the ``x.pyc`` file stays -around. 
- -The usual reason for wanting this feature is to distribute -non-open-source Python programs by distributing ``pyc`` files -only, but this use case is not practical for PyPy at the -moment because multiple versions of PyPy compiled with various -optimizations might be unable to load each other's ``pyc`` -files. diff --git a/pypy/doc/discussion/distribution.txt b/pypy/doc/discussion/distribution.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution.txt +++ /dev/null @@ -1,34 +0,0 @@ -=================================================== -(Semi)-transparent distribution of RPython programs -=================================================== - -Some (rough) ideas how I see distribution ------------------------------------------ - -The main point about it, is to behave very much like JIT - not -to perform distribution on Python source code level, but instead -perform distribution of RPython source, and eventually perform -distribution of interpreter at the end. - -This attempt gives same advantages as off-line JIT (any RPython based -interpreter, etc.) and gives nice field to play with different -distribution heuristics. This also makes eventually nice possibility -of integrating JIT with distribution, thus allowing distribution -heuristics to have more information that they might have otherwise and -as well with specializing different nodes in performing different tasks. - -Flow graph level ----------------- - -Probably the best place to perform distribution attempt is to insert -special graph distributing operations into low-level graphs (either lltype -or ootype based), which will allow distribution heuristic to decide -on entrypoint to block/graph/some other structure??? what variables/functions -are accessed inside some part and if it's worth transferring it over wire. - -Backend level -------------- - -Backends will need explicit support for distribution of any kind. 
Basically -it should be possible for backend to remotely call block/graph/structure -in any manner (it should strongly depend on backend possibilities). diff --git a/pypy/doc/config/objspace.usemodules.binascii.txt b/pypy/doc/config/objspace.usemodules.binascii.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.binascii.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the RPython 'binascii' module. diff --git a/pypy/doc/config/translation.type_system.txt b/pypy/doc/config/translation.type_system.txt deleted file mode 100644 --- a/pypy/doc/config/translation.type_system.txt +++ /dev/null @@ -1,4 +0,0 @@ -Which type system to use when rtyping_. This option should not be set -explicitly. - -.. _rtyping: ../rtyper.html diff --git a/pypy/doc/discussion/distribution-newattempt.txt b/pypy/doc/discussion/distribution-newattempt.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-newattempt.txt +++ /dev/null @@ -1,65 +0,0 @@ -Distribution: -============= - -This is outcome of Armin's and Samuele's ideas and our discussion, -kept together by fijal. - -The communication layer: -======================== - -Communication layer is the layer which takes care of explicit -communication. Suppose we do have two (or more) running interpreters -on different machines or in different processes. Let's call it *local side* -(the one on which we're operating) and *remote side*. - -What we want to achieve is to have a transparent enough layer on local -side, which does not allow user to tell the objects local and remote apart -(despite __pypy__.internal_repr, which I would consider cheating). - -Because in pypy we have possibility to have different implementations -for types (even builtin ones), we can use that mechanism to implement -our simple RMI. - -The idea is to provide thin layer for accessing remote object, lays as -different implementation for any possible object. 
So if you perform any -operation on an object locally, which is really a remote object, you -perform all method lookup and do a call on it. Than proxy object -redirects the call to app-level code (socket, execnet, whatever) which -calls remote interpreter with given parameters. It's important that we -can always perform such a call, even if types are not marshallable, because -we can provide remote proxies of local objects to remote side in that case. - -XXX: Need to explain in a bit more informative way. - -Example: --------- - -Suppose we do have ``class A`` and instance ``a = A()`` on remote side -and we want to access this from a local side. We make an object of type -``object`` and we do copy -``__dict__`` keys with values, which correspond to objects on the remote -side (have the same type to user) but they've got different implementation. -(Ie. method calling will look like quite different). - -Even cooler example: --------------------- - -Reminding hpk's example of 5-liner remote file server. With this we make:: - - f = remote_side.import(open) - f("file_name").read() - -Implementation plans: ---------------------- - -We need: - -* app-level primitives for having 'remote proxy' accessible - -* some "serialiser" which is not truly serialising stuff, but making - sure communication will go. - -* interp-level proxy object which emulates every possible object which - delegates operations to app-level primitive proxy. - -* to make it work.... diff --git a/pypy/doc/config/objspace.geninterp.txt b/pypy/doc/config/objspace.geninterp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.geninterp.txt +++ /dev/null @@ -1,4 +0,0 @@ -This option enables `geninterp`_. This will usually make the PyPy interpreter -significantly faster (but also a bit bigger). - -.. 
_`geninterp`: ../geninterp.html diff --git a/pypy/doc/config/objspace.usemodules.oracle.txt b/pypy/doc/config/objspace.usemodules.oracle.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.oracle.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'oracle' module. -This module is off by default, requires oracle client installed. diff --git a/pypy/doc/discussion/distribution-implementation.txt b/pypy/doc/discussion/distribution-implementation.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-implementation.txt +++ /dev/null @@ -1,91 +0,0 @@ -===================================================== -Random implementation details of distribution attempt -===================================================== - -.. contents:: -.. sectnum:: - -This document attempts to broaden this `dist thoughts`_. - -.. _`dist thoughts`: distribution-newattempt.html - -Basic implementation: ---------------------- - -First we do split objects into value-only primitives (like int) and other. -Basically immutable builtin types which cannot contain user-level objects -(int, float, long, str, None, etc.) will be always transferred as value-only -objects (having no states etc.). The every other object (user created classes, -instances, modules, lists, tuples, etc. etc.) are always executed by reference. -(Of course if somebody wants to ie. copy the instance, he can marshal/pickle -this to string and send, but it's outside the scope of this attempt). Special -case might be immutable data structure (tuple, frozenset) containing simple -types (this becomes simple type). - -XXX: What to do with code types? Marshalling them and sending seems to have no -sense. Remote execution? Local execution with remote f_locals and f_globals? - -Every remote object has got special class W_RemoteXXX where XXX is interp-level -class implementing this object. 
W_RemoteXXX implements all the operations -by using special app-level code that sends method name and arguments over the wire -(arguments might be either simple objects which are simply send over the app-level -code or references to local objects). - -So the basic scheme would look like:: - - remote_ref = remote("Object reference") - remote_ref.any_method() - -``remote_ref`` in above example looks like normal python object to user, -but is implemented differently (W_RemoteXXX), and uses app-level proxy -to forward each interp-level method call. - -Abstraction layers: -------------------- - -In this section we define remote side as a side on which calls are -executed and local side is the one on which calls are run. - -* Looking from the local side, first thing that we see is object - which looks like normal object (has got the same interp-level typedef) - but has got different implementation. Basically this is the shallow copy - of remote object (however you define shallow, it's up to the code which - makes the copy. Basically the copy which can be marshalled or send over - the wire or saved for future purpose). This is W_RemoteXXX where XXX is - real object name. Some operations on that object requires accessing remote - side of the object, some might not need such (for example remote int - is totally the same int as local one, it could not even be implemented - differently). - -* For every interp-level operation, which accesses internals that are not - accessible at the local side, (basically all attribute accesses which - are accessing things that are subclasses of W_Object) we provide special - W_Remote version, which downloads necessary object when needed - (if accessed). This is the same as normal W_RemoteXXX (we know the type!) - but not needed yet. - -* From the remote point of view, every exported object which needs such - has got a local appropriate storage W_LocalXXX where XXX is a type - by which it could be accessed from a wire. 
- -The real pain: --------------- - -For every attribute access when we get W_RemoteXXX, we need to check -the download flag - which sucks a bit. (And we have to support it somehow -in annotator, which sucks a lot). The (some) idea is to wrap all the methods -with additional checks, but that's both unclear and probably not necessary. - -XXX If we can easily change underlying implementation of an object, than -this might become way easier. Right now I'll try to have it working and -thing about RPython later. - -App-level remote tool: ----------------------- - -For purpose of app-level tool which can transfer the data (well, socket might -be enough, but suppose I want to be more flexible), I would use `py.execnet`_, -probably using some of the Armin's hacks to rewrite it using greenlets instead -of threads. - -.. _`py.execnet`: http://codespeak.net/py/current/doc/execnet.html diff --git a/pypy/doc/config/objspace.std.withtypeversion.txt b/pypy/doc/config/objspace.std.withtypeversion.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtypeversion.txt +++ /dev/null @@ -1,6 +0,0 @@ -This (mostly internal) option enables "type versions": Every type object gets an -(only internally visible) version that is updated when the type's dict is -changed. This is e.g. used for invalidating caches. It does not make sense to -enable this option alone. - -.. internal diff --git a/pypy/doc/config/translation.cli.trace_calls.txt b/pypy/doc/config/translation.cli.trace_calls.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.trace_calls.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal. Debugging aid for the CLI backend. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.struct.txt b/pypy/doc/config/objspace.usemodules.struct.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.struct.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in 'struct' module. -This module is expected to be working and is included by default. 
-There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/architecture.txt b/pypy/doc/architecture.txt deleted file mode 100644 --- a/pypy/doc/architecture.txt +++ /dev/null @@ -1,264 +0,0 @@ -================================================== -PyPy - Goals and Architecture Overview -================================================== - -.. contents:: -.. sectnum:: - -This document gives an overview of the goals and architecture of PyPy. -See `getting started`_ for a practical introduction and starting points. - -Mission statement -==================== - -We aim to provide: - - * a common translation and support framework for producing - implementations of dynamic languages, emphasizing a clean - separation between language specification and implementation - aspects. - - * a compliant, flexible and fast implementation of the Python_ Language - using the above framework to enable new advanced features without having - to encode low level details into it. - -By separating concerns in this way, we intend for our implementation -of Python - and other dynamic languages - to become robust against almost -all implementation decisions, including target platform, memory and -threading models, optimizations applied, up to to the point of being able to -automatically *generate* Just-in-Time compilers for dynamic languages. - -Conversely, our implementation techniques, including the JIT compiler -generator, should become robust against changes in the languages -implemented. - - -High Level Goals -============================= - -PyPy - the Translation Framework ------------------------------------------------ - -Traditionally, language interpreters are written in a target platform language -like C/Posix, Java or C#. Each such implementation fundamentally provides -a mapping from application source code to the target environment. 
One of -the goals of the "all-encompassing" environments, like the .NET framework -and to some extent the Java virtual machine, is to provide standardized -and higher level functionalities in order to support language implementers -for writing language implementations. - -PyPy is experimenting with a more ambitious approach. We are using a -subset of the high-level language Python, called RPython_, in which we -write languages as simple interpreters with few references to and -dependencies on lower level details. Our translation framework then -produces a concrete virtual machine for the platform of our choice by -inserting appropriate lower level aspects. The result can be customized -by selecting other feature and platform configurations. - -Our goal is to provide a possible solution to the problem of language -implementers: having to write ``l * o * p`` interpreters for ``l`` -dynamic languages and ``p`` platforms with ``o`` crucial design -decisions. PyPy aims at having any one of these parameters changeable -independently from each other: - -* ``l``: the language that we analyze can be evolved or entirely replaced; - -* ``o``: we can tweak and optimize the translation process to produce - platform specific code based on different models and trade-offs; - -* ``p``: we can write new translator back-ends to target different - physical and virtual platforms. - -By contrast, a standardized target environment - say .NET - -enforces ``p=1`` as far as it's concerned. This helps making ``o`` a -bit smaller by providing a higher-level base to build upon. Still, -we believe that enforcing the use of one common environment -is not necessary. PyPy's goal is to give weight to this claim - at least -as far as language implementation is concerned - showing an approach -to the ``l * o * p`` problem that does not rely on standardization. 
- -The most ambitious part of this goal is to `generate Just-In-Time -Compilers`_ in a language-independent way, instead of only translating -the source interpreter into an interpreter for the target platform. -This is an area of language implementation that is commonly considered -very challenging because of the involved complexity. - - -PyPy - the Python Interpreter --------------------------------------------- - -Our main motivation for developing the translation framework is to -provide a full featured, customizable, fast_ and `very compliant`_ Python -implementation, working on and interacting with a large variety of -platforms and allowing the quick introduction of new advanced language -features. - -This Python implementation is written in RPython as a relatively simple -interpreter, in some respects easier to understand than CPython, the C -reference implementation of Python. We are using its high level and -flexibility to quickly experiment with features or implementation -techniques in ways that would, in a traditional approach, require -pervasive changes to the source code. For example, PyPy's Python -interpreter can optionally provide lazily computed objects - a small -extension that would require global changes in CPython. Another example -is the garbage collection technique: changing CPython to use a garbage -collector not based on reference counting would be a major undertaking, -whereas in PyPy it is an issue localized in the translation framework, -and fully orthogonal to the interpreter source code. - - -PyPy Architecture -=========================== - -As you would expect from a project implemented using ideas from the world -of `Extreme Programming`_, the architecture of PyPy has evolved over time -and continues to evolve. Nevertheless, the high level architecture is -stable. As described above, there are two rather independent basic -subsystems: the `Python Interpreter`_ and the `Translation Framework`_. - -.. 
_`translation framework`: - -The Translation Framework -------------------------- - -The job of the translation tool chain is to translate RPython_ programs -into an efficient version of that program for one of various target -platforms, generally one that is considerably lower-level than Python. - -The approach we have taken is to reduce the level of abstraction of the -source RPython program in several steps, from the high level down to the -level of the target platform, whatever that may be. Currently we -support two broad flavours of target platforms: the ones that assume a -C-like memory model with structures and pointers, and the ones that -assume an object-oriented model with classes, instances and methods (as, -for example, the Java and .NET virtual machines do). - -The translation tool chain never sees the RPython source code or syntax -trees, but rather starts with the *code objects* that define the -behaviour of the function objects one gives it as input. It can be -considered as "freezing" a pre-imported RPython program into an -executable form suitable for the target platform. - -The steps of the translation process can be summarized as follows: - -* The code object of each source functions is converted to a `control - flow graph` by the `Flow Object Space`_. - -* The control flow graphs are processed by the Annotator_, which - performs whole-program type inference to annotate each variable of - the control flow graph with the types it may take at run-time. - -* The information provided by the annotator is used by the RTyper_ to - convert the high level operations of the control flow graphs into - operations closer to the abstraction level of the target platform. - -* Optionally, `various transformations`_ can then be applied which, for - example, perform optimizations such as inlining, add capabilities - such as stackless_-style concurrency, or insert code for the - `garbage collector`_. 
- -* Then, the graphs are converted to source code for the target platform - and compiled into an executable. - -This process is described in much more detail in the `document about -the translation process`_ and in the paper `Compiling dynamic language -implementations`_. - -.. _`control flow graph`: translation.html#the-flow-model -.. _`Flow Object Space`: objspace.html#the-flow-object-space -.. _Annotator: translation.html#the-annotation-pass -.. _RTyper: rtyper.html#overview -.. _`various transformations`: translation.html#the-optional-transformations -.. _`document about the translation process`: translation.html -.. _`garbage collector`: garbage_collection.html - - -.. _`standard interpreter`: -.. _`python interpreter`: - -The Python Interpreter -------------------------------------- - -PyPy's *Python Interpreter* is written in RPython and implements the -full Python language. This interpreter very closely emulates the -behavior of CPython. It contains the following key components: - -- a bytecode compiler responsible for producing Python code objects - from the source code of a user application; - -- a `bytecode evaluator`_ responsible for interpreting - Python code objects; - -- a `standard object space`_, responsible for creating and manipulating - the Python objects seen by the application. - -The *bytecode compiler* is the preprocessing phase that produces a -compact bytecode format via a chain of flexible passes (tokenizer, -lexer, parser, abstract syntax tree builder, bytecode generator). The -*bytecode evaluator* interprets this bytecode. It does most of its work -by delegating all actual manipulations of user objects to the *object -space*. The latter can be thought of as the library of built-in types. -It defines the implementation of the user objects, like integers and -lists, as well as the operations between them, like addition or -truth-value-testing. 
- -This division between bytecode evaluator and object space is very -important, as it gives a lot of flexibility. One can plug in -different `object spaces`_ to get different or enriched behaviours -of the Python objects. Additionally, a special more abstract object -space, the `flow object space`_, allows us to reuse the bytecode -evaluator for our translation framework. - -.. _`bytecode evaluator`: interpreter.html -.. _`standard object space`: objspace.html#the-standard-object-space -.. _`object spaces`: objspace.html -.. _`flow object space`: objspace.html#the-flow-object-space - -.. _`the translation framework`: - - -Further reading -=============== - -All of PyPy's documentation can be reached from the `documentation -index`_. Of particular interest after reading this document might be: - - * `getting-started`_: a hands-on guide to getting involved with the - PyPy source code. - - * `PyPy's approach to virtual machine construction`_: a paper - presented to the Dynamic Languages Symposium attached to OOPSLA - 2006. - - * `The translation document`_: a detailed description of our - translation process. - - * All our `Technical reports`_, including `Compiling dynamic language - implementations`_. - - * `JIT Generation in PyPy`_, describing how we produce a Just-in-time - Compiler from an interpreter. - -.. _`documentation index`: docindex.html -.. _`getting-started`: getting-started.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`the translation document`: translation.html -.. _`Compiling dynamic language implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`Technical reports`: index-report.html - -.. _`getting started`: getting-started.html -.. _`Extreme Programming`: http://www.extremeprogramming.org/ - -.. _fast: faq.html#how-fast-is-pypy -.. 
_`very compliant`: cpython_differences.html - -.. _`RPython`: coding-guide.html#rpython - -.. _Python: http://docs.python.org/ref -.. _Psyco: http://psyco.sourceforge.net -.. _stackless: stackless.html -.. _`generate Just-In-Time Compilers`: jit/index.html -.. _`JIT Generation in PyPy`: jit/index.html - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/somepbc-refactoring-plan.txt b/pypy/doc/discussion/somepbc-refactoring-plan.txt deleted file mode 100644 --- a/pypy/doc/discussion/somepbc-refactoring-plan.txt +++ /dev/null @@ -1,161 +0,0 @@ -========================== - Refactoring SomePBCs -========================== - -Motivation -========== - -Some parts of the annotator, and especially specialization, are quite obscure -and hackish. One cause for this is the need to manipulate Python objects like -functions directly. This makes it hard to attach additional information directly -to the objects. It makes specialization messy because it has to create new dummy -function objects just to represent the various specialized versions of the function. - - -Plan -==== - -Let's introduce nice wrapper objects. This refactoring is oriented towards -the following goal: replacing the content of SomePBC() with a plain set of -"description" wrapper objects. We shall probably also remove the possibility -for None to explicitly be in the set and add a can_be_None flag (this is -closer to what the other SomeXxx classes do). - - -XxxDesc classes -=============== - -To be declared in module pypy.annotator.desc, with a mapping -annotator.bookkeeper.descs = {: } -accessed with bookkeeper.getdesc(). - -Maybe later the module should be moved out of pypy.annotation but for now I -suppose that it's the best place. - -The goal is to have a single Desc wrapper even for functions and classes that -are specialized. - -FunctionDesc - - Describes (usually) a Python function object. 
Contains flow graphs: one - in the common case, zero for external functions, more than one if there - are several specialized versions. Also describes the signature of the - function in a nice format (i.e. not by relying on func_code inspection). - -ClassDesc - - Describes a Python class object. Generally just maps to a ClassDef, but - could map to more than one in the presence of specialization. So we get - SomePBC({}) annotations for the class, and when it's - instantiated it becomes SomeInstance(classdef=...) for the particular - selected classdef. - -MethodDesc - - Describes a bound method. Just references a FunctionDesc and a ClassDef - (not a ClassDesc, because it's read out of a SomeInstance). - -FrozenDesc - - Describes a frozen pre-built instance. That's also a good place to store - some information currently in dictionaries of the bookkeeper. - -MethodOfFrozenDesc - - Describes a method of a FrozenDesc. Just references a FunctionDesc and a - FrozenDesc. - -NB: unbound method objects are the same as function for our purposes, so they -become the same FunctionDesc as their im_func. - -These XxxDesc classes should share some common interface, as we'll see during -the refactoring. A common base class might be a good idea (at least I don't -see why it would be a bad idea :-) - - -Implementation plan -=================== - -* make a branch (/branch/somepbc-refactoring/) - -* change the definition of SomePBC, start pypy.annotation.desc - -* fix all places that use SomePBC :-) - -* turn Translator.flowgraphs into a plain list of flow graphs, - and make the FunctionDescs responsible for computing their own flow graphs - -* move external function functionality into the FunctionDescs too - - -Status -====== - -Done, branch merged. - - -RTyping PBCs of functions -========================= - -The FuncDesc.specialize() method takes an args_s and return a -corresponding graph. 
The caller of specialize() parses the actual -arguments provided by the simple_call or call_args operation, so that -args_s is a flat parsed list. The returned graph must have the same -number and order of input variables. - -For each call family, we compute a table like this (after annotation -finished):: - - call_shape FuncDesc1 FuncDesc2 FuncDesc3 ... - ---------------------------------------------------------- - call0 shape1 graph1 - call1 shape1 graph1 graph2 - call2 shape1 graph3 graph4 - call3 shape2 graph5 graph6 - - -We then need to merge some of the lines if they look similar enough, -e.g. call0 and call1. Precisely, we can merge two lines if they only -differ in having more or less holes. In theory, the same graph could -appear in two lines that are still not mergeable because of other -graphs. For sanity of implementation, we should check that at the end -each graph only appears once in the table (unless there is only one -*column*, in which case all problems can be dealt with at call sites). - -(Note that before this refactoring, the code was essentially requiring -that the table ended up with either one single row or one single -column.) - -The table is computed when the annotation is complete, in -compute_at_fixpoint(), which calls the FuncDesc's consider_call_site() -for each call site. The latter merges lines as soon as possible. The -table is attached to the call family, grouped by call shape. - -During RTyping, compute_at_fixpoint() is called after each new ll -helper is annotated. Normally, this should not modify existing tables -too much, but in some situations it will. So the rule is that -consider_call_site() should not add new (unmerged) rows to the table -after the table is considered "finished" (again, unless there is only -one column, in which case we should not discover new columns). - -XXX this is now out of date, in the details at least. 
- -RTyping other callable PBCs -=========================== - -The above picture attaches "calltable" information to the call -families containing the function. When it comes to rtyping a call of -another kind of pbc (class, instance-method, frozenpbc-method) we have -two basic choices: - - - associate the calltable information with the funcdesc that - ultimately ends up getting called, or - - - attach the calltable to the callfamily that contains the desc - that's actually being called. - -Neither is totally straightforward: the former is closer to what -happens on the trunk but new families of funcdescs need to be created -at the end of annotation or by normalisation. The latter is more of a -change. The former is also perhaps a bit unnatural for ootyped -backends. diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -5,37 +5,63 @@ from pypy.jit.metainterp.optimizeopt.heap import OptHeap from pypy.jit.metainterp.optimizeopt.string import OptString from pypy.jit.metainterp.optimizeopt.unroll import optimize_unroll, OptInlineShortPreamble +from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall +from pypy.jit.metainterp.optimizeopt.simplify import OptSimplify +from pypy.rlib.jit import PARAMETERS +from pypy.rlib.unroll import unrolling_iterable -def optimize_loop_1(metainterp_sd, loop, unroll=True, - inline_short_preamble=True, retraced=None): - """Optimize loop.operations to remove internal overheadish operations. 
+ALL_OPTS = [('intbounds', OptIntBounds), + ('rewrite', OptRewrite), + ('virtualize', OptVirtualize), + ('string', OptString), + ('heap', OptHeap), + ('ffi', OptFfiCall), + ('unroll', None)] +# no direct instantiation of unroll +unroll_all_opts = unrolling_iterable(ALL_OPTS) + +ALL_OPTS_DICT = dict.fromkeys([name for name, _ in ALL_OPTS]) + +ALL_OPTS_NAMES = ':'.join([name for name, _ in ALL_OPTS]) +PARAMETERS['enable_opts'] = ALL_OPTS_NAMES + +def optimize_loop_1(metainterp_sd, loop, enable_opts, + """Optimize loop.operations to remove internal overheadish operations. """ - opt_str = OptString() - optimizations = [OptIntBounds(), - OptRewrite(), - OptVirtualize(), - opt_str, - OptHeap(), - ] + optimizations = [] + unroll = 'unroll' in enable_opts + for name, opt in unroll_all_opts: + if name in enable_opts: + if opt is not None: + o = opt() + if unroll and name == 'string': + o.enabled = False + # FIXME: Workaround to disable string optimisation + # during preamble but to keep it during the loop + optimizations.append(o) + + if 'rewrite' not in enable_opts or 'virtualize' not in enable_opts: + optimizations.append(OptSimplify()) + if inline_short_preamble: - optimizations = [OptInlineShortPreamble(retraced)] + optimizations - - if metainterp_sd.jit_ffi: - from pypy.jit.metainterp.optimizeopt.fficall import OptFfiCall - optimizations = optimizations + [ - OptFfiCall(), - ] + optimizations = [OptInlineShortPreamble(retraced)] + optimizations if unroll: - opt_str.enabled = False # FIXME: Workaround to disable string optimisation - # during preamble but to keep it during the loop optimize_unroll(metainterp_sd, loop, optimizations) else: optimizer = Optimizer(metainterp_sd, loop, optimizations) optimizer.propagate_all_forward() -def optimize_bridge_1(metainterp_sd, bridge, inline_short_preamble=True, - retraced=None): +def optimize_bridge_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble=True, retrace=None): """The same, but for a bridge. 
""" - optimize_loop_1(metainterp_sd, bridge, False, inline_short_preamble, - retraced) + enable_opts = enable_opts.copy() + try: + del enable_opts['unroll'] + except KeyError: + pass + optimize_loop_1(metainterp_sd, bridge, enable_opts, + inline_short_preamble, retraced) + +if __name__ == '__main__': + print ALL_OPTS_NAMES diff --git a/pypy/doc/config/objspace.usemodules._lsprof.txt b/pypy/doc/config/objspace.usemodules._lsprof.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._lsprof.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_lsprof' module. diff --git a/pypy/doc/config/translation.compilerflags.txt b/pypy/doc/config/translation.compilerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.compilerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the C compiler. diff --git a/pypy/doc/interpreter.txt b/pypy/doc/interpreter.txt deleted file mode 100644 --- a/pypy/doc/interpreter.txt +++ /dev/null @@ -1,410 +0,0 @@ -=================================== -PyPy - Bytecode Interpreter -=================================== - -.. contents:: -.. sectnum:: - - -Introduction and Overview -=============================== - -This document describes the implementation of PyPy's -Bytecode Interpreter and related Virtual Machine functionalities. - -PyPy's bytecode interpreter has a structure reminiscent of CPython's -Virtual Machine: It processes code objects parsed and compiled from -Python source code. It is implemented in the `interpreter/`_ directory. -People familiar with the CPython implementation will easily recognize -similar concepts there. The major differences are the overall usage of -the `object space`_ indirection to perform operations on objects, and -the organization of the built-in modules (described `here`_). - -Code objects are a nicely preprocessed, structured representation of -source code, and their main content is *bytecode*. We use the same -compact bytecode format as CPython 2.4. 
Our bytecode compiler is -implemented as a chain of flexible passes (tokenizer, lexer, parser, -abstract syntax tree builder, bytecode generator). The latter passes -are based on the ``compiler`` package from the standard library of -CPython, with various improvements and bug fixes. The bytecode compiler -(living under `interpreter/astcompiler/`_) is now integrated and is -translated with the rest of PyPy. - -Code objects contain -condensed information about their respective functions, class and -module body source codes. Interpreting such code objects means -instantiating and initializing a `Frame class`_ and then -calling its ``frame.eval()`` method. This main entry point -initialize appropriate namespaces and then interprets each -bytecode instruction. Python's standard library contains -the `lib-python/2.5.2/dis.py`_ module which allows to view -the Virtual's machine bytecode instructions:: - - >>> import dis - >>> def f(x): - ... return x + 1 - >>> dis.dis(f) - 2 0 LOAD_FAST 0 (x) - 3 LOAD_CONST 1 (1) - 6 BINARY_ADD - 7 RETURN_VALUE - -CPython as well as PyPy are stack-based virtual machines, i.e. -they don't have registers but put object to and pull objects -from a stack. The bytecode interpreter is only responsible -for implementing control flow and putting and pulling black -box objects to and from this value stack. The bytecode interpreter -does not know how to perform operations on those black box -(`wrapped`_) objects for which it delegates to the `object -space`_. In order to implement a conditional branch in a program's -execution, however, it needs to gain minimal knowledge about a -wrapped object. Thus, each object space has to offer a -``is_true(w_obj)`` operation which returns an -interpreter-level boolean value. - -For the understanding of the interpreter's inner workings it -is crucial to recognize the concepts of `interpreter-level and -application-level`_ code. 
In short, interpreter-level is executed -directly on the machine and invoking application-level functions -leads to an bytecode interpretation indirection. However, -special care must be taken regarding exceptions because -application level exceptions are wrapped into ``OperationErrors`` -which are thus distinguished from plain interpreter-level exceptions. -See `application level exceptions`_ for some more information -on ``OperationErrors``. - -The interpreter implementation offers mechanisms to allow a -caller to be unaware if a particular function invocation leads -to bytecode interpretation or is executed directly at -interpreter-level. The two basic kinds of `Gateway classes`_ -expose either an interpreter-level function to -application-level execution (``interp2app``) or allow -transparent invocation of application-level helpers -(``app2interp``) at interpreter-level. - -Another task of the bytecode interpreter is to care for exposing its -basic code, frame, module and function objects to application-level -code. Such runtime introspection and modification abilities are -implemented via `interpreter descriptors`_ (also see Raymond Hettingers -`how-to guide for descriptors`_ in Python, PyPy uses this model extensively). - -A significant complexity lies in `function argument parsing`_. Python as a -language offers flexible ways of providing and receiving arguments -for a particular function invocation. Not only does it take special care -to get this right, it also presents difficulties for the `annotation -pass`_ which performs a whole-program analysis on the -bytecode interpreter, argument parsing and gatewaying code -in order to infer the types of all values flowing across function -calls. - -It is for this reason that PyPy resorts to generate -specialized frame classes and functions at `initialization -time`_ in order to let the annotator only see rather static -program flows with homogeneous name-value assignments on -function invocations. - -.. 
_`how-to guide for descriptors`: http://users.rcn.com/python/download/Descriptor.htm -.. _`annotation pass`: translation.html#the-annotation-pass -.. _`initialization time`: translation.html#initialization-time -.. _`interpreter-level and application-level`: coding-guide.html#interpreter-level -.. _`wrapped`: coding-guide.html#wrapping-rules -.. _`object space`: objspace.html -.. _`application level exceptions`: coding-guide.html#applevel-exceptions -.. _`here`: coding-guide.html#modules - - -Bytecode Interpreter Implementation Classes -================================================ - -.. _`Frame class`: -.. _`Frame`: - -Frame classes ------------------ - -The concept of Frames is pervasive in executing programs and -on virtual machines in particular. They are sometimes called -*execution frame* because they hold crucial information -regarding the execution of a Code_ object, which in turn is -often directly related to a Python `Function`_. Frame -instances hold the following state: - -- the local scope holding name-value bindings, usually implemented - via a "fast scope" which is an array of wrapped objects - -- a blockstack containing (nested) information regarding the - control flow of a function (such as ``while`` and ``try`` constructs) - -- a value stack where bytecode interpretation pulls object - from and puts results on. - -- a reference to the *globals* dictionary, containing - module-level name-value bindings - -- debugging information from which a current line-number and - file location can be constructed for tracebacks - -Moreover the Frame class itself has a number of methods which implement -the actual bytecodes found in a code object. 
In fact, PyPy already constructs -four specialized Frame class variants depending on the code object: - -- PyInterpFrame (in `pypy/interpreter/pyopcode.py`_) for - basic simple code objects (not involving generators or nested scopes) - -- PyNestedScopeFrame (in `pypy/interpreter/nestedscope.py`_) - for code objects that reference nested scopes, inherits from PyInterpFrame - -- PyGeneratorFrame (in `pypy/interpreter/generator.py`_) - for code objects that yield values to the caller, inherits from PyInterpFrame - -- PyNestedScopeGeneratorFrame for code objects that reference - nested scopes and yield values to the caller, inherits from both PyNestedScopeFrame - and PyGeneratorFrame - -.. _Code: - -Code Class ------------- - -PyPy's code objects contain the same information found in CPython's code objects. -They differ from Function_ objects in that they are only immutable representations -of source code and don't contain execution state or references to the execution -environment found in `Frames`. Frames and Functions have references -to a code object. 
Here is a list of Code attributes: - -* ``co_flags`` flags if this code object has nested scopes/generators -* ``co_stacksize`` the maximum depth the stack can reach while executing the code -* ``co_code`` the actual bytecode string - -* ``co_argcount`` number of arguments this code object expects -* ``co_varnames`` a tuple of all argument names pass to this code object -* ``co_nlocals`` number of local variables -* ``co_names`` a tuple of all names used in the code object -* ``co_consts`` a tuple of prebuilt constant objects ("literals") used in the code object -* ``co_cellvars`` a tuple of Cells containing values for access from nested scopes -* ``co_freevars`` a tuple of Cell names from "above" scopes - -* ``co_filename`` source file this code object was compiled from -* ``co_firstlineno`` the first linenumber of the code object in its source file -* ``co_name`` name of the code object (often the function name) -* ``co_lnotab`` a helper table to compute the line-numbers corresponding to bytecodes - -In PyPy, code objects also have the responsibility of creating their Frame_ objects -via the `'create_frame()`` method. With proper parser and compiler support this would -allow to create custom Frame objects extending the execution of functions -in various ways. The several Frame_ classes already utilize this flexibility -in order to implement Generators and Nested Scopes. - -.. _Function: - -Function and Method classes ----------------------------- - -The PyPy ``Function`` class (in `pypy/interpreter/function.py`_) -represents a Python function. 
A ``Function`` carries the following -main attributes: - -* ``func_doc`` the docstring (or None) -* ``func_name`` the name of the function -* ``func_code`` the Code_ object representing the function source code -* ``func_defaults`` default values for the function (built at function definition time) -* ``func_dict`` dictionary for additional (user-defined) function attributes -* ``func_globals`` reference to the globals dictionary -* ``func_closure`` a tuple of Cell references - -``Functions`` classes also provide a ``__get__`` descriptor which creates a Method -object holding a binding to an instance or a class. Finally, ``Functions`` -and ``Methods`` both offer a ``call_args()`` method which executes -the function given an `Arguments`_ class instance. - -.. _Arguments: -.. _`function argument parsing`: - -Arguments Class --------------------- - -The Argument class (in `pypy/interpreter/argument.py`_) is -responsible for parsing arguments passed to functions. -Python has rather complex argument-passing concepts: - -- positional arguments - -- keyword arguments specified by name - -- default values for positional arguments, defined at function - definition time - -- "star args" allowing a function to accept remaining - positional arguments - -- "star keyword args" allow a function to accept additional - arbitrary name-value bindings - -Moreover, a Function_ object can get bound to a class or instance -in which case the first argument to the underlying function becomes -the bound object. The ``Arguments`` provides means to allow all -this argument parsing and also cares for error reporting. - - -.. _`Module`: - -Module Class -------------------- - -A ``Module`` instance represents execution state usually constructed -from executing the module's source file. 
In addition to such a module's -global ``__dict__`` dictionary it has the following application level -attributes: - -* ``__doc__`` the docstring of the module -* ``__file__`` the source filename from which this module was instantiated -* ``__path__`` state used for relative imports - -Apart from the basic Module used for importing -application-level files there is a more refined -``MixedModule`` class (see `pypy/interpreter/mixedmodule.py`_) -which allows to define name-value bindings both at application -level and at interpreter level. See the ``__builtin__`` -module's `pypy/module/__builtin__/__init__.py`_ file for an -example and the higher level `chapter on Modules in the coding -guide`_. - -.. _`__builtin__ module`: http://codespeak.net/svn/pypy/trunk/pypy/module/ -.. _`chapter on Modules in the coding guide`: coding-guide.html#modules - -.. _`Gateway classes`: - -Gateway classes ----------------------- - -A unique PyPy property is the ability to easily cross the barrier -between interpreted and machine-level code (often referred to as -the difference between `interpreter-level and application-level`_). -Be aware that the according code (in `pypy/interpreter/gateway.py`_) -for crossing the barrier in both directions is somewhat -involved, mostly due to the fact that the type-inferring -annotator needs to keep track of the types of objects flowing -across those barriers. - -.. _typedefs: - -Making interpreter-level functions available at application-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -In order to make an interpreter-level function available at -application level, one invokes ``pypy.interpreter.gateway.interp2app(func)``. -Such a function usually takes a ``space`` argument and any number -of positional arguments. 
Additionally, such functions can define -an ``unwrap_spec`` telling the ``interp2app`` logic how -application-level provided arguments should be unwrapped -before the actual interpreter-level function is invoked. -For example, `interpreter descriptors`_ such as the ``Module.__new__`` -method for allocating and constructing a Module instance are -defined with such code:: - - Module.typedef = TypeDef("module", - __new__ = interp2app(Module.descr_module__new__.im_func, - unwrap_spec=[ObjSpace, W_Root, Arguments]), - __init__ = interp2app(Module.descr_module__init__), - # module dictionaries are readonly attributes - __dict__ = GetSetProperty(descr_get_dict, cls=Module), - __doc__ = 'module(name[, doc])\n\nCreate a module object...' - ) - -The actual ``Module.descr_module__new__`` interpreter-level method -referenced from the ``__new__`` keyword argument above is defined -like this:: - - def descr_module__new__(space, w_subtype, __args__): - module = space.allocate_instance(Module, w_subtype) - Module.__init__(module, space, None) - return space.wrap(module) - -Summarizing, the ``interp2app`` mechanism takes care to route -an application level access or call to an internal interpreter-level -object appropriately to the descriptor, providing enough precision -and hints to keep the type-inferring annotator happy. - - -Calling into application level code from interpreter-level -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -Application level code is `often preferable`_. Therefore, -we often like to invoke application level code from interpreter-level. -This is done via the Gateway's ``app2interp`` mechanism -which we usually invoke at definition time in a module. -It generates a hook which looks like an interpreter-level -function accepting a space and an arbitrary number of arguments. 
-When calling a function at interpreter-level the caller side -does usually not need to be aware if its invoked function -is run through the PyPy interpreter or if it will directly -execute on the machine (after translation). - -Here is an example showing how we implement the Metaclass -finding algorithm of the Python language in PyPy:: - - app = gateway.applevel(r''' - def find_metaclass(bases, namespace, globals, builtin): - if '__metaclass__' in namespace: - return namespace['__metaclass__'] - elif len(bases) > 0: - base = bases[0] - if hasattr(base, '__class__'): - return base.__class__ - else: - return type(base) - elif '__metaclass__' in globals: - return globals['__metaclass__'] - else: - try: - return builtin.__metaclass__ - except AttributeError: - return type - ''', filename=__file__) - - find_metaclass = app.interphook('find_metaclass') - -The ``find_metaclass`` interpreter-level hook is invoked -with five arguments from the ``BUILD_CLASS`` opcode implementation -in `pypy/interpreter/pyopcode.py`_:: - - def BUILD_CLASS(f): - w_methodsdict = f.valuestack.pop() - w_bases = f.valuestack.pop() - w_name = f.valuestack.pop() - w_metaclass = find_metaclass(f.space, w_bases, - w_methodsdict, f.w_globals, - f.space.wrap(f.builtin)) - w_newclass = f.space.call_function(w_metaclass, w_name, - w_bases, w_methodsdict) - f.valuestack.push(w_newclass) - -Note that at a later point we can rewrite the ``find_metaclass`` -implementation at interpreter-level and we would not have -to modify the calling side at all. - -.. _`often preferable`: coding-guide.html#app-preferable -.. _`interpreter descriptors`: - -Introspection and Descriptors ------------------------------- - -Python traditionally has a very far-reaching introspection model -for bytecode interpreter related objects. In PyPy and in CPython read -and write accesses to such objects are routed to descriptors. 
-Of course, in CPython those are implemented in ``C`` while in -PyPy they are implemented in interpreter-level Python code. - -All instances of a Function_, Code_, Frame_ or Module_ classes -are also ``Wrappable`` instances which means they can be represented -at application level. These days, a PyPy object space needs to -work with a basic descriptor lookup when it encounters -accesses to an interpreter-level object: an object space asks -a wrapped object for its type via a ``getclass`` method and then -calls the type's ``lookup(name)`` function in order to receive a descriptor -function. Most of PyPy's internal object descriptors are defined at the -end of `pypy/interpreter/typedef.py`_. You can use these definitions -as a reference for the exact attributes of interpreter classes visible -at application level. - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._codecs.txt b/pypy/doc/config/objspace.usemodules._codecs.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._codecs.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_codecs' module. -Used by the 'codecs' standard lib module. This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.unicodedata.txt b/pypy/doc/config/objspace.usemodules.unicodedata.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.unicodedata.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'unicodedata' module. -This module is expected to be fully working. diff --git a/pypy/doc/config/translation.no__thread.txt b/pypy/doc/config/translation.no__thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.no__thread.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't use gcc __thread attribute for fast thread local storage -implementation . Increases the chance that moving the resulting -executable to another same processor Linux machine will work. (see -:config:`translation.vanilla`). 
diff --git a/pypy/doc/config/translation.backendopt.inline.txt b/pypy/doc/config/translation.backendopt.inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs based on an heuristic, the default one considers -essentially the a weight for the flowgraph based on the number of -low-level operations in them (see -:config:`translation.backendopt.inline_threshold` ). - -Some amount of inlining in order to have RPython builtin type helpers -inlined is needed for malloc removal -(:config:`translation.backendopt.mallocs`) to be effective. - -This optimization is used by default. diff --git a/pypy/doc/config/translation.countmallocs.txt b/pypy/doc/config/translation.countmallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.countmallocs.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal; used by some of the C backend tests to check that the number of -allocations matches the number of frees. - -.. internal diff --git a/pypy/doc/config/objspace.std.newshortcut.txt b/pypy/doc/config/objspace.std.newshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.newshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: cache and shortcut calling __new__ from builtin types diff --git a/pypy/doc/discussion/translation-swamp.txt b/pypy/doc/discussion/translation-swamp.txt deleted file mode 100644 --- a/pypy/doc/discussion/translation-swamp.txt +++ /dev/null @@ -1,30 +0,0 @@ -=================================================================== -List of things that need to be improved for translation to be saner -=================================================================== - - - * understand nondeterminism after rtyping - - * experiment with different heuristics: - - * weigh backedges more (TESTING) - * consider size of outer function - * consider number of arguments (TESTING) - - * find a more deterministic inlining order (TESTING using number of callers) - - * 
experiment with using a base inlining threshold and then drive inlining by - malloc removal possibilities (using escape analysis) - - * move the inlining of gc helpers just before emitting the code. - throw the graph away (TESTING, need to do a new framework translation) - - * for gcc: use just one implement file (TRIED: turns out to be a bad idea, - because gcc uses too much ram). Need to experiment more now that - inlining should at least be more deterministic! - -things to improve the framework gc -================================== - - * find out whether a function can collect - diff --git a/pypy/doc/config/translation.insist.txt b/pypy/doc/config/translation.insist.txt deleted file mode 100644 --- a/pypy/doc/config/translation.insist.txt +++ /dev/null @@ -1,4 +0,0 @@ -Don't stop on the first `rtyping`_ error. Instead, try to rtype as much as -possible and show the collected error messages in the end. - -.. _`rtyping`: ../rtyper.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt b/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_METHOD.txt +++ /dev/null @@ -1,10 +0,0 @@ -Enable a pair of bytecodes that speed up method calls. -See ``pypy.interpreter.callmethod`` for a description. - -The goal is to avoid creating the bound method object in the common -case. So far, this only works for calls with no keyword, no ``*arg`` -and no ``**arg`` but it would be easy to extend. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#lookup-method-call-method diff --git a/pypy/doc/download.txt b/pypy/doc/download.txt deleted file mode 100644 --- a/pypy/doc/download.txt +++ /dev/null @@ -1,7 +0,0 @@ - -Download one of the following release files: -============================================= - -Download page has moved to `pypy.org`_. - -.. 
_`pypy.org`: http://pypy.org/download.html diff --git a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt b/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.opcodes.CALL_LIKELY_BUILTIN.txt +++ /dev/null @@ -1,12 +0,0 @@ -Introduce a new opcode called ``CALL_LIKELY_BUILTIN``. It is used when something -is called, that looks like a builtin function (but could in reality be shadowed -by a name in the module globals). For all module globals dictionaries it is -then tracked which builtin name is shadowed in this module. If the -``CALL_LIKELY_BUILTIN`` opcode is executed, it is checked whether the builtin is -shadowed. If not, the corresponding builtin is called. Otherwise the object that -is shadowing it is called instead. If no shadowing is happening, this saves two -dictionary lookups on calls to builtins. - -For more information, see the section in `Standard Interpreter Optimizations`_. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#call-likely-builtin diff --git a/pypy/doc/config/translation.backendopt.storesink.txt b/pypy/doc/config/translation.backendopt.storesink.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.storesink.txt +++ /dev/null @@ -1,1 +0,0 @@ -Store sinking optimization. On by default. diff --git a/pypy/doc/carbonpython.txt b/pypy/doc/carbonpython.txt deleted file mode 100644 --- a/pypy/doc/carbonpython.txt +++ /dev/null @@ -1,230 +0,0 @@ -================================================== -CarbonPython, aka C# considered harmful -================================================== - -CarbonPython overview -===================== - -CarbonPython is an experimental RPython to .NET compiler. Its main -focus is to produce DLLs to be used by other .NET programs, not -standalone executables; if you want to compile an RPython standalone -program, have a look to `translate.py`_. 
- -Compiled RPython programs are much faster (up to 250x) than -interpreted IronPython programs, hence it might be a convenient -replacement for C# when more speed is needed. RPython programs can be -as fast as C# programs. - -RPython is a restrict subset of Python, static enough to be analyzed -and compiled efficiently to lower level languages. To read more about -the RPython limitations read the `RPython description`_. - -**Disclaimer**: RPython is a much less convenient language than Python -to program with. If you do not need speed, there is no reason to look -at RPython. - -**Big disclaimer**: CarbonPython is still in a pre-alpha stage: it's -not meant to be used for production code, and the API might change in -the future. Despite this, it might be useful in some situations and -you are encouraged to try it by yourself. Suggestions, bug-reports and -even better patches are welcome. - -.. _`RPython description`: coding-guide.html#restricted-python -.. _`translate.py`: faq.html#how-do-i-compile-my-own-interpreters - - -Quick start -=========== - -Suppose you want to write a little DLL in RPython and call its -function from C#. - -Here is the file mylibrary.py:: - - from pypy.translator.cli.carbonpython import export - - @export(int, int) - def add(x, y): - return x+y - - @export(int, int) - def sub(x, y): - return x-y - - -And here the C# program main.cs:: - - using System; - public class CarbonPythonTest - { - public static void Main() - { - Console.WriteLine(mylibrary.add(40, 2)); - Console.WriteLine(mylibrary.sub(44, 2)); - } - } - -Once the files have been created, you can compile ``mylibrary.py`` -with CarbonPython to get the corresponding DLL:: - - $ python carbonpython.py mylibrary.py - ... 
lot of stuff - -Then, we compile main.cs into an executable, being sure to add a -reference to the newly created ``mylibrary.dll``:: - - # with mono on linux - $ gmcs /r:mylibrary.dll main.cs - - # with Microsoft CLR on windows - c:\> csc /r:mylibrary main.cs - -Now we can run the executable to see whether the answers are right:: - - $ mono main.exe - 42 - 42 - - -Multiple entry-points -===================== - -In RPython, the type of each variable is inferred by the `Annotator`_: -the annotator analyzed the whole program top-down starting from an -entry-point, i.e. a function whose we specified the types of the -parameters. - -This approach works for a standalone executables, but not for a -library that by definition is composed by more than one -entry-point. Thus, you need to explicitly specify which functions you -want to include in your DLL, together with the expected input types. - -To mark a function as an entry-point, you use the ``@export`` -decorator, which is defined in ``pypy.translator.cli.carbonpython``, -as shown by the previous example. Note that you do not need to -specify the return type, because it is automatically inferenced by the -annotator. - -.. _`Annotator`: translation.html#annotator - - -Namespaces -========== - -Since `CLS`_ (Common Language Specification) does not support module -level static methods, RPython functions marked as entry-points are -compiled to static methods of a class, in order to be accessible by -every CLS-compliant language such as C# or VB.NET. - -The class which each function is placed in depends on its -**namespace**; for example, if the namespace of a function ``foo`` is -``A.B.C``, the function will be rendered as a static method of the -``C`` class inside the ``A.B`` namespace. This allows C# and -IronPython code to call the function using the intuitive ``A.B.C.foo`` -syntax. - -By default, the default namespace for exported function is the same as -the name of the module. 
Thus in the previous example the default -namespace is ``mylibrary`` and the functions are placed inside the -corresponding class in the global namespace. - -You can change the default namespace by setting the ``_namespace_`` -variable in the module you are compiling:: - - _namespace_ = 'Foo.Bar' - - @export(int, int) - def f(x, y): - pass - -Finally, you can also set a specific namespace on a per-function -basis, using the appropriate keyword argument of the ``@export`` -decorator:: - - @export(int, int, namespace='Foo.Bar') - def f(x, y): - pass - - -.. _`CLS`: http://www.ecma-international.org/publications/files/ECMA-ST/Ecma-335.pdf - - -Exporting classes -================= - -RPython libraries can also export classes: to export a class, add the -``@export`` decorator to its ``__init__`` method; similarly, you can -also export any methods of the class:: - - class MyClass: - - @export(int) - def __init__(self, x): - self.x = x - - @export - def getx(self): - return self.x - - -Note that the type of ``self`` must not be specified: it will -automatically assumed to be ``MyClass``. - -The ``__init__`` method is not automatically mapped to the .NET -constructor; to properly initialize an RPython object from C# or -IronPython code you need to explicitly call ``__init__``; for example, -in C#:: - - MyClass obj = new MyClass(); - obj.__init__(x); - -Note that this is needed only when calling RPython code from -outside; the RPython compiler automatically calls ``__init__`` -whenever an RPython class is instantiated. - -In the future this discrepancy will be fixed and the ``__init__`` -method will be automatically mapped to the constructor. - - -Accessing .NET libraries -======================== - -**Warning**: the API for accessing .NET classes from RPython is highly -experimental and will probably change in the future. 
- -In RPython you can access native .NET classes through the ``CLR`` -object defined in ``translator.cli.dotnet``: from there, you can -navigate through namespaces using the usual dot notation; for example, -``CLR.System.Collections.ArrayList`` refers to the ``ArrayList`` class -in the ``System.Collections`` namespace. - -To instantiate a .NET class, simply call it:: - - ArrayList = CLR.System.Collections.ArrayList - def foo(): - obj = ArrayList() - obj.Add(42) - return obj - -At the moment there is no special syntax support for indexers and -properties: for example, you can't access ArrayList's elements using -the square bracket notation, but you have to call the call the -``get_Item`` and ``set_Item`` methods; similarly, to access a property -``XXX`` you need to call ``get_XXX`` and ``set_XXX``:: - - def foo(): - obj = ArrayList() - obj.Add(42) - print obj.get_Item(0) - print obj.get_Count() - -Static methods and are also supported, as well as overloadings:: - - Math = CLR.System.Math - def foo(): - print Math.Abs(-42) - print Math.Abs(-42.0) - - -At the moment, it is not possible to reference assemblies other than -mscorlib. This will be fixed soon. diff --git a/pypy/doc/__pypy__-module.txt b/pypy/doc/__pypy__-module.txt deleted file mode 100644 --- a/pypy/doc/__pypy__-module.txt +++ /dev/null @@ -1,86 +0,0 @@ -======================= -The ``__pypy__`` module -======================= - -The ``__pypy__`` module is the main entry point to special features provided -by PyPy's standard interpreter. Its content depends on `configuration options`_ -which may add new functionality and functions whose existence or non-existence -indicates the presence of such features. - -.. _`configuration options`: config/index.html - -Generally available functionality -================================= - - - ``internal_repr(obj)``: return the interpreter-level representation of an - object. - - ``bytebuffer(length)``: return a new read-write buffer of the given length. 
- It works like a simplified array of characters (actually, depending on the - configuration the ``array`` module internally uses this). - -Thunk Object Space Functionality -================================ - -When the thunk object space is used (choose with :config:`objspace.name`), -the following functions are put into ``__pypy__``: - - - ``thunk`` - - ``is_thunk`` - - ``become`` - - ``lazy`` - -Those are all described in the `interface section of the thunk object space -docs`_. - -For explanations and examples see the `thunk object space docs`_. - -.. _`thunk object space docs`: objspace-proxies.html#thunk -.. _`interface section of the thunk object space docs`: objspace-proxies.html#thunk-interface - -Taint Object Space Functionality -================================ - -When the taint object space is used (choose with :config:`objspace.name`), -the following names are put into ``__pypy__``: - - - ``taint`` - - ``is_tainted`` - - ``untaint`` - - ``taint_atomic`` - - ``_taint_debug`` - - ``_taint_look`` - - ``TaintError`` - -Those are all described in the `interface section of the taint object space -docs`_. - -For more detailed explanations and examples see the `taint object space docs`_. - -.. _`taint object space docs`: objspace-proxies.html#taint -.. _`interface section of the taint object space docs`: objspace-proxies.html#taint-interface - -Transparent Proxy Functionality -=============================== - -If `transparent proxies`_ are enabled (with :config:`objspace.std.withtproxy`) -the following functions are put into ``__pypy__``: - - - ``tproxy(typ, controller)``: Return something that looks like it is of type - typ. Its behaviour is completely controlled by the controller. See the docs - about `transparent proxies`_ for detail. - - - ``get_tproxy_controller(obj)``: If obj is really a transparent proxy, return - its controller. Otherwise return None. - -.. 
_`transparent proxies`: objspace-proxies.html#tproxy - - -Functionality available on py.py (not after translation) -======================================================== - - - ``isfake(obj)``: returns True if ``obj`` is faked. - - - ``interp_pdb()``: start a pdb at interpreter-level. - - - diff --git a/pypy/doc/config/objspace.std.withstrslice.txt b/pypy/doc/config/objspace.std.withstrslice.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrslice.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string slice" objects. - -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-slice-objects - - diff --git a/pypy/doc/config/objspace.std.withprebuiltint.txt b/pypy/doc/config/objspace.std.withprebuiltint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withprebuiltint.txt +++ /dev/null @@ -1,5 +0,0 @@ -This option enables the caching of small integer objects (similar to what -CPython does). The range of which integers are cached can be influenced with -the :config:`objspace.std.prebuiltintfrom` and -:config:`objspace.std.prebuiltintto` options. - diff --git a/pypy/doc/config/objspace.usemodules.errno.txt b/pypy/doc/config/objspace.usemodules.errno.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.errno.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'errno' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -21,7 +21,7 @@ from pypy.rlib.objectmodel import specialize from pypy.jit.codewriter.jitcode import JitCode, SwitchDictDescr, MissingLiveness from pypy.jit.codewriter import heaptracker, longlong -from pypy.jit.metainterp.optimizeutil import RetraceLoop +from pypy.jit.metainterp.optimizeutil import RetraceLoop, args_dict_box, args_dict # ____________________________________________________________ @@ -834,7 +834,7 @@ jcposition, redboxes): resumedescr = compile.ResumeAtPositionDescr() self.capture_resumedata(resumedescr, orgpc) - + any_operation = len(self.metainterp.history.operations) > 0 jitdriver_sd = self.metainterp.staticdata.jitdrivers_sd[jdindex] self.verify_green_args(jitdriver_sd, greenboxes) @@ -852,7 +852,7 @@ "found a loop_header for a JitDriver that does not match " "the following jit_merge_point's") self.metainterp.seen_loop_header_for_jdindex = -1 - + # if not self.metainterp.in_recursion: assert jitdriver_sd is self.metainterp.jitdriver_sd @@ -1275,11 +1275,6 @@ self._addr2name_keys = [key for key, value in list_of_addr2name] self._addr2name_values = [value for key, value in list_of_addr2name] - def setup_jitdrivers_sd(self, optimizer): - if optimizer is not None: - for jd in self.jitdrivers_sd: - jd.warmstate.set_param_optimizer(optimizer) - def finish_setup(self, codewriter, optimizer=None): from pypy.jit.metainterp.blackhole import BlackholeInterpBuilder self.blackholeinterpbuilder = BlackholeInterpBuilder(codewriter, self) @@ -1293,7 +1288,6 @@ self.jitdrivers_sd = codewriter.callcontrol.jitdrivers_sd self.virtualref_info = codewriter.callcontrol.virtualref_info self.callinfocollection = codewriter.callcontrol.callinfocollection - self.setup_jitdrivers_sd(optimizer) # # store this information for fastpath of call_assembler # (only the paths that can actually be taken) @@ -1416,6 +1410,7 @@ 
self.free_frames_list = [] self.last_exc_value_box = None self.retracing_loop_from = None + self.call_pure_results = args_dict_box() def perform_call(self, jitcode, boxes, greenkey=None): # causes the metainterp to enter the given subfunction @@ -1423,10 +1418,13 @@ f.setup_call(boxes) raise ChangeFrame + def is_main_jitcode(self, jitcode): + return self.jitdriver_sd is not None and jitcode is self.jitdriver_sd.mainjitcode + def newframe(self, jitcode, greenkey=None): if jitcode.is_portal: self.in_recursion += 1 - if greenkey is not None: + if greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (greenkey, len(self.history.operations))) if len(self.free_frames_list) > 0: @@ -1439,9 +1437,10 @@ def popframe(self): frame = self.framestack.pop() - if frame.jitcode.is_portal: + jitcode = frame.jitcode + if jitcode.is_portal: self.in_recursion -= 1 - if frame.greenkey is not None: + if frame.greenkey is not None and self.is_main_jitcode(jitcode): self.portal_trace_positions.append( (None, len(self.history.operations))) # we save the freed MIFrames to avoid needing to re-create new @@ -1632,6 +1631,7 @@ warmrunnerstate = self.jitdriver_sd.warmstate if len(self.history.operations) > warmrunnerstate.trace_limit: greenkey_of_huge_function = self.find_biggest_function() + self.staticdata.stats.record_aborted(greenkey_of_huge_function) self.portal_trace_positions = None if greenkey_of_huge_function is not None: warmrunnerstate.disable_noninlinable_function( @@ -1719,7 +1719,7 @@ dont_change_position = True else: dont_change_position = False - try: + try: self.prepare_resume_from_failure(key.guard_opnum, dont_change_position) if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(ABORT_BRIDGE) @@ -1924,7 +1924,7 @@ self.history.inputargs = original_inputargs self.history.operations = self.history.operations[:start] - + self.history.record(rop.JUMP, bridge_arg_boxes[num_green_args:], None) try: 
target_loop_token = compile.compile_new_bridge(self, @@ -2279,7 +2279,9 @@ return resbox_as_const # not all constants (so far): turn CALL into CALL_PURE, which might # be either removed later by optimizeopt or turned back into CALL. - newop = op.copy_and_change(rop.CALL_PURE, args=[resbox_as_const]+op.getarglist()) + arg_consts = [a.constbox() for a in op.getarglist()] + self.call_pure_results[arg_consts] = resbox_as_const + newop = op.copy_and_change(rop.CALL_PURE, args=op.getarglist()) self.history.operations[-1] = newop return resbox diff --git a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt b/pypy/doc/discussion/pypy_metaclasses_in_cl.txt deleted file mode 100644 --- a/pypy/doc/discussion/pypy_metaclasses_in_cl.txt +++ /dev/null @@ -1,139 +0,0 @@ -IRC log -======= - -:: - - [09:41] arigo: is it possible to ask the backendoptimizer to completely remove all the oogetfield('meta', obj)? - [09:42] and at the same time to change all the oogetfield('somefield', meta) into oogetfield('somefield', obj) - [09:42] because then we wouldn't need the metaclass hierarchy anymore - [09:42] (at least in common lisp) - [09:42] as far as I know the idea was indeed to be able to do this kind of things - [09:43] but not necessarily in the existing backendopt - [09:44] uhmmm - [09:44] I have no idea how to do this stuff - [09:44] if I understand it correctly, as a first step you can just tweak gencl to recognize oogetfield('meta', obj) - [09:44] I'll think about it on the plane maybe - [09:44] and produce a same_as equivalent instead - [09:44] (do I make any sense at all?) - [09:44] yes - [09:45] same_as(meta, obj) - [09:45] so that the next oogetfield() will still work on meta which in reality is the obj - [09:45] yes - [09:45] thus you obtained the same thing without removing anything - [09:45] cool - [09:46] dialtone: can you explain me better what are you trying to do? - [09:46] it looks kinda simple - [09:46] am I a fool? 
- [09:46] antocuni: I want to get rid of the metaclass stuff in common lisp - [09:47] since common lisp supports class variables - [09:47] (DEFCLASS foo () ((bar :allocate :class))) - [09:47] cool - [09:47] but to do that I also have to get rid of the opcodes that work on the object model - [09:48] at first I thought about removing the metaclass related operations (or change them) but armin got a great idea about using same_as - [09:48] idnar (i=mithrand at unaffiliated/idnar) left irc: Remote closed the connection - [09:48] there might be a few problems, though - [09:48] and here comes the part I feared - [09:48] I'm not sure if the meta object is used for more than oogetfields - [09:49] and also, let's see if there are name clashes in the fields - [09:49] I can't understand a thing: are you trying to lookup some fields in the obj directly, instead of in the metclass, right? - [09:49] antocuni: yes - [09:50] why an object should have fields that belongs to its metaclass? - [09:50] arigo: uhmmm you can have both a class variable and an instance variable named in the same way? - [09:50] metaclass is not a real metaclass - [09:50] I don't know - [09:50] arigo - r26566 - Support geterrno() from rctypes to genc. - [09:50] dialtone: ah, now I understand - [09:50] I would expect it not to be the case, as the names come from RPython names - [09:51] arigo: indeed - [09:51] but I guess I can set different accessors maybe for class level things and for instance level things - [09:51] let's try - [09:51] no... - [09:52] so a name clash would break stuff - [09:52] but... how do you recognize an access to a class variable and one to an instance variable from RPython? - [09:53] dialtone: I think we don't have name clashes, because there is some mangling anyway - [09:53] cool - [09:53] if I see it correctly, class variable names start with 'pbc' and instance ones with 'o' - [09:53] that's what we've done in gencl yes - [09:54] ? 
that's what the ootyping is doing - [09:54] yes yes - [09:54] :-) - [09:54] I mean that I see the distinction in gencl :) - [09:54] sooooooo - [09:55] if I have a getfield where the first argument is meta and I simply emit the same code that I emit for the same_as I should be safe removing all the meta stuff... maybe - [09:55] seems like a tiny change in gencl - [09:55] dialtone: in RPython, the annotator says that attributes are instance fields as soon as they are written to instances, otherwise they are class attributes - [09:56] yes, it should work - [09:56] Palats (n=Pierre at izumi.palats.com) left irc: Read error: 104 (Connection reset by peer) - [09:56] unless of course metaclasses are used for something else than class variables - [09:56] ideally, you should not look for the name 'meta' but for some other hint - [09:57] I'm not completely at ease with the various levels of ootype - [09:57] neither am I\ - [09:57] all field names other than those defined by ootype (like "meta") will be mangled, so i guess checking for "meta" is good enough - [09:57] and I also have to ignore the setfield opcode that deals with metaclasses - [09:58] or make it a same_as as well - [09:59] apparently, the meta instances are used as the ootype of RPython classes - [10:00] so they can be manipulated by RPython code that passes classes around - [10:01] I guess you can also pass classes around in CL, read attributes from them, and instantiate them - [10:01] yes - [10:01] so a saner approach might be to try to have gencl use CL classes instead of these meta instances - [10:03] uhmmmmm - [10:03] which means: recognize if an ootype.Instance is actually representing an RPython class (by using a hint) - [10:03] I also have to deal with the Class_ - [10:03] but that can probably be set to standard-class - [10:03] yes, I think it's saner to make, basically, oogetfield('class_') be a same_as - [10:04] cool - [10:04] I think I'll save this irc log to put it in the svn tree for sanxiyn - 
[10:04] to recognize RPython class represenations: if the ootype.Instance has the superclass ootypesystem.rclass.CLASSTYPE, then it's a "metaclass" - [10:04] he is thinking about this in the plane (at least this is what he told) - [10:05] :-) - [10:05] nikh: yes - [10:05] ootype is indeed rather complicated, level-wise, to support limited languages like Java - [10:05] unfortunately, yes - [10:05] well, in a way it's very convenient for the backends - [10:05] but if you want to use more native constructs, it gets hairy quickly - [10:05] I dunno - [10:05] depends on the backend - [10:06] hum, there is still an information missing that gencl would need here - [10:06] I think if the language of the backend is powerful enough it could use an higher abstraction - [10:07] dialtone: yes, there is also the (hairly to implement) idea of producing slightly different things for different back-ends too - [10:07] using backendopts? - [10:08] would it make sense to have a kind of backend_supports=['metaclasses', 'classvariables', 'first_class_functions'...] - [10:08] maybe, but I was thinking about doing different things in ootypesystem/rclass already - [10:08] yes, such a backend_supports would be great - [10:09] dialtone: there is still an hour left to sprint, so go go go ;) - [10:09] you can do it, if you want it ;) - [10:09] what is missing is the link from the concrete Instance types, and which Instance corresponds to its meta-instance - [10:10] idnar (i=mithrand at unaffiliated/idnar) joined #pypy. 
- [10:10] dialtone: it's not as simple as making an oogetfield be a same_as - [10:10] KnowledgeUnboundError, Missing documentation in slot brain - [10:10] right now for CL the goal would be to generate for a normal Instance, a DEFCLASS whose :allocate :class attributes are the attributes of the meta-Instance - [10:11] we could optionally have class fields in Instances, and then operations like ooget/setclassfield - [10:11] the reason why I ask is that if we manage to do this then we could also use default Condition as Exception - [10:11] and we could map the Conditions in common lisp to exceptions in python transparently - [10:12] since the object systems will then match (and they are vaguely similar anyway) - [10:12] nice - [10:12] at least I think - [10:18] I'm still rather confused by ootypesystem/rclass - [10:18] although I think that blame would show my name on quite some bits :-) - [10:19] there are no class attributes read through instances - [10:19] they are turned into method calls - [10:19] accessor methods - [10:20] it's a bit organically grown - [10:20] accessor methods were introduced at one point, and the meta-Instance later - [10:21] uhmmm - [10:22] what was the reason for having accessor methods? - [10:22] they seem to be only generated for class vars that are overriden in subclasses. - [10:22] yes - [10:22] before we had the meta-Instance trick, it was the only way to avoid storing the value in all instances - [10:22] aha - [10:23] we could possibly get rid of these accessors - [10:23] now, yes, by storing the values in the meta-Instance - [10:23] they are alway anyway stored in the meta-Instance, I think - [10:23] no, I think that other values are stored in the meta-Instance right now - [10:24] it's the values that are only ever accessed with a syntax 'ClassName.attr', i.e. not through an instance - [10:24] ...more precisely, with 'x = ClassName or OtherClassName; x.attr' - [10:25] hm, i'm still trying to read this out of the code ... 
- [10:28] it's in ClassRepr._setup_repr() - [10:28] there is no clsfields here, just pbcfields - [10:28] # attributes showing up in getattrs done on the class as a PBC - [10:28] i see diff --git a/pypy/doc/config/translation.withsmallfuncsets.txt b/pypy/doc/config/translation.withsmallfuncsets.txt deleted file mode 100644 --- a/pypy/doc/config/translation.withsmallfuncsets.txt +++ /dev/null @@ -1,3 +0,0 @@ -Represent function sets smaller than this option's value as an integer instead -of a function pointer. A call is then done via a switch on that integer, which -allows inlining etc. Small numbers for this can speed up PyPy (try 5). diff --git a/pypy/doc/config/translation.backendopt.remove_asserts.txt b/pypy/doc/config/translation.backendopt.remove_asserts.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.remove_asserts.txt +++ /dev/null @@ -1,1 +0,0 @@ -Remove raising of assertions from the flowgraphs, which might give small speedups. diff --git a/.hgsub b/.hgsub deleted file mode 100644 --- a/.hgsub +++ /dev/null @@ -1,3 +0,0 @@ -greenlet = [svn]http://codespeak.net/svn/greenlet/trunk/c -testrunner = [svn]http://codespeak.net/svn/pypy/build/testrunner -lib_pypy/pyrepl = [svn]http://codespeak.net/svn/pyrepl/trunk/pyrepl/pyrepl diff --git a/pypy/doc/config/translation.ootype.txt b/pypy/doc/config/translation.ootype.txt deleted file mode 100644 --- a/pypy/doc/config/translation.ootype.txt +++ /dev/null @@ -1,1 +0,0 @@ -This group contains options specific for ootypesystem. diff --git a/pypy/doc/config/objspace.usemodules.termios.txt b/pypy/doc/config/objspace.usemodules.termios.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.termios.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'termios' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.usemodules.cStringIO.txt b/pypy/doc/config/objspace.usemodules.cStringIO.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.cStringIO.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in cStringIO module. - -If not enabled, importing cStringIO gives you the app-level -implementation from the standard library StringIO module. diff --git a/pypy/doc/config/objspace.usemodules.thread.txt b/pypy/doc/config/objspace.usemodules.thread.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.thread.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'thread' module. diff --git a/pypy/doc/config/objspace.std.logspaceoptypes.txt b/pypy/doc/config/objspace.std.logspaceoptypes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.logspaceoptypes.txt +++ /dev/null @@ -1,4 +0,0 @@ -.. internal - -Wrap "simple" bytecode implementations like BINARY_ADD with code that collects -information about which types these bytecodes receive as arguments. diff --git a/pypy/doc/discussion/chained_getattr.txt b/pypy/doc/discussion/chained_getattr.txt deleted file mode 100644 --- a/pypy/doc/discussion/chained_getattr.txt +++ /dev/null @@ -1,70 +0,0 @@ - - -"chained getattr/module global lookup" optimization -(discussion during trillke-sprint 2007, anto/holger, -a bit of samuele and cf earlier on) - -random example: - - code: - import os.path - normed = [os.path.normpath(p) for p in somelist] - bytecode: - [...] - LOAD_GLOBAL (os) - LOAD_ATTR (path) - LOAD_ATTR (normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - would be turned by pypy-compiler into: - - LOAD_CHAINED_GLOBAL (os,path,normpath) - LOAD_FAST (p) - CALL_FUNCTION 1 - - now for the LOAD_CHAINED_GLOBAL bytecode implementation: - - Module dicts have a special implementation, providing: - - - an extra "fastlookup" rpython-dict serving as a cache for - LOAD_CHAINED_GLOBAL places within the modules: - - * keys are e.g. 
('os', 'path', 'normpath') - - * values are tuples of the form: - ([obj1, obj2, obj3], [ver1, ver2]) - - "ver1" refer to the version of the globals of "os" - "ver2" refer to the version of the globals of "os.path" - "obj3" is the resulting "normpath" function - - - upon changes to the global dict, "fastlookup.clear()" is called - - - after the fastlookup entry is filled for a given - LOAD_CHAINED_GLOBAL index, the following checks need - to be performed in the bytecode implementation:: - - value = f_globals.fastlookup.get(key, None) - if value is None: - # fill entry - else: - # check that our cached lookups are still valid - assert isinstance(value, tuple) - objects, versions = value - i = 0 - while i < len(versions): - lastversion = versions[i] - ver = getver_for_obj(objects[i]) - if ver == -1 or ver != lastversion: - name = key[i] - objects[i] = space.getattr(curobj, name) - versions[i] = ver - curobj = objects[i] - i += 1 - return objects[i] - - def getver_for_obj(obj): - if "obj is not Module": - return -1 - return obj.w_dict.version diff --git a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt b/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.clever_malloc_removal_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for clever malloc removal (:config:`translation.backendopt.clever_malloc_removal`). diff --git a/pypy/doc/config/objspace.std.builtinshortcut.txt b/pypy/doc/config/objspace.std.builtinshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.builtinshortcut.txt +++ /dev/null @@ -1,5 +0,0 @@ -A shortcut speeding up primitive operations between built-in types. - -This is a space-time trade-off: at the moment, this option makes a -translated pypy-c executable bigger by about 1.7 MB. (This can probably -be improved with careful analysis.) 
diff --git a/pypy/doc/config/objspace.std.withmapdict.txt b/pypy/doc/config/objspace.std.withmapdict.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withmapdict.txt +++ /dev/null @@ -1,5 +0,0 @@ -Enable the new version of "sharing dictionaries". - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#sharing-dicts diff --git a/pypy/doc/extradoc.txt b/pypy/doc/extradoc.txt deleted file mode 100644 --- a/pypy/doc/extradoc.txt +++ /dev/null @@ -1,349 +0,0 @@ -================================================= -PyPy - papers, talks and related projects -================================================= - -Papers ----------------------------------- - -*Articles about PyPy published so far, most recent first:* (bibtex_ file) - -* `High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`_, - A. Cuni, Ph.D. thesis - -* `Tracing the Meta-Level: PyPy's Tracing JIT Compiler`_, - C.F. Bolz, A. Cuni, M. Fijalkowski, A. Rigo - -* `Faster than C#: Efficient Implementation of Dynamic Languages on .NET`_, - A. Cuni, D. Ancona and A. Rigo - -* `Automatic JIT Compiler Generation with Runtime Partial Evaluation`_ - (Master Thesis), C.F. Bolz - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_, D. Ancona, M. Ancona, A. Cuni and N.D. Matsakis - -* `How to *not* write Virtual Machines for Dynamic Languages`_, - C.F. Bolz and A. Rigo - -* `PyPy's approach to virtual machine construction`_, A. Rigo and S. Pedroni - - -*Non-published articles (only submitted so far, or technical reports):* - -* `Automatic generation of JIT compilers for dynamic languages in .NET`_, - D. Ancona, C.F. Bolz, A. Cuni and A. Rigo - -* `EU Reports`_: a list of all the reports we produced until 2007 for the - European Union sponsored part of PyPy. 
Notably, it includes: - -* `Core Object Optimization Results`_, PyPy Team - -* `Compiling Dynamic Language Implementations`_, PyPy Team - - -*Other research using PyPy (as far as we know it):* - -* `PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`_, - C. Bruni and T. Verwaest - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_, - C.F. Bolz, A. Kuhn, A. Lienhard, N. Matsakis, O. Nierstrasz, L. Renggli, - A. Rigo and T. Verwaest - - -*Previous work:* - -* `Representation-Based Just-in-Time Specialization and the Psyco Prototype - for Python`_, A. Rigo - - -.. _bibtex: http://codespeak.net/svn/pypy/extradoc/talk/bibtex.bib -.. _`High performance implementation of Python for CLI/.NET with JIT compiler generation for dynamic languages`: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`How to *not* write Virtual Machines for Dynamic Languages`: http://codespeak.net/svn/pypy/extradoc/talk/dyla2007/dyla.pdf -.. _`Tracing the Meta-Level: PyPy's Tracing JIT Compiler`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009/bolz-tracing-jit.pdf -.. _`Faster than C#: Efficient Implementation of Dynamic Languages on .NET`: http://codespeak.net/svn/pypy/extradoc/talk/icooolps2009-dotnet/cli-jit.pdf -.. _`Automatic JIT Compiler Generation with Runtime Partial Evaluation`: http://codespeak.net/svn/user/cfbolz/jitpl/thesis/final-master.pdf -.. _`RPython: A Step towards Reconciling Dynamically and Statically Typed OO Languages`: http://www.disi.unige.it/person/AnconaD/papers/Recent_abstracts.html#AACM-DLS07 -.. _`EU Reports`: index-report.html -.. _`PyGirl: Generating Whole-System VMs from High-Level Prototypes using PyPy`: http://www.iam.unibe.ch/~verwaest/pygirl.pdf -.. _`Representation-Based Just-in-Time Specialization and the Psyco Prototype for Python`: http://psyco.sourceforge.net/psyco-pepm-a.ps.gz -.. 
_`Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`Automatic generation of JIT compilers for dynamic languages in .NET`: http://codespeak.net/svn/pypy/extradoc/talk/ecoop2009/main.pdf -.. _`Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`Compiling Dynamic Language Implementations`: http://codespeak.net/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - - -Talks and Presentations ----------------------------------- - -Talks in 2010 -+++++++++++++ - -* `PyCon 2010`_. - - -Talks in 2009 -+++++++++++++ - -* `RuPy 2009`_. - -* `EuroPython talks 2009`_. - -* `PyCon talks 2009`_. - -* `Wroclaw (Poland) presentation`_ by Maciej Fijalkowski. Introduction, - including about the current JIT. - -* `PyPy talk at OpenBossa 09`_ (blog post). - - -Talks in 2008 -+++++++++++++ - -* Talk `at PyCon Poland 08`_. In Polish. - -* `The PyPy Project and You`_, by Michael Hudson at OSDC 2008. - -* `Back to the Future in One Week -- Implementing a Smalltalk VM in PyPy`_ - by C.F. Bolz et al.; `pdf of the presentation`__ at S3 2008. - -* `EuroPython talks 2008`_. - -* PyPy at the `Maemo summit`_. - -* `PyCon UK 2008 - JIT`_ and `PyCon UK 2008 - Status`_. - -* `PyCon Italy 2008`_. - -* Talk by Maciej Fijalkowski `at SFI 08`_, Cracow (Poland) Academic IT - Festival. - -* `RuPy 2008`_. - -* `PyCon 2008`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/s3-2008/talk.pdf - - -Talks in 2007 -+++++++++++++ - -* Our "road show" tour of the United States: presentations `at IBM`__ - and `at Google`__. - -* `ESUG 2007`_. - -* `RPython: A Step towards Reconciling Dynamically and Statically Typed - OO Languages`_ at DLS 2007. `Pdf of the presentation`__. - -* Talks at `Bern (Switzerland) 2007`_. - -* `PyCon UK 2007`_. - -* A presentation in Dresden_ by Maciej Fijalkowski. 
- -* Multiple talks at `EuroPython 2007`_. - -* A presentation at `Bad Honnef 2007`_ by C.F. Bolz about the Prolog - interpreter. - -* A `Dzug talk`_ by Holger Krekel. - -* Multiple talks at `PyCon 2007`_. - -* A talk at `PyCon - Uno 2007`_. - -* `RuPy 2007`_. - -* `Warsaw 2007`_. - -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-ibm/ -.. __: http://codespeak.net/svn/pypy/extradoc/talk/roadshow-google/Pypy_architecture.pdf -.. __: http://codespeak.net/svn/pypy/extradoc/talk/dls2007/rpython-talk.pdf - - -Talks in 2006 -+++++++++++++ - -* `Warsaw 2006`_. - -* `Tokyo 2006`_. - -* `PyPy's VM Approach`_ talk, given by Armin Rigo at the Dynamic Languages - Symposium at OOPSLA'06 (Portland OR), and by Samuele Pedroni at Intel - Hillsboro (OR) (October). The talk presents the paper - `PyPy's approach to virtual machine construction`_ accepted for - the symposium. - -* `PyPy Status`_ talk, given by Samuele Pedroni at the Vancouner - Python Workshop 2006 (August). - -* `Trouble in Paradise`_: the Open Source Project PyPy, - EU-funding and Agile Practices talk, by Bea During at - Agile 2006 (experience report). - -* `Sprint Driven Development`_, Agile Methodologies in a - Distributed Open Source Project (PyPy) talk, by Bea During - at XP 2006 (experience report). - -* `Kill -1`_: process refactoring in the PyPy project talk, by Bea During - at the Agile track/Europython 2006. - -* `What can PyPy do for you`_, by Armin Rigo and Carl Friedrich Bolz given at - EuroPython 2006. The talk describes practical usecases of PyPy. - -* `PyPy 3000`_, a purely implementation-centered lightning talk at EuroPython - 2006, given by Armin Rigo and Holger Krekel. - -* `PyPy introduction at EuroPython 2006`_, given by Michael Hudson, also - stating the status of the project. 
- -* Very similar to the EuroPython intro talk (but somewhat older) is the - `PyPy intro`_ talk, given by Michael Hudson at ACCU 2006 (April) - -* `PyPy development method`_ talk, given by Bea During and - Holger Krekel at Pycon2006 - -Talks in 2005 -+++++++++++++ - - -* `PyPy - the new Python implementation on the block`_, - given by Carl Friedrich Bolz and Holger Krekel at the - 22nd Chaos Communication Conference in Berlin, Dec. 2005. - -* `Open Source, EU-Funding and Agile Methods`_, given by Holger Krekel - and Bea During at the 22nd Chaos Communication Conference in Berlin, Dec. 2005 - -* `Sprinting the PyPy way`_, an overview about our sprint methodology, given by - Bea During during EuroPython 2005. (More PyPy talks were given, but are - not present in detail.) - -* `PyCon 2005`_ animated slices, mostly reporting on the translator status. - -* `py lib slides`_ from the py lib talk at PyCon 2005 - (py is used as a support/testing library for PyPy). - -Talks in 2004 -+++++++++++++ - -* `EU funding for FOSS`_ talk on Chaos Communication - Conference in Berlin, Dec 2004. - -Talks in 2003 -+++++++++++++ - -* oscon2003-paper_ an early paper presented at Oscon 2003 describing - what the PyPy project is about and why you should care. - -* `Architecture introduction slides`_ a mostly up-to-date - introduction for the Amsterdam PyPy-Sprint Dec 2003. - -.. _`PyCon 2010`: http://morepypy.blogspot.com/2010/02/pycon-2010-report.html -.. _`RuPy 2009`: http://morepypy.blogspot.com/2009/11/pypy-on-rupy-2009.html -.. _`PyPy 3000`: http://codespeak.net/pypy/extradoc/talk/ep2006/pypy3000.txt -.. _`What can PyPy do for you`: http://codespeak.net/pypy/extradoc/talk/ep2006/usecases-slides.html -.. _`PyPy introduction at EuroPython 2006`: http://codespeak.net/pypy/extradoc/talk/ep2006/intro.pdf -.. _`PyPy - the new Python implementation on the block`: http://codespeak.net/pypy/extradoc/talk/22c3/hpk-tech.html -.. 
_`PyPy development method`: http://codespeak.net/pypy/extradoc/talk/pycon2006/method_talk.html -.. _`PyPy intro`: http://codespeak.net/pypy/extradoc/talk/accu2006/accu-2006.pdf -.. _oscon2003-paper: http://codespeak.net/pypy/extradoc/talk/oscon2003-paper.html -.. _`Architecture introduction slides`: http://codespeak.net/pypy/extradoc/talk/amsterdam-sprint-intro.pdf -.. _`EU funding for FOSS`: http://codespeak.net/pypy/extradoc/talk/2004-21C3-pypy-EU-hpk.pdf -.. _`py lib slides`: http://codespeak.net/pypy/extradoc/talk/2005-pycon-py.pdf -.. _`PyCon 2005`: http://codespeak.net/pypy/extradoc/talk/pypy-talk-pycon2005/README.html -.. _`Trouble in Paradise`: http://codespeak.net/pypy/extradoc/talk/agile2006/during-oss-sprints_talk.pdf -.. _`Sprint Driven Development`: http://codespeak.net/pypy/extradoc/talk/xp2006/during-xp2006-sprints.pdf -.. _`Kill -1`: http://codespeak.net/pypy/extradoc/talk/ep2006/kill_1_agiletalk.pdf -.. _`Open Source, EU-Funding and Agile Methods`: http://codespeak.net/pypy/extradoc/talk/22c3/agility.pdf -.. _`PyPy Status`: http://codespeak.net/pypy/extradoc/talk/vancouver/talk.html -.. _`Sprinting the PyPy way`: http://codespeak.net/svn/pypy/extradoc/talk/ep2005/pypy_sprinttalk_ep2005bd.pdf -.. _`PyPy's VM Approach`: http://codespeak.net/pypy/extradoc/talk/dls2006/talk.html -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf -.. _`EuroPython talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/ep2009/ -.. _`PyCon talks 2009`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2009/ -.. _`Wroclaw (Poland) presentation`: http://codespeak.net/svn/pypy/extradoc/talk/wroclaw2009/talk.pdf -.. _`PyPy talk at OpenBossa 09`: http://morepypy.blogspot.com/2009/03/pypy-talk-at-openbossa-09.html -.. _`at SFI 08`: http://codespeak.net/svn/pypy/extradoc/talk/sfi2008/ -.. _`at PyCon Poland 08`: http://codespeak.net/svn/pypy/extradoc/talk/pyconpl-2008/talk.pdf -.. 
_`The PyPy Project and You`: http://codespeak.net/svn/pypy/extradoc/talk/osdc2008/osdc08.pdf -.. _`EuroPython talks 2008`: http://codespeak.net/svn/pypy/extradoc/talk/ep2008/ -.. _`Maemo summit`: http://morepypy.blogspot.com/2008/09/pypypython-at-maemo-summit.html -.. _`PyCon UK 2008 - JIT`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/jit/pypy-vm.pdf -.. _`PyCon UK 2008 - Status`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uk-2008/status/status.pdf -.. _`PyCon Italy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-italy-2008/pypy-vm.pdf -.. _`RuPy 2008`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2008/ -.. _`RuPy 2007`: http://codespeak.net/svn/pypy/extradoc/talk/rupy2007/ -.. _`PyCon 2008`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2008/ -.. _`ESUG 2007`: http://codespeak.net/svn/pypy/extradoc/talk/esug2007/ -.. _`Bern (Switzerland) 2007`: http://codespeak.net/svn/pypy/extradoc/talk/bern2007/ -.. _`PyCon UK 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pyconuk07/ -.. _Dresden: http://codespeak.net/svn/pypy/extradoc/talk/dresden/ -.. _`EuroPython 2007`: http://codespeak.net/svn/pypy/extradoc/talk/ep2007/ -.. _`Bad Honnef 2007`: http://codespeak.net/svn/pypy/extradoc/talk/badhonnef2007/talk.pdf -.. _`Dzug talk`: http://codespeak.net/svn/pypy/extradoc/talk/dzug2007/dzug2007.txt -.. _`PyCon 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon2007/ -.. _`PyCon - Uno 2007`: http://codespeak.net/svn/pypy/extradoc/talk/pycon-uno2007/pycon07.pdf -.. _`Warsaw 2007`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2007/ -.. _`Warsaw 2006`: http://codespeak.net/svn/pypy/extradoc/talk/warsaw2006/ -.. _`Tokyo 2006`: http://codespeak.net/svn/pypy/extradoc/talk/tokyo/ - - -Related projects ----------------------------------- - -* TraceMonkey_ is using a tracing JIT, similar to the tracing - JITs generated by our (in-progress) JIT generator. 
- -* Dynamo_ showcased `transparent dynamic optimization`_ - generating an optimized version of a binary program at runtime. - -* Tailoring Dynamo_ to interpreter implementations and challenges - - Gregory Sullivan et. al., - `Dynamic Native Optimization of Native Interpreters`_. IVME 03. 2003. - -* Stackless_ is a recursion-free version of Python. - -* Psyco_ is a just-in-time specializer for Python. - -* JikesRVM_ a research dynamic optimizing Java VM written in Java. - -* `Squeak`_ is a Smalltalk-80 implementation written in - Smalltalk, being used in `Croquet`_, an experimental - distributed multi-user/multi-programmer virtual world. - -* `LLVM`_ the low level virtual machine project. - -* `CLR under the hood`_ (powerpoint, works with open office) gives - a good introduction to the underlying models of Microsoft's Common - Language Runtime, the Intermediate Language, JIT and GC issues. - -* spyweb translates Python programs to Scheme. (site unavailable) - -* Jython_ is a Python implementation in Java. - -* IronPython_ a new Python implementation compiling Python into - Microsoft's Common Language Runtime (CLR) Intermediate Language (IL). - -* Tunes_ is not entirely unrelated. The web site changed a lot, but a - snapshot of the `old Tunes Wiki`_ is available on codespeak; browsing - through it is a lot of fun. - -.. _TraceMonkey: https://wiki.mozilla.org/JavaScript:TraceMonkey -.. _`CLR under the hood`: http://download.microsoft.com/download/2/4/d/24dfac0e-fec7-4252-91b9-fb2310603f14/CLRUnderTheHood.BradA.ppt -.. _Stackless: http://stackless.com -.. _Psyco: http://psyco.sourceforge.net -.. _Jython: http://www.jython.org -.. _`Squeak`: http://www.squeak.org/ -.. _`Croquet`: http://www.opencroquet.org/ -.. _`transparent dynamic optimization`: http://www.hpl.hp.com/techreports/1999/HPL-1999-77.pdf -.. _Dynamo: http://www.hpl.hp.com/techreports/1999/HPL-1999-78.pdf -.. _testdesign: coding-guide.html#test-design -.. 
_feasible: http://codespeak.net/pipermail/pypy-dev/2004q2/001289.html -.. _rock: http://codespeak.net/pipermail/pypy-dev/2004q1/001255.html -.. _LLVM: http://llvm.org/ -.. _IronPython: http://www.codeplex.com/Wiki/View.aspx?ProjectName=IronPython -.. _`Dynamic Native Optimization of Native Interpreters`: http://www.ai.mit.edu/~gregs/dynamorio.html -.. _JikesRVM: http://jikesrvm.sf.net -.. _Tunes: http://tunes.org -.. _`old Tunes Wiki`: http://codespeak.net/cliki.tunes.org/ diff --git a/pypy/doc/discussion/cli-optimizations.txt b/pypy/doc/discussion/cli-optimizations.txt deleted file mode 100644 --- a/pypy/doc/discussion/cli-optimizations.txt +++ /dev/null @@ -1,233 +0,0 @@ -Possible optimizations for the CLI backend -========================================== - -Stack push/pop optimization ---------------------------- - -The CLI's VM is a stack based machine: this fact doesn't play nicely -with the SSI form the flowgraphs are generated in. At the moment -gencli does a literal translation of the SSI statements, allocating a -new local variable for each variable of the flowgraph. - -For example, consider the following RPython code and the corresponding -flowgraph:: - - def bar(x, y): - foo(x+y, x-y) - - - inputargs: x_0 y_0 - v0 = int_add(x_0, y_0) - v1 = int_sub(x_0, y_0) - v2 = directcall((sm foo), v0, v1) - -This is the IL code generated by the CLI backend:: - - .locals init (int32 v0, int32 v1, int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - stloc 'v0' - ldarg 'x_0' - ldarg 'y_0' - sub - stloc 'v1' - ldloc 'v0' - ldloc 'v1' - call int32 foo(int32, int32) - stloc 'v2' - -As you can see, the results of 'add' and 'sub' are stored in v0 and -v1, respectively, then v0 and v1 are reloaded onto stack. 
These -store/load is redundant, since the code would work nicely even without -them:: - - .locals init (int32 v2) - - block0: - ldarg 'x_0' - ldarg 'y_0' - add - ldarg 'x_0' - ldarg 'y_0' - sub - call int32 foo(int32, int32) - stloc 'v2' - -I've checked the native code generated by the Mono Jit on x86 and I've -seen that it does not optimize it. I haven't checked the native code -generated by Microsoft CLR, yet. - -Thus, we might consider to optimize it manually; it should not be so -difficult, but it is not trivial because we have to make sure that the -dropped locals are used only once. - - -Mapping RPython exceptions to native CLI exceptions ---------------------------------------------------- - -Both RPython and CLI have its own set of exception classes: some of -these are pretty similar; e.g., we have OverflowError, -ZeroDivisionError and IndexError on the first side and -OverflowException, DivideByZeroException and IndexOutOfRangeException -on the other side. - -The first attempt was to map RPython classes to their corresponding -CLI ones: this worked for simple cases, but it would have triggered -subtle bugs in more complex ones, because the two exception -hierarchies don't completely overlap. - -For now I've chosen to build an RPython exception hierarchy -completely independent from the CLI one, but this means that we can't -rely on exceptions raised by standard operations. The currently -implemented solution is to do an exception translation on-the-fly; for -example, the 'ind_add_ovf' is translated into the following IL code:: - - .try - { - ldarg 'x_0' - ldarg 'y_0' - add.ovf - stloc 'v1' - leave __check_block_2 - } - catch [mscorlib]System.OverflowException - { - newobj instance void class exceptions.OverflowError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_OverflowError_meta - stfld class Object_meta Object::meta - throw - } - -I.e., it catches the builtin OverflowException and raises a RPython -OverflowError. 
- -I haven't measured timings yet, but I guess that this machinery brings -to some performance penalties even in the non-overflow case; a -possible optimization is to do the on-the-fly translation only when it -is strictly necessary, i.e. only when the except clause catches an -exception class whose subclass hierarchy is compatible with the -builtin one. As an example, consider the following RPython code:: - - try: - return mylist[0] - except IndexError: - return -1 - -Given that IndexError has no subclasses, we can map it to -IndexOutOfBoundException and directly catch this one:: - - try - { - ldloc 'mylist' - ldc.i4 0 - call int32 getitem(MyListType, int32) - ... - } - catch [mscorlib]System.IndexOutOfBoundException - { - // return -1 - ... - } - -By contrast we can't do so if the except clause catches classes that -don't directly map to any builtin class, such as LookupError:: - - try: - return mylist[0] - except LookupError: - return -1 - -Has to be translated in the old way:: - - .try - { - ldloc 'mylist' - ldc.i4 0 - - .try - { - call int32 getitem(MyListType, int32) - } - catch [mscorlib]System.IndexOutOfBoundException - { - // translate IndexOutOfBoundException into IndexError - newobj instance void class exceptions.IndexError::.ctor() - dup - ldsfld class Object_meta pypy.runtime.Constants::exceptions_IndexError_meta - stfld class Object_meta Object::meta - throw - } - ... - } - .catch exceptions.LookupError - { - // return -1 - ... - } - - -Specializing methods of List ----------------------------- - -Most methods of RPython lists are implemented by ll_* helpers placed -in rpython/rlist.py. For some of those we have a direct correspondent -already implemented in .NET List<>; we could use the oopspec attribute -for doing an on-the-fly replacement of these low level helpers with -their builtin correspondent. As an example the 'append' method is -already mapped to pypylib.List.append. Thanks to Armin Rigo for the -idea of using oopspec. 
- - -Doing some caching on Dict --------------------------- - -The current implementations of ll_dict_getitem and ll_dict_get in -ootypesystem.rdict do two consecutive lookups (calling ll_contains and -ll_get) on the same key. We might cache the result of -pypylib.Dict.ll_contains so that the successive ll_get don't need a -lookup. Btw, we need some profiling before choosing the best way. Or -we could directly refactor ootypesystem.rdict for doing a single -lookup. - -XXX -I tried it on revision 32917 and performance are slower! I don't know -why, but pypy.net pystone.py is slower by 17%, and pypy.net -richards.py is slower by 71% (!!!). I don't know why, need to be -investigated further. - - -Optimize StaticMethod ---------------------- - -:: - - 2006-10-02, 13:41 - - antocuni: do you try to not wrap static methods that are just called and not passed around - no - I think I don't know how to detect them - antocuni: you should try to render them just as static methods not as instances when possible - you need to track what appears only in direct_calls vs other places - - -Optimize Unicode ----------------- - -We should try to use native .NET unicode facilities instead of our -own. These should save both time (especially startup time) and memory. - -On 2006-10-02 I got these benchmarks: - -Pypy.NET Startup time Memory used -with unicodedata ~12 sec 112508 Kb -without unicodedata ~6 sec 79004 Kb - -The version without unicodedata is buggy, of course. - -Unfortunately it seems that .NET doesn't expose all the things we -need, so we will still need some data. For example there is no way to -get the unicode name of a char. diff --git a/pypy/doc/config/objspace.std.optimized_list_getitem.txt b/pypy/doc/config/objspace.std.optimized_list_getitem.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.optimized_list_getitem.txt +++ /dev/null @@ -1,1 +0,0 @@ -Optimized list[int] a bit. 
diff --git a/pypy/doc/geninterp.txt b/pypy/doc/geninterp.txt deleted file mode 100644 --- a/pypy/doc/geninterp.txt +++ /dev/null @@ -1,188 +0,0 @@ -The Interpreter-Level backend ------------------------------ - -http://codespeak.net/pypy/trunk/pypy/translator/geninterplevel.py - -Motivation -++++++++++ - -PyPy often makes use of `application-level`_ helper methods. -The idea of the 'geninterplevel' backend is to automatically transform -such application level implementations to their equivalent representation -at interpreter level. Then, the RPython to C translation hopefully can -produce more efficient code than always re-interpreting these methods. - -One property of translation from application level Python to -Python is, that the produced code does the same thing as the -corresponding interpreted code, but no interpreter is needed -any longer to execute this code. - -.. _`application-level`: coding-guide.html#app-preferable - -Bootstrap issue -+++++++++++++++ - -One issue we had so far was of bootstrapping: some pieces of the -interpreter (e.g. exceptions) were written in geninterped code. -It is unclear how much of it is left, thought. - -That bootstrap issue is (was?) solved by invoking a new bytecode interpreter -which runs on FlowObjspace. FlowObjspace is complete without -complicated initialization. It is able to do abstract interpretation -of any Rpythonic code, without actually implementing anything. It just -records all the operations the bytecode interpreter would have done by -building flowgraphs for all the code. What the Python backend does is -just to produce correct Python code from these flowgraphs and return -it as source code. In the produced code Python operations recorded in -the original flowgraphs are replaced by calls to the corresponding -methods in the `object space`_ interface. - -.. _`object space`: objspace.html - -Example -+++++++ - -.. _implementation: ../../pypy/translator/geninterplevel.py - -Let's try a little example. 
You might want to look at the flowgraph that it -produces. Here, we directly run the Python translation and look at the -generated source. See also the header section of the implementation_ for the -interface:: - - >>> from pypy.translator.geninterplevel import translate_as_module - >>> entrypoint, source = translate_as_module(""" - ... - ... def g(n): - ... i = 0 - ... while n: - ... i = i + n - ... n = n - 1 - ... return i - ... - ... """) - -This call has invoked a PyPy bytecode interpreter running on FlowObjspace, -recorded every possible codepath into a flowgraph, and then rendered the -following source code:: - - #!/bin/env python - # -*- coding: LATIN-1 -*- - - def initapp2interpexec(space): - """NOT_RPYTHON""" - - def g(space, w_n_1): - goto = 3 # startblock - while True: - - if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - - if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - - if goto == 3: - w_n, w_0 = w_n_1, gi_0 - goto = 1 - continue - - if goto == 4: - return w_0 - - fastf_g = g - - g3dict = space.newdict() - gs___name__ = space.new_interned_str('__name__') - gs_app2interpexec = space.new_interned_str('app2interpexec') - space.setitem(g3dict, gs___name__, gs_app2interpexec) - gs_g = space.new_interned_str('g') - from pypy.interpreter import gateway - gfunc_g = space.wrap(gateway.interp2app(fastf_g, unwrap_spec=[gateway.ObjSpace, gateway.W_Root])) - space.setitem(g3dict, gs_g, gfunc_g) - gi_1 = space.wrap(1) - gi_0 = space.wrap(0) - return g3dict - -You see that actually a single function is produced: -``initapp2interpexec``. This is the function that you will call with a -space as argument. It defines a few functions and then does a number -of initialization steps, builds the global objects the function need, -and produces the PyPy function object ``gfunc_g``. 
- -The return value is ``g3dict``, which contains a module name and the -function we asked for. - -Let's have a look at the body of this code: The definition of ``g`` is -used as ``fast_g`` in the ``gateway.interp2app`` which constructs a -PyPy function object which takes care of argument unboxing (based on -the ``unwrap_spec``), and of invoking the original ``g``. - -We look at the definition of ``g`` itself which does the actual -computation. Comparing to the flowgraph, you see a code block for -every block in the graph. Since Python has no goto statement, the -jumps between the blocks are implemented by a loop that switches over -a ``goto`` variable. - -:: - - . if goto == 1: - v0 = space.is_true(w_n) - if v0 == True: - goto = 2 - else: - goto = 4 - -This is the implementation of the "``while n:``". There is no implicit state, -everything is passed over to the next block by initializing its -input variables. This directly resembles the nature of flowgraphs. -They are completely stateless. - - -:: - - . if goto == 2: - w_1 = space.add(w_0, w_n) - w_2 = space.sub(w_n, gi_1) - w_n, w_0 = w_2, w_1 - goto = 1 - continue - -The "``i = i + n``" and "``n = n - 1``" instructions. -You see how every instruction produces a new variable. -The state is again shuffled around by assigning to the -input variables ``w_n`` and ``w_0`` of the next target, block 1. - -Note that it is possible to rewrite this by re-using variables, -trying to produce nested blocks instead of the goto construction -and much more. The source would look much more like what we -used to write by hand. For the C backend, this doesn't make much -sense since the compiler optimizes it for us. For the Python interpreter it could -give a bit more speed. But this is a temporary format and will -get optimized anyway when we produce the executable. 
- -Interplevel Snippets in the Sources -+++++++++++++++++++++++++++++++++++ - -Code written in application space can consist of complete files -to be translated, or they -can be tiny snippets scattered all over a source file, similar -to our example from above. - -Translation of these snippets is done automatically and cached -in pypy/_cache with the modulename and the md5 checksum appended -to it as file name. If you have run your copy of pypy already, -this folder should exist and have some generated files in it. -These files consist of the generated code plus a little code -that auto-destructs the cached file (plus .pyc/.pyo versions) -if it is executed as __main__. On windows this means you can wipe -a cached code snippet clear by double-clicking it. Note also that -the auto-generated __init__.py file wipes the whole directory -when executed. diff --git a/pypy/doc/garbage_collection.txt b/pypy/doc/garbage_collection.txt deleted file mode 100644 --- a/pypy/doc/garbage_collection.txt +++ /dev/null @@ -1,127 +0,0 @@ -========================== -Garbage Collection in PyPy -========================== - -.. contents:: -.. sectnum:: - -Introduction -============ - -**Warning**: The overview and description of our garbage collection -strategy and framework is not here but in the `EU-report on this -topic`_. The present document describes the specific garbage collectors -that we wrote in our framework. - -.. _`EU-report on this topic`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf - - -Garbage collectors currently written for the GC framework -========================================================= - -(Very rough sketch only for now.) - -Reminder: to select which GC you want to include in a translated -RPython program, use the ``--gc=NAME`` option of ``translate.py``. -For more details, see the `overview of command line options for -translation`_. - -.. 
_`overview of command line options for translation`: config/commandline.html#translation - -Mark and Sweep --------------- - -Classical Mark and Sweep collector. Also contains a lot of experimental -and half-unmaintained features. See `rpython/memory/gc/marksweep.py`_. - -Semispace copying collector ---------------------------- - -Two arenas of equal size, with only one arena in use and getting filled -with new objects. When the arena is full, the live objects are copied -into the other arena using Cheney's algorithm. The old arena is then -cleared. See `rpython/memory/gc/semispace.py`_. - -On Unix the clearing is done by reading ``/dev/zero`` into the arena, -which is extremely memory efficient at least on Linux: it lets the -kernel free the RAM that the old arena used and replace it all with -allocated-on-demand memory. - -The size of each semispace starts at 8MB but grows as needed when the -amount of objects alive grows. - -Generational GC ---------------- - -This is a two-generations GC. See `rpython/memory/gc/generation.py`_. - -It is implemented as a subclass of the Semispace copying collector. It -adds a nursery, which is a chunk of the current semispace. Its size is -computed to be half the size of the CPU Level 2 cache. Allocations fill -the nursery, and when it is full, it is collected and the objects still -alive are moved to the rest of the current semispace. - -The idea is that it is very common for objects to die soon after they -are created. Generational GCs help a lot in this case, particularly if -the amount of live objects really manipulated by the program fits in the -Level 2 cache. Moreover, the semispaces fill up much more slowly, -making full collections less frequent. - -Hybrid GC ---------- - -This is a three-generations GC. - -It is implemented as a subclass of the Generational GC. The Hybrid GC -can handle both objects that are inside and objects that are outside the -semispaces ("external"). 
The external objects are not moving and -collected in a mark-and-sweep fashion. Large objects are allocated as -external objects to avoid costly moves. Small objects that survive for -a long enough time (several semispace collections) are also made -external so that they stop moving. - -This is coupled with a segregation of the objects in three generations. -Each generation is collected much less often than the previous one. The -division of the generations is slightly more complicated than just -nursery / semispace / external; see the diagram at the start of the -source code, in `rpython/memory/gc/hybrid.py`_. - -Mark & Compact GC ------------------ - -Inspired, at least partially, by Squeak's garbage collector, this is a -single-arena GC in which collection compacts the objects in-place. The -main point of this GC is to save as much memory as possible (to be not -worse than the Semispace), but without the peaks of double memory usage -during collection. - -Unlike the Semispace GC, collection requires a number of passes over the -data. This makes collection quite slower. Future improvements could be -to add a nursery to Mark & Compact in order to mitigate this issue. - -During a collection, we reuse the space in-place if it is still large -enough. If not, we need to allocate a new, larger space, and move the -objects there; however, this move is done chunk by chunk, and chunks are -cleared (i.e. returned to the OS) as soon as they have been moved away. -This means that (from the point of view of the OS) a collection will -never cause an important temporary growth of total memory usage. - -More precisely, a collection is triggered when the space contains more -than N*M bytes, where N is the number of bytes alive after the previous -collection and M is a constant factor, by default 1.5. This guarantees -that the total memory usage of the program never exceeds 1.5 times the -total size of its live objects. 
- -The objects themselves are quite compact: they are allocated next to -each other in the heap, separated by a GC header of only one word (4 -bytes on 32-bit platforms) and possibly followed by up to 3 bytes of -padding for non-word-sized objects (e.g. strings). There is a small -extra memory usage during collection: an array containing 2 bytes per -surviving object is needed to make a backup of (half of) the surviving -objects' header, in order to let the collector store temporary relation -information in the regular headers. - -More details are available as comments at the start of the source -in `rpython/memory/gc/markcompact.py`_. - -.. include:: _ref.txt diff --git a/pypy/doc/extending.txt b/pypy/doc/extending.txt deleted file mode 100644 --- a/pypy/doc/extending.txt +++ /dev/null @@ -1,103 +0,0 @@ - -Writing extension modules for pypy -=================================== - -This document tries to explain how to interface the PyPy python interpreter -with any external library. - -Note: We try to describe state-of-the art, but it -might fade out of date as this is the front on which things are changing -in pypy rapidly. - -Possibilities -============= - -Right now, there are three possibilities of providing third-party modules -for the PyPy python interpreter (in order of usefulness): - -* Write them in pure python and use ctypes, see ctypes_ - section - -* Write them in pure python and use direct libffi low-level bindings, See - \_rawffi_ module description. - -* Write them in RPython as mixedmodule_, using *rffi* as bindings. - -.. _ctypes: #CTypes -.. _\_rawffi: #LibFFI -.. _mixedmodule: #Mixed Modules - -CTypes -====== - -The ctypes module in PyPy is ready to use. -It's goal is to be as-compatible-as-possible with the -`CPython ctypes`_ version. Right now it's able to support large examples, -such as pyglet. 
PyPy is planning to have a 100% compatible ctypes -implementation, without the CPython C-level API bindings (so it is very -unlikely that direct object-manipulation trickery through this API will work). - -We also provide a `ctypes-configure`_ for overcoming the platform dependencies, -not relying on the ctypes codegen. This tool works by querying gcc about -platform-dependent details (compiling small snippets of C code and running -them), so it'll benefit not pypy-related ctypes-based modules as well. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html - -Pros ----- - -Stable, CPython-compatible API - -Cons ----- - -Only pure-python code (slow), problems with platform-dependency (although -we partially solve those). PyPy implementation is now very slow. - -_`CPython ctypes`: http://python.net/crew/theller/ctypes/ - -LibFFI -====== - -Mostly in order to be able to write a ctypes module, we developed a very -low-level libffi bindings. (libffi is a C-level library for dynamic calling, -which is used by CPython ctypes). This library provides stable and usable API, -although it's API is a very low-level one. It does not contain any -magic. - -Pros ----- - -Works. Combines disadvantages of using ctypes with disadvantages of -using mixed modules. Probably more suitable for a delicate code -where ctypes magic goes in a way. - -Cons ----- - -Slow. CPython-incompatible API, very rough and low-level - -Mixed Modules -============= - -This is the most advanced and powerful way of writing extension modules. -It has some serious disadvantages: - -* a mixed module needs to be written in RPython, which is far more - complicated than Python (XXX link) - -* due to lack of separate compilation (as of April 2008), each - compilation-check requires to recompile whole PyPy python interpreter, - which takes 0.5-1h. We plan to solve this at some point in near future. 
- -* although rpython is a garbage-collected language, the border between - C and RPython needs to be managed by hand (each object that goes into the - C level must be explicitly freed) XXX we try to solve this - -Some document is available `here`_ - -.. _`here`: rffi.html - -XXX we should provide detailed docs about lltype and rffi, especially if we - want people to follow that way. diff --git a/pypy/doc/config/objspace.usemodules._testing.txt b/pypy/doc/config/objspace.usemodules._testing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._testing.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_testing' module. This module exists only for PyPy own testing purposes. - -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/translation.gc.txt b/pypy/doc/config/translation.gc.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gc.txt +++ /dev/null @@ -1,13 +0,0 @@ -Choose the Garbage Collector used by the translated program: - - - "ref": reference counting. Takes very long to translate and the result is - slow. - - - "marksweep": naive mark & sweep. - - - "semispace": a copying semi-space GC. - - - "generation": a generational GC using the semi-space GC for the - older generation. - - - "boehm": use the Boehm conservative GC. diff --git a/pypy/doc/config/translation.instrument.txt b/pypy/doc/config/translation.instrument.txt deleted file mode 100644 --- a/pypy/doc/config/translation.instrument.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/objspace.usemodules.imp.txt b/pypy/doc/config/objspace.usemodules.imp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.imp.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'imp' module. -This module is included by default. 
diff --git a/pypy/doc/contributor.txt b/pypy/doc/contributor.txt deleted file mode 100644 --- a/pypy/doc/contributor.txt +++ /dev/null @@ -1,105 +0,0 @@ - -Contributors to PyPy -==================== - -Here is a list of developers who have committed to the PyPy source -code base, ordered by number of commits (which is certainly not a very -appropriate measure but it's something):: - - - Armin Rigo - Maciej Fijalkowski - Carl Friedrich Bolz - Samuele Pedroni - Antonio Cuni - Michael Hudson - Christian Tismer - Holger Krekel - Eric van Riet Paap - Richard Emslie - Anders Chrigstrom - Amaury Forgeot d Arc - Aurelien Campeas - Anders Lehmann - Niklaus Haldimann - Seo Sanghyeon - Leonardo Santagada - Lawrence Oluyede - Jakub Gustak - Guido Wesdorp - Benjamin Peterson - Alexander Schremmer - Niko Matsakis - Ludovic Aubry - Alex Martelli - Toon Verwaest - Stephan Diehl - Adrien Di Mascio - Stefan Schwarzer - Tomek Meka - Patrick Maupin - Jacob Hallen - Laura Creighton - Bob Ippolito - Camillo Bruni - Simon Burton - Bruno Gola - Alexandre Fayolle - Marius Gedminas - Guido van Rossum - Valentino Volonghi - Adrian Kuhn - Paul deGrandis - Gerald Klix - Wanja Saatkamp - Anders Hammarquist - Oscar Nierstrasz - Eugene Oden - Lukas Renggli - Guenter Jantzen - Dinu Gherman - Bartosz Skowron - Georg Brandl - Ben Young - Jean-Paul Calderone - Nicolas Chauvat - Rocco Moretti - Michael Twomey - boria - Jared Grubb - Olivier Dormond - Stuart Williams - Jens-Uwe Mager - Justas Sadzevicius - Mikael Schönenberg - Brian Dorsey - Jonathan David Riehl - Beatrice During - Elmo Mäntynen - Andreas Friedge - Alex Gaynor - Anders Qvist - Alan McIntyre - Bert Freudenberg - Pieter Zieschang - Jacob Oscarson - Lutz Paelike - Michael Schneider - Artur Lisiecki - Lene Wagner - Christopher Armstrong - Jan de Mooij - Jacek Generowicz - Gasper Zejn - Stephan Busemann - Yusei Tahara - Godefroid Chappelle - Toby Watson - Andrew Thompson - Joshua Gilbert - Anders Sigfridsson - David Schneider - Michael 
Chermside - tav - Martin Blais - Victor Stinner diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline.txt b/pypy/doc/config/translation.backendopt.profile_based_inline.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline.txt +++ /dev/null @@ -1,10 +0,0 @@ -Inline flowgraphs only for call-sites for which there was a minimal -number of calls during an instrumented run of the program. Callee -flowgraphs are considered candidates based on a weight heuristic like -for basic inlining. (see :config:`translation.backendopt.inline`, -:config:`translation.backendopt.profile_based_inline_threshold` ). - -The option takes as value a string which is the arguments to pass to -the program for the instrumented run. - -This optimization is not used by default. \ No newline at end of file diff --git a/pypy/doc/config/translation.txt b/pypy/doc/config/translation.txt deleted file mode 100644 --- a/pypy/doc/config/translation.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.shared.txt b/pypy/doc/config/translation.shared.txt deleted file mode 100644 --- a/pypy/doc/config/translation.shared.txt +++ /dev/null @@ -1,2 +0,0 @@ -Build pypy as a shared library or a DLL, with a small executable to run it. -This is necessary on Windows to expose the C API provided by the cpyext module. diff --git a/pypy/doc/config/objspace.usemodules.pypyjit.txt b/pypy/doc/config/objspace.usemodules.pypyjit.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.pypyjit.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'pypyjit' module. diff --git a/pypy/doc/config/translation.thread.txt b/pypy/doc/config/translation.thread.txt deleted file mode 100644 --- a/pypy/doc/config/translation.thread.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable threading. The only target where this has visible effect is PyPy (this -also enables the ``thread`` module then). 
diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -708,6 +708,7 @@ # Note that this may be called recursively; that's why the # allocate() methods must fill in the cache as soon as they # have the object, before they fill its fields. + assert self.virtuals_cache is not None v = self.virtuals_cache[index] if not v: v = self.rd_virtuals[index].allocate(self, index) diff --git a/pypy/doc/config/translation.backendopt.inline_threshold.txt b/pypy/doc/config/translation.backendopt.inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for basic inlining (:config:`translation.backendopt.inline`). diff --git a/pypy/doc/coding-guide.txt b/pypy/doc/coding-guide.txt deleted file mode 100644 --- a/pypy/doc/coding-guide.txt +++ /dev/null @@ -1,1088 +0,0 @@ -===================================== -PyPy - Coding Guide -===================================== - -.. contents:: -.. sectnum:: - - -This document describes coding requirements and conventions for -working with the PyPy code base. Please read it carefully and -ask back any questions you might have. The document does not talk -very much about coding style issues. We mostly follow `PEP 8`_ though. -If in doubt, follow the style that is already present in the code base. - -.. _`PEP 8`: http://www.python.org/dev/peps/pep-0008/ - -.. _`RPython`: - -Overview and motivation -======================== - -We are writing a Python interpreter in Python, using Python's well known -ability to step behind the algorithmic problems as a language. At first glance, -one might think this achieves nothing but a better understanding how the -interpreter works. This alone would make it worth doing, but we have much -larger goals. - - -CPython vs. 
PyPy -------------------- - -Compared to the CPython implementation, Python takes the role of the C -Code. We rewrite the CPython interpreter in Python itself. We could -also aim at writing a more flexible interpreter at C level but we -want to use Python to give an alternative description of the interpreter. - -The clear advantage is that such a description is shorter and simpler to -read, and many implementation details vanish. The drawback of this approach is -that this interpreter will be unbearably slow as long as it is run on top -of CPython. - -To get to a useful interpreter again, we need to translate our -high-level description of Python to a lower level one. One rather -straight-forward way is to do a whole program analysis of the PyPy -interpreter and create a C source, again. There are many other ways, -but let's stick with this somewhat canonical approach. - - -.. _`application-level`: -.. _`interpreter-level`: - -Application-level and interpreter-level execution and objects -------------------------------------------------------------- - -Since Python is used for implementing all of our code base, there is a -crucial distinction to be aware of: that between *interpreter-level* objects and -*application-level* objects. The latter are the ones that you deal with -when you write normal python programs. Interpreter-level code, however, -cannot invoke operations nor access attributes from application-level -objects. You will immediately recognize any interpreter level code in -PyPy, because half the variable and object names start with a ``w_``, which -indicates that they are `wrapped`_ application-level values. - -Let's show the difference with a simple example. 
To sum the contents of -two variables ``a`` and ``b``, one would write the simple application-level -``a+b`` -- in contrast, the equivalent interpreter-level code is -``space.add(w_a, w_b)``, where ``space`` is an instance of an object space, -and ``w_a`` and ``w_b`` are typical names for the wrapped versions of the -two variables. - -It helps to remember how CPython deals with the same issue: interpreter -level code, in CPython, is written in C and thus typical code for the -addition is ``PyNumber_Add(p_a, p_b)`` where ``p_a`` and ``p_b`` are C -variables of type ``PyObject*``. This is conceptually similar to how we write -our interpreter-level code in Python. - -Moreover, in PyPy we have to make a sharp distinction between -interpreter- and application-level *exceptions*: application exceptions -are always contained inside an instance of ``OperationError``. This -makes it easy to distinguish failures (or bugs) in our interpreter-level code -from failures appearing in a python application level program that we are -interpreting. - - -.. _`app-preferable`: - -Application level is often preferable -------------------------------------- - -Application-level code is substantially higher-level, and therefore -correspondingly easier to write and debug. For example, suppose we want -to implement the ``update`` method of dict objects. 
Programming at -application level, we can write an obvious, simple implementation, one -that looks like an **executable definition** of ``update``, for -example:: - - def update(self, other): - for k in other.keys(): - self[k] = other[k] - -If we had to code only at interpreter level, we would have to code -something much lower-level and involved, say something like:: - - def update(space, w_self, w_other): - w_keys = space.call_method(w_other, 'keys') - w_iter = space.iter(w_keys) - while True: - try: - w_key = space.next(w_iter) - except OperationError, e: - if not e.match(space, space.w_StopIteration): - raise # re-raise other app-level exceptions - break - w_value = space.getitem(w_other, w_key) - space.setitem(w_self, w_key, w_value) - -This interpreter-level implementation looks much more similar to the C -source code. It is still more readable than its C counterpart because -it doesn't contain memory management details and can use Python's native -exception mechanism. - -In any case, it should be obvious that the application-level implementation -is definitely more readable, more elegant and more maintainable than the -interpreter-level one (and indeed, dict.update is really implemented at -applevel in PyPy). - -In fact, in almost all parts of PyPy, you find application level code in -the middle of interpreter-level code. Apart from some bootstrapping -problems (application level functions need a certain initialization -level of the object space before they can be executed), application -level code is usually preferable. We have an abstraction (called the -'Gateway') which allows the caller of a function to remain ignorant of -whether a particular function is implemented at application or -interpreter level. 
- -our runtime interpreter is "restricted python" ----------------------------------------------- - -In order to make a C code generator feasible all code on interpreter level has -to restrict itself to a subset of the Python language, and we adhere to some -rules which make translation to lower level languages feasible. Code on -application level can still use the full expressivity of Python. - -Unlike source-to-source translations (like e.g. Starkiller_ or more recently -ShedSkin_) we start -translation from live python code objects which constitute our Python -interpreter. When doing its work of interpreting bytecode our Python -implementation must behave in a static way often referenced as -"RPythonic". - -.. _Starkiller: http://www.python.org/pycon/dc2004/papers/1/paper.pdf -.. _ShedSkin: http://shed-skin.blogspot.com/ - -However, when the PyPy interpreter is started as a Python program, it -can use all of the Python language until it reaches a certain point in -time, from which on everything that is being executed must be static. -That is, during initialization our program is free to use the -full dynamism of Python, including dynamic code generation. - -An example can be found in the current implementation which is quite -elegant: For the definition of all the opcodes of the Python -interpreter, the module ``dis`` is imported and used to initialize our -bytecode interpreter. (See ``__initclass__`` in -`pypy/interpreter/pyopcode.py`_). This -saves us from adding extra modules to PyPy. The import code is run at -startup time, and we are allowed to use the CPython builtin import -function. - -After the startup code is finished, all resulting objects, functions, -code blocks etc. must adhere to certain runtime restrictions which we -describe further below. Here is some background for why this is so: -during translation, a whole program analysis ("type inference") is -performed, which makes use of the restrictions defined in RPython. 
This -enables the code generator to emit efficient machine level replacements -for pure integer objects, for instance. - -Restricted Python -================= - -RPython Definition, not ------------------------ - -The list and exact details of the "RPython" restrictions are a somewhat -evolving topic. In particular, we have no formal language definition -as we find it more practical to discuss and evolve the set of -restrictions while working on the whole program analysis. If you -have any questions about the restrictions below then please feel -free to mail us at pypy-dev at codespeak net. - -.. _`wrapped object`: coding-guide.html#wrapping-rules - -Flow restrictions -------------------------- - -**variables** - - variables should contain values of at most one type as described in - `Object restrictions`_ at each control flow point, that means for - example that joining control paths using the same variable to - contain both a string and a int must be avoided. It is allowed to - mix None (basically with the role of a null pointer) with many other - types: `wrapped objects`, class instances, lists, dicts, strings, etc. - but *not* with int and floats. - -**constants** - - all module globals are considered constants. Their binding must not - be changed at run-time. Moreover, global (i.e. prebuilt) lists and - dictionaries are supposed to be immutable: modifying e.g. a global - list will give inconsistent results. However, global instances don't - have this restriction, so if you need mutable global state, store it - in the attributes of some prebuilt singleton instance. - -**control structures** - - all allowed but yield, ``for`` loops restricted to builtin types - -**range** - - ``range`` and ``xrange`` are identical. ``range`` does not necessarily create an array, - only if the result is modified. It is allowed everywhere and completely - implemented. The only visible difference to CPython is the inaccessibility - of the ``xrange`` fields start, stop and step. 
- -**definitions** - - run-time definition of classes or functions is not allowed. - -**generators** - - generators are not supported. - -**exceptions** - -+ fully supported -+ see below `Exception rules`_ for restrictions on exceptions raised by built-in operations - - -Object restrictions -------------------------- - -We are using - -**integer, float, boolean** - - works. - -**strings** - - a lot of, but not all string methods are supported. Indexes can be - negative. In case they are not, then you get slightly more efficient - code if the translator can prove that they are non-negative. When - slicing a string it is necessary to prove that the slice start and - stop indexes are non-negative. - -**tuples** - - no variable-length tuples; use them to store or return pairs or n-tuples of - values. Each combination of types for elements and length constitute a separate - and not mixable type. - -**lists** - - lists are used as an allocated array. Lists are over-allocated, so list.append() - is reasonably fast. Negative or out-of-bound indexes are only allowed for the - most common operations, as follows: - - - *indexing*: - positive and negative indexes are allowed. Indexes are checked when requested - by an IndexError exception clause. - - - *slicing*: - the slice start must be within bounds. The stop doesn't need to, but it must - not be smaller than the start. All negative indexes are disallowed, except for - the [:-1] special case. No step. - - - *other operators*: - ``+``, ``+=``, ``in``, ``*``, ``*=``, ``==``, ``!=`` work as expected. - - - *methods*: - append, index, insert, extend, reverse, pop. The index used in pop() follows - the same rules as for *indexing* above. The index used in insert() must be within - bounds and not negative. - -**dicts** - - dicts with a unique key type only, provided it is hashable. - String keys have been the only allowed key types for a while, but this was generalized. 
- After some re-optimization, - the implementation could safely decide that all string dict keys should be interned. - - -**list comprehensions** - - may be used to create allocated, initialized arrays. - After list over-allocation was introduced, there is no longer any restriction. - -**functions** - -+ statically called functions may use defaults and a variable number of - arguments (which may be passed as a list instead of a tuple, so write code - that does not depend on it being a tuple). - -+ dynamic dispatch enforces the use of signatures that are equal for all - possible called function, or at least "compatible enough". This - concerns mainly method calls, when the method is overridden or in any - way given different definitions in different classes. It also concerns - the less common case of explicitly manipulated function objects. - Describing the exact compatibility rules is rather involved (but if you - break them, you should get explicit errors from the rtyper and not - obscure crashes.) - -**builtin functions** - - A number of builtin functions can be used. The precise set can be - found in `pypy/annotation/builtin.py`_ (see ``def builtin_xxx()``). - Some builtin functions may be limited in what they support, though. - - ``int, float, str, ord, chr``... are available as simple conversion - functions. Note that ``int, float, str``... have a special meaning as - a type inside of isinstance only. - -**classes** - -+ methods and other class attributes do not change after startup -+ single inheritance is fully supported -+ simple mixins work too, but the mixed in class needs a ``_mixin_ = True`` - class attribute - -+ classes are first-class objects too - -**objects** - - in PyPy, wrapped objects are borrowed from the object space. Just like - in CPython, code that needs e.g. a dictionary can use a wrapped dict - and the object space operations on it. - -This layout makes the number of types to take care about quite limited. 
- - -Integer Types -------------------------- - -While implementing the integer type, we stumbled over the problem that -integers are quite in flux in CPython right now. Starting on Python 2.2, -integers mutate into longs on overflow. However, shifting to the left -truncates up to 2.3 but extends to longs as well in 2.4. By contrast, we need -a way to perform wrap-around machine-sized arithmetic by default, while still -being able to check for overflow when we need it explicitly. Moreover, we need -a consistent behavior before and after translation. - -We use normal integers for signed arithmetic. It means that before -translation we get longs in case of overflow, and after translation we get a -silent wrap-around. Whenever we need more control, we use the following -helpers (which live the `pypy/rlib/rarithmetic.py`_): - -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py - - -**ovfcheck()** - - This special function should only be used with a single arithmetic operation - as its argument, e.g. ``z = ovfcheck(x+y)``. Its intended meaning is to - perform the given operation in overflow-checking mode. - - At run-time, in Python, the ovfcheck() function itself checks the result - and raises OverflowError if it is a ``long``. But the code generators use - ovfcheck() as a hint: they replace the whole ``ovfcheck(x+y)`` expression - with a single overflow-checking addition in C. - -**ovfcheck_lshift()** - - ovfcheck_lshift(x, y) is a workaround for ovfcheck(x<=0.13.0) can be run with the ``--rpython-mode`` command line option. This option -enables the RPython checker which will checks for some of the -restrictions RPython adds on standard Python code (and uses a -more aggressive type inference than the one used by default by -pylint). The full list of checks is available in the documentation of -Pylint. 
- -RPylint can be a nice tool to get some information about how much work -will be needed to convert a piece of Python code to RPython, or to get -started with RPython. While this tool will not guarantee that the -code it checks will be translate successfully, it offers a few nice -advantages over running a translation: - -* it is faster and therefore provides feedback faster than ``translate.py`` - -* it does not stop at the first problem it finds, so you can get more - feedback on the code in one run - -* the messages tend to be a bit less cryptic - -* you can easily run it from emacs, vi, eclipse or visual studio. - -Note: if pylint is not prepackaged for your OS/distribution, or if -only an older version is available, you will need to install from -source. In that case, there are a couple of dependencies, -logilab-common_ and astng_ that you will need to install too before -you can use the tool. - -.. _Pylint: http://www.logilab.org/projects/pylint -.. _logilab-common: http://www.logilab.org/projects/common -.. _astng: http://www.logilab.org/projects/astng - - - -Wrapping rules -============== - -Wrapping ---------- - -PyPy is made of Python source code at two levels: there is on the one hand -*application-level code* that looks like normal Python code, and that -implements some functionalities as one would expect from Python code (e.g. one -can give a pure Python implementation of some built-in functions like -``zip()``). There is also *interpreter-level code* for the functionalities -that must more directly manipulate interpreter data and objects (e.g. the main -loop of the interpreter, and the various object spaces). - -Application-level code doesn't see object spaces explicitly: it runs using an -object space to support the objects it manipulates, but this is implicit. -There is no need for particular conventions for application-level code. The -sequel is only about interpreter-level code. 
(Ideally, no application-level -variable should be called ``space`` or ``w_xxx`` to avoid confusion.) - -The ``w_`` prefixes so lavishly used in the example above indicate, -by PyPy coding convention, that we are dealing with *wrapped* (or *boxed*) objects, -that is, interpreter-level objects which the object space constructs -to implement corresponding application-level objects. Each object -space supplies ``wrap``, ``unwrap``, ``int_w``, ``interpclass_w``, -etc. operations that move between the two levels for objects of simple -built-in types; each object space also implements other Python types -with suitable interpreter-level classes with some amount of internal -structure. - -For example, an application-level Python ``list`` -is implemented by the `standard object space`_ as an -instance of ``W_ListObject``, which has an instance attribute -``wrappeditems`` (an interpreter-level list which contains the -application-level list's items as wrapped objects). - -The rules are described in more details below. - - -Naming conventions ------------------- - -* ``space``: the object space is only visible at - interpreter-level code, where it is by convention passed around by the name - ``space``. - -* ``w_xxx``: any object seen by application-level code is an - object explicitly managed by the object space. From the - interpreter-level point of view, this is called a *wrapped* - object. The ``w_`` prefix is used for any type of - application-level object. - -* ``xxx_w``: an interpreter-level container for wrapped - objects, for example a list or a dict containing wrapped - objects. Not to be confused with a wrapped object that - would be a list or a dict: these are normal wrapped objects, - so they use the ``w_`` prefix. - - -Operations on ``w_xxx`` ------------------------ - -The core bytecode interpreter considers wrapped objects as black boxes. -It is not allowed to inspect them directly. 
The allowed -operations are all implemented on the object space: they are -called ``space.xxx()``, where ``xxx`` is a standard operation -name (``add``, ``getattr``, ``call``, ``eq``...). They are documented in the -`object space document`_. - -A short warning: **don't do** ``w_x == w_y`` or ``w_x is w_y``! -rationale for this rule is that there is no reason that two -wrappers are related in any way even if they contain what -looks like the same object at application-level. To check -for equality, use ``space.is_true(space.eq(w_x, w_y))`` or -even better the short-cut ``space.eq_w(w_x, w_y)`` returning -directly a interpreter-level bool. To check for identity, -use ``space.is_true(space.is_(w_x, w_y))`` or better -``space.is_w(w_x, w_y)``. - -.. _`object space document`: objspace.html#interface - -.. _`applevel-exceptions`: - -Application-level exceptions ----------------------------- - -Interpreter-level code can use exceptions freely. However, -all application-level exceptions are represented as an -``OperationError`` at interpreter-level. In other words, all -exceptions that are potentially visible at application-level -are internally an ``OperationError``. This is the case of all -errors reported by the object space operations -(``space.add()`` etc.). - -To raise an application-level exception:: - - raise OperationError(space.w_XxxError, space.wrap("message")) - -To catch a specific application-level exception:: - - try: - ... - except OperationError, e: - if not e.match(space, space.w_XxxError): - raise - ... - -This construct catches all application-level exceptions, so we -have to match it against the particular ``w_XxxError`` we are -interested in and re-raise other exceptions. The exception -instance ``e`` holds two attributes that you can inspect: -``e.w_type`` and ``e.w_value``. Do not use ``e.w_type`` to -match an exception, as this will miss exceptions that are -instances of subclasses. 
- -We are thinking about replacing ``OperationError`` with a -family of common exception classes (e.g. ``AppKeyError``, -``AppIndexError``...) so that we can more easily catch them. -The generic ``AppError`` would stand for all other -application-level classes. - - -.. _`modules`: - -Modules in PyPy -=============== - -Modules visible from application programs are imported from -interpreter or application level files. PyPy reuses almost all python -modules of CPython's standard library, currently from version 2.5.2. We -sometimes need to `modify modules`_ and - more often - regression tests -because they rely on implementation details of CPython. - -If we don't just modify an original CPython module but need to rewrite -it from scratch we put it into `lib_pypy/`_ as a pure application level -module. - -When we need access to interpreter-level objects we put the module into -`pypy/module`_. Such modules use a `mixed module mechanism`_ -which makes it convenient to use both interpreter- and application-level parts -for the implementation. Note that there is no extra facility for -pure-interpreter level modules, you just write a mixed module and leave the -application-level part empty. - -Determining the location of a module implementation ---------------------------------------------------- - -You can interactively find out where a module comes from, when running py.py. 
Here are examples for the possible locations::
This copying operation is -important because it keeps the original CPython tree clean and makes it -obvious what we had to change. - -.. _`mixed module mechanism`: -.. _`mixed modules`: - -Implementing a mixed interpreter/application level Module ---------------------------------------------------------- - -If a module needs to access PyPy's interpreter level -then it is implemented as a mixed module. - -Mixed modules are directories in `pypy/module`_ with an `__init__.py` -file containing specifications where each name in a module comes from. -Only specified names will be exported to a Mixed Module's applevel -namespace. - -Sometimes it is necessary to really write some functions in C (or -whatever target language). See `rffi`_ and `external functions -documentation`_ for details. The latter approach is cumbersome and -being phased out and former has currently quite a few rough edges. - -.. _`rffi`: rffi.html -.. _`external functions documentation`: translation.html#extfunccalls - -application level definitions -............................. - -Application level specifications are found in the `appleveldefs` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ you find the following -entry specifying where ``__builtin__.locals`` comes from:: - - ... - 'locals' : 'app_inspect.locals', - ... - -The ``app_`` prefix indicates that the submodule ``app_inspect`` is -interpreted at application level and the wrapped function value for ``locals`` -will be extracted accordingly. - -interpreter level definitions -............................. - -Interpreter level specifications are found in the ``interpleveldefs`` -dictionary found in ``__init__.py`` files of directories in ``pypy/module``. -For example, in `pypy/module/__builtin__/__init__.py`_ the following -entry specifies where ``__builtin__.len`` comes from:: - - ... - 'len' : 'operation.len', - ... 
- -The ``operation`` submodule lives at interpreter level and ``len`` -is expected to be exposable to application level. Here is -the definition for ``operation.len()``:: - - def len(space, w_obj): - "len(object) -> integer\n\nReturn the number of items of a sequence or mapping." - return space.len(w_obj) - -Exposed interpreter level functions usually take a ``space`` argument -and some wrapped values (see `wrapping rules`_) . - -You can also use a convenient shortcut in ``interpleveldefs`` dictionaries: -namely an expression in parentheses to specify an interpreter level -expression directly (instead of pulling it indirectly from a file):: - - ... - 'None' : '(space.w_None)', - 'False' : '(space.w_False)', - ... - -The interpreter level expression has a ``space`` binding when -it is executed. - -Adding an entry under pypy/module (e.g. mymodule) entails automatic -creation of a new config option (such as --withmod-mymodule and ---withoutmod-mymodule (the later being the default)) for py.py and -translate.py. - -Testing modules in ``lib_pypy/`` --------------------------------- - -You can go to the `lib_pypy/pypy_test/`_ directory and invoke the testing tool -("py.test" or "python ../../pypy/test_all.py") to run tests against the -lib_pypy hierarchy. Note, that tests in `lib_pypy/pypy_test/`_ are allowed -and encouraged to let their tests run at interpreter level although -`lib_pypy/`_ modules eventually live at PyPy's application level. -This allows us to quickly test our python-coded reimplementations -against CPython. - -Testing modules in ``pypy/module`` ----------------------------------- - -Simply change to ``pypy/module`` or to a subdirectory and `run the -tests as usual`_. - - -Testing modules in ``lib-python`` ------------------------------------ - -In order to let CPython's regression tests run against PyPy -you can switch to the `lib-python/`_ directory and run -the testing tool in order to start compliance tests. 
-(XXX check windows compatibility for producing test reports). - -Naming conventions and directory layout -=========================================== - -Directory and File Naming -------------------------- - -- directories/modules/namespaces are always **lowercase** - -- never use plural names in directory and file names - -- ``__init__.py`` is usually empty except for - ``pypy/objspace/*`` and ``pypy/module/*/__init__.py``. - -- don't use more than 4 directory nesting levels - -- keep filenames concise and completion-friendly. - -Naming of python objects ------------------------- - -- class names are **CamelCase** - -- functions/methods are lowercase and ``_`` separated - -- objectspace classes are spelled ``XyzObjSpace``. e.g. - - - StdObjSpace - - FlowObjSpace - -- at interpreter level and in ObjSpace all boxed values - have a leading ``w_`` to indicate "wrapped values". This - includes w_self. Don't use ``w_`` in application level - python only code. - -Committing & Branching to the repository ------------------------------------------------------ - -- write good log messages because several people - are reading the diffs. - -- if you add (text/py) files to the repository then please run - pypy/tool/fixeol in that directory. This will make sure - that the property 'svn:eol-style' is set to native which - allows checkin/checkout in native line-ending format. - -- branching (aka "svn copy") of source code should usually - happen at ``svn/pypy/trunk`` level in order to have a full - self-contained pypy checkout for each branch. For branching - a ``try1`` branch you would for example do:: - - svn cp http://codespeak.net/svn/pypy/trunk \ - http://codespeak.net/svn/pypy/branch/try1 - - This allows to checkout the ``try1`` branch and receive a - self-contained working-copy for the branch. Note that - branching/copying is a cheap operation with subversion, as it - takes constant time irrespective of the size of the tree. 
- -- To learn more about how to use subversion read `this document`_. - -.. _`this document`: svn-help.html - - - -.. _`using development tracker`: - -Using the development bug/feature tracker -========================================= - -We have a `development tracker`_, based on Richard Jones' -`roundup`_ application. You can file bugs, -feature requests or see what's going on -for the next milestone, both from an E-Mail and from a -web interface. - -use your codespeak login or register ------------------------------------- - -If you already committed to the PyPy source code, chances -are that you can simply use your codespeak login that -you use for subversion or for shell access. - -If you are not a commiter then you can still `register with -the tracker`_ easily. - -modifying Issues from svn commit messages ------------------------------------------ - -If you are committing something related to -an issue in the development tracker you -can correlate your login message to a tracker -item by following these rules: - -- put the content of ``issueN STATUS`` on a single - new line - -- `N` must be an existing issue number from the `development tracker`_. - -- STATUS is one of:: - - unread - chatting - in-progress - testing - duplicate - resolved - -.. _`register with the tracker`: https://codespeak.net/issue/pypy-dev/user?@template=register -.. _`development tracker`: http://codespeak.net/issue/pypy-dev/ -.. _`roundup`: http://roundup.sf.net - - -.. _`testing in PyPy`: -.. _`test-design`: - -Testing in PyPy -=============== - -Our tests are based on the new `py.test`_ tool which lets you write -unittests without boilerplate. All tests of modules -in a directory usually reside in a subdirectory **test**. There are -basically two types of unit tests: - -- **Interpreter Level tests**. They run at the same level as PyPy's - interpreter. - -- **Application Level tests**. 
They run at application level which means - that they look like straight python code but they are interpreted by PyPy. - -Both types of tests need an `objectspace`_ they can run with (the interpreter -dispatches operations on objects to an objectspace). If you run a test you -can usually give the '-o' switch to select an object space. E.g. '-o thunk' -will select the thunk object space. The default is the `Standard Object Space`_ -which aims to implement unmodified Python semantics. - -.. _`standard object space`: objspace.html#standard-object-space -.. _`objectspace`: objspace.html -.. _`py.test`: http://codespeak.net/py/current/doc/test.html - -Interpreter level tests ------------------------ - -You can write test functions and methods like this:: - - def test_something(space): - # use space ... - - class TestSomething: - def test_some(self): - # use 'self.space' here - -Note that the prefix `test` for test functions and `Test` for test -classes is mandatory. In both cases you can import Python modules at -module global level and use plain 'assert' statements thanks to the -usage of the `py.test`_ tool. - -Application Level tests ------------------------ - -For testing the conformance and well-behavedness of PyPy it -is often sufficient to write "normal" application-level -Python code that doesn't need to be aware of any particular -coding style or restrictions. If we have a choice we often -use application level tests which usually look like this:: - - def app_test_something(): - # application level test code - - class AppTestSomething: - def test_this(self): - # application level test code - -These application level test functions will run on top -of PyPy, i.e. they have no access to interpreter details. -You cannot use imported modules from global level because -they are imported at interpreter-level while you test code -runs at application level. If you need to use modules -you have to import them within the test function. 
- -Another possibility to pass in data into the AppTest is to use -the ``setup_class`` method of the AppTest. All wrapped objects that are -attached to the class there and start with ``w_`` can be accessed -via self (but without the ``w_``) in the actual test method. An example:: - - from pypy.objspace.std import StdObjSpace - - class AppTestErrno: - def setup_class(cls): - cls.space = StdObjSpace() - cls.w_d = cls.space.wrap({"a": 1, "b", 2}) - - def test_dict(self): - assert self.d["a"] == 1 - assert self.d["b"] == 2 - -.. _`run the tests as usual`: - -Command line tool test_all --------------------------- - -You can run almost all of PyPy's tests by invoking:: - - python test_all.py file_or_directory - -which is a synonym for the general `py.test`_ utility -located in the ``pypy`` directory. For switches to -modify test execution pass the ``-h`` option. - -Test conventions ----------------- - -- adding features requires adding appropriate tests. (It often even - makes sense to first write the tests so that you are sure that they - actually can fail.) - -- All over the pypy source code there are test/ directories - which contain unittests. Such scripts can usually be executed - directly or are collectively run by pypy/test_all.py - -- each test directory needs a copy of pypy/tool/autopath.py which - upon import will make sure that sys.path contains the directory - where 'pypy' is in. - -.. _`change documentation and website`: - -Changing documentation and website -================================== - -documentation/website files in your local checkout ---------------------------------------------------- - -Most of the PyPy's documentation and website is kept in -`pypy/documentation` and `pypy/documentation/website` respectively. -You can simply edit or add '.txt' files which contain ReST-markuped -files. Here is a `ReST quickstart`_ but you can also just look -at the existing documentation and see how things work. - -.. 
_`ReST quickstart`: http://docutils.sourceforge.net/docs/rst/quickref.html - -Automatically test documentation/website changes ------------------------------------------------- - -.. _`docutils home page`: -.. _`docutils`: http://docutils.sourceforge.net/ - -We automatically check referential integrity and ReST-conformance. In order to -run the tests you need docutils_ installed. Then go to the local checkout -of the documentation directory and run the tests:: - - cd .../pypy/documentation - python ../test_all.py - -If you see no failures chances are high that your modifications at least -don't produce ReST-errors or wrong local references. A side effect of running -the tests is that you have `.html` files in the documentation directory -which you can point your browser to! - -Additionally, if you also want to check for remote references inside -the documentation issue:: - - python ../test_all.py --checkremote - -which will check that remote URLs are reachable. - - -.. include:: _ref.txt diff --git a/pypy/doc/config/objspace.usemodules._ssl.txt b/pypy/doc/config/objspace.usemodules._ssl.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ssl.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the '_ssl' module, which implements SSL socket operations. diff --git a/pypy/doc/config/objspace.std.withrope.txt b/pypy/doc/config/objspace.std.withrope.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withrope.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable ropes to be the default string implementation. - -See the section in `Standard Interpreter Optimizations`_ for more details. - -.. 
_`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#ropes - - diff --git a/pypy/doc/discussion/outline-external-ootype.txt b/pypy/doc/discussion/outline-external-ootype.txt deleted file mode 100644 --- a/pypy/doc/discussion/outline-external-ootype.txt +++ /dev/null @@ -1,213 +0,0 @@ -Some discussion about external objects in ootype -================================================ - -Current approaches: - -* BasicExternal, used for js backend - -* SomeCliXxx for .NET backend - -BasicExternal -------------- - -* Is using types to make rpython happy (ie, every single method or field - is hardcoded) - -* Supports callbacks by SomeGenericCallable - -* Supports fields, also with callable fields - -SomeCliXxx ----------- - -* Supports method overloading - -* Supports inheritance in a better way - -* Supports static methods - -Would be extremely cool to have just one approach instead of two, -so here are some notes: - -* There should be one mechanism, factored out nicely out of any backend, - to support any possible backend (cli, js, jvm for now). - -* This approach might be eventually extended by a backend itself, but - as much as possible code should be factored out. - -* Backend should take care itself about creating such classes, either - manually or automatically. - -* Should support superset of needs of all backends (ie callbacks, - method overloading, etc.) - - -Proposal of alternative approach -================================ - -The goal of the task is to let RPython program access "external -objects" which are available in the target platform; these include: - - - external classes (e.g. for .NET: System.Collections.ArrayList) - - - external instances (e.g. for js: window, window.document) - - - external functions? (they are not needed for .NET and JVM, maybe - for js?) - -External objects should behave as much as possible as "internal -objects". 
- -Moreover, we want to preserve the possibility of *testing* RPython -programs on top of CPython if possible. For example, it should be -possible to RPython programs using .NET external objects using -PythonNet; probably there is something similar for JVM, but not for -JS as I know. - - -How to represent types ----------------------- - -First, some definitions: - - - high-level types are the types used by the annotator - (SomeInteger() & co.) - - - low-level types are the types used by the rtyper (Signed & co.) - - - platform-level types are the types used by the backends (e.g. int32 for - .NET) - -Usually, RPython types are described "top-down": we start from the -annotation, then the rtyper transforms the high-level types into -low-level types, then the backend transforms low-level types into -platform-level types. E.g. for .NET, SomeInteger() -> Signed -> int32. - -External objects are different: we *already* know the platform-level -types of our objects and we can't modify them. What we need to do is -to specify an annotation that after the high-level -> low-level -> -platform-level transformation will give us the correct types. - -For primitive types it is usually easy to find the correct annotation; -if we have an int32, we know that it's ootype is Signed and the -corresponding annotation is SomeInteger(). - -For non-primitive types such as classes, we must use a "bottom-up" -approach: first, we need a description of platform-level interface of -the class; then we construct the corresponding low-level type and -teach the backends how to treat such "external types". Finally, we -wrap the low-level types into special "external annotation". - -For example, consider a simple existing .NET class:: - - class Foo { - public float bar(int x, int y) { ... 
} - } - -The corresponding low-level type could be something like this:: - - Foo = ootype.ExternalInstance({'bar': ([Signed, Signed], Float)}) - -Then, the annotation for Foo's instances is SomeExternalInstance(Foo). -This way, the transformation from high-level types to platform-level -types is straightforward and correct. - -Finally, we need support for static methods: similarly for classes, we -can define an ExternalStaticMeth low-level type and a -SomeExternalStaticMeth annotation. - - -How to describe types ---------------------- - -To handle external objects we must specify their signatures. For CLI -and JVM the job can be easily automatized, since the objects have got -precise signatures. - -For JS, signatures must be written by hand, so we must provide a -convenient syntax for it; I think it should be possible to use the -current syntax and write a tool which translates it to low-level -types. - - -RPython interface ------------------ - -External objects are exposed as special Python objects that gets -annotated as SomeExternalXXX. Each backend can choose its own way to -provide these objects to the RPython programmer. - -External classes will be annotated as SomeExternalClass; two -operations are allowed: - - - call: used to instantiate the class, return an object which will - be annotated as SomeExternalInstance. - - - access to static methods: return an object which will be annotated - as SomeExternalStaticMeth. - -Instances are annotated as SomeExternalInstance. Prebuilt external -objects (such as JS's window.document) are annotated as -SomeExternalInstance(const=...). - -Open issues ------------ - -Exceptions -~~~~~~~~~~ - -.NET and JVM users want to catch external exceptions in a natural way; -e.g.:: - - try: - ... - except System.OverflowException: - ... - -This is not straightforward because to make the flow objspace happy the -object which represent System.OverflowException must be a real Python -class that inherits from Exception. 
- -This means that the Python objects which represent external classes -must be Python classes itself, and that classes representing -exceptions must be special cased and made subclasses of Exception. - - -Inheritance -~~~~~~~~~~~ - -It would be nice to allow programmers to inherit from an external -class. Not sure about the implications, though. - -Callbacks -~~~~~~~~~ - -I know that they are an issue for JS, but I don't know how they are -currently implemented. - -Special methods/properties -~~~~~~~~~~~~~~~~~~~~~~~~~~ - -In .NET there are special methods that can be accessed using a special -syntax, for example indexer or properties. It would be nice to have in -RPython the same syntax as C#. - - -Implementation details ----------------------- - -The CLI backend use a similar approach right now, but it could be -necessary to rewrite a part of it. - -To represent low-level types, it uses NativeInstance, a subclass of -ootype.Instance that contains all the information needed by the -backend to reference the class (e.g., the namespace). It also supports -overloading. - -For annotations, it reuses SomeOOInstance, which is also a wrapper -around a low-level type but it has been designed for low-level -helpers. It might be saner to use another annotation not to mix apples -and oranges, maybe factoring out common code. - -I don't know whether and how much code can be reused from the existing -bltregistry. diff --git a/pypy/doc/config/translation.linkerflags.txt b/pypy/doc/config/translation.linkerflags.txt deleted file mode 100644 --- a/pypy/doc/config/translation.linkerflags.txt +++ /dev/null @@ -1,1 +0,0 @@ -Experimental. Specify extra flags to pass to the linker. diff --git a/pypy/doc/config/objspace.std.withstrjoin.txt b/pypy/doc/config/objspace.std.withstrjoin.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withstrjoin.txt +++ /dev/null @@ -1,7 +0,0 @@ -Enable "string join" objects. 
- -See the page about `Standard Interpreter Optimizations`_ for more details. - -.. _`Standard Interpreter Optimizations`: ../interpreter-optimizations.html#string-join-objects - - diff --git a/pypy/doc/config/objspace.usemodules._file.txt b/pypy/doc/config/objspace.usemodules._file.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._file.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the '_file' module. It is an internal module that contains helper -functionality for the builtin ``file`` type. - -.. internal diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -184,7 +184,10 @@ class Optimization(object): next_optimization = None - + + def __init__(self): + pass # make rpython happy + def propagate_forward(self, op): raise NotImplementedError @@ -193,7 +196,7 @@ def test_emittable(self, op): return self.is_emittable(op) - + def is_emittable(self, op): return self.next_optimization.test_emittable(op) @@ -249,7 +252,7 @@ def reconstruct_for_next_iteration(self, optimizer=None, valuemap=None): #return self.__class__() raise NotImplementedError - + class Optimizer(Optimization): @@ -268,8 +271,10 @@ self.posponedop = None self.exception_might_have_happened = False self.newoperations = [] - if self.loop.inputvalues: - self.setup_inputstate() + if loop is not None: + self.call_pure_results = loop.call_pure_results + if self.loop.inputvalues: + self.setup_inputstate() self.set_optimizations(optimizations) def setup_inputstate(self): @@ -291,20 +296,20 @@ else: optimizations = [] self.first_optimization = self - - self.optimizations = optimizations + + self.optimizations = optimizations def force_at_end_of_preamble(self): self.resumedata_memo = resume.ResumeDataLoopMemo(self.metainterp_sd) for o in self.optimizations: o.force_at_end_of_preamble() - + def reconstruct_for_next_iteration(self, optimizer=None, 
valuemap=None): assert optimizer is None assert valuemap is None valuemap = {} new = Optimizer(self.metainterp_sd, self.loop) - optimizations = [o.reconstruct_for_next_iteration(new, valuemap) for o in + optimizations = [o.reconstruct_for_next_iteration(new, valuemap) for o in self.optimizations] new.set_optimizations(optimizations) @@ -321,7 +326,7 @@ for key, value in self.loop_invariant_results.items(): new.loop_invariant_results[key] = \ value.get_cloned(new, valuemap) - + new.pure_operations = self.pure_operations new.producer = self.producer assert self.posponedop is None @@ -445,7 +450,7 @@ def test_emittable(self, op): return True - + def emit_operation(self, op): ###self.heap_op_optimizer.emitting_operation(op) self._emit_operation(op) @@ -523,7 +528,7 @@ canfold = nextop.getopnum() == rop.GUARD_NO_OVERFLOW else: nextop = None - + if canfold: for i in range(op.numargs()): if self.get_constant_box(op.getarg(i)) is None: diff --git a/pypy/doc/_ref.txt b/pypy/doc/_ref.txt deleted file mode 100644 --- a/pypy/doc/_ref.txt +++ /dev/null @@ -1,107 +0,0 @@ -.. _`demo/`: ../../demo -.. _`demo/pickle_coroutine.py`: ../../demo/pickle_coroutine.py -.. _`lib-python/`: ../../lib-python -.. _`lib-python/2.5.2/dis.py`: ../../lib-python/2.5.2/dis.py -.. _`annotation/`: -.. _`pypy/annotation`: ../../pypy/annotation -.. _`pypy/annotation/annrpython.py`: ../../pypy/annotation/annrpython.py -.. _`annotation/binaryop.py`: ../../pypy/annotation/binaryop.py -.. _`pypy/annotation/builtin.py`: ../../pypy/annotation/builtin.py -.. _`pypy/annotation/model.py`: ../../pypy/annotation/model.py -.. _`bin/`: ../../pypy/bin -.. _`config/`: ../../pypy/config -.. _`pypy/config/pypyoption.py`: ../../pypy/config/pypyoption.py -.. _`doc/`: ../../pypy/doc -.. _`doc/config/`: ../../pypy/doc/config -.. _`doc/discussion/`: ../../pypy/doc/discussion -.. _`interpreter/`: -.. _`pypy/interpreter`: ../../pypy/interpreter -.. _`pypy/interpreter/argument.py`: ../../pypy/interpreter/argument.py -.. 
_`interpreter/astcompiler/`: -.. _`pypy/interpreter/astcompiler`: ../../pypy/interpreter/astcompiler -.. _`pypy/interpreter/executioncontext.py`: ../../pypy/interpreter/executioncontext.py -.. _`pypy/interpreter/function.py`: ../../pypy/interpreter/function.py -.. _`interpreter/gateway.py`: -.. _`pypy/interpreter/gateway.py`: ../../pypy/interpreter/gateway.py -.. _`pypy/interpreter/generator.py`: ../../pypy/interpreter/generator.py -.. _`pypy/interpreter/mixedmodule.py`: ../../pypy/interpreter/mixedmodule.py -.. _`pypy/interpreter/module.py`: ../../pypy/interpreter/module.py -.. _`pypy/interpreter/nestedscope.py`: ../../pypy/interpreter/nestedscope.py -.. _`pypy/interpreter/pyopcode.py`: ../../pypy/interpreter/pyopcode.py -.. _`interpreter/pyparser/`: -.. _`pypy/interpreter/pyparser`: ../../pypy/interpreter/pyparser -.. _`pypy/interpreter/pyparser/pytokenizer.py`: ../../pypy/interpreter/pyparser/pytokenizer.py -.. _`pypy/interpreter/pyparser/parser.py`: ../../pypy/interpreter/pyparser/parser.py -.. _`pypy/interpreter/pyparser/pyparse.py`: ../../pypy/interpreter/pyparser/pyparse.py -.. _`pypy/interpreter/pyparser/future.py`: ../../pypy/interpreter/pyparser/future.py -.. _`pypy/interpreter/pyparser/metaparser.py`: ../../pypy/interpreter/pyparser/metaparser.py -.. _`pypy/interpreter/astcompiler/astbuilder.py`: ../../pypy/interpreter/astcompiler/astbuilder.py -.. _`pypy/interpreter/astcompiler/optimize.py`: ../../pypy/interpreter/astcompiler/optimize.py -.. _`pypy/interpreter/astcompiler/codegen.py`: ../../pypy/interpreter/astcompiler/codegen.py -.. _`pypy/interpreter/astcompiler/tools/asdl_py.py`: ../../pypy/interpreter/astcompiler/tools/asdl_py.py -.. _`pypy/interpreter/astcompiler/tools/Python.asdl`: ../../pypy/interpreter/astcompiler/tools/Python.asdl -.. _`pypy/interpreter/astcompiler/assemble.py`: ../../pypy/interpreter/astcompiler/assemble.py -.. _`pypy/interpreter/astcompiler/symtable.py`: ../../pypy/interpreter/astcompiler/symtable.py -.. 
_`pypy/interpreter/astcompiler/asthelpers.py`: ../../pypy/interpreter/astcompiler/asthelpers.py -.. _`pypy/interpreter/astcompiler/ast.py`: ../../pypy/interpreter/astcompiler/ast.py -.. _`pypy/interpreter/typedef.py`: ../../pypy/interpreter/typedef.py -.. _`lib/`: -.. _`lib_pypy/`: ../../lib_pypy -.. _`lib/distributed/`: ../../lib_pypy/distributed -.. _`lib_pypy/stackless.py`: ../../lib_pypy/stackless.py -.. _`lib_pypy/pypy_test/`: ../../lib_pypy/pypy_test -.. _`module/`: -.. _`pypy/module`: -.. _`pypy/module/`: ../../pypy/module -.. _`pypy/module/__builtin__/__init__.py`: ../../pypy/module/__builtin__/__init__.py -.. _`pypy/module/_stackless/test/test_clonable.py`: ../../pypy/module/_stackless/test/test_clonable.py -.. _`pypy/module/_stackless/test/test_composable_coroutine.py`: ../../pypy/module/_stackless/test/test_composable_coroutine.py -.. _`objspace/`: -.. _`pypy/objspace`: ../../pypy/objspace -.. _`objspace/dump.py`: ../../pypy/objspace/dump.py -.. _`objspace/flow/`: ../../pypy/objspace/flow -.. _`objspace/std/`: -.. _`pypy/objspace/std`: ../../pypy/objspace/std -.. _`objspace/taint.py`: ../../pypy/objspace/taint.py -.. _`objspace/thunk.py`: -.. _`pypy/objspace/thunk.py`: ../../pypy/objspace/thunk.py -.. _`objspace/trace.py`: -.. _`pypy/objspace/trace.py`: ../../pypy/objspace/trace.py -.. _`pypy/rlib`: -.. _`rlib/`: ../../pypy/rlib -.. _`pypy/rlib/rarithmetic.py`: ../../pypy/rlib/rarithmetic.py -.. _`pypy/rlib/test`: ../../pypy/rlib/test -.. _`pypy/rpython`: -.. _`pypy/rpython/`: -.. _`rpython/`: ../../pypy/rpython -.. _`rpython/lltypesystem/`: ../../pypy/rpython/lltypesystem -.. _`pypy/rpython/lltypesystem/lltype.py`: -.. _`rpython/lltypesystem/lltype.py`: ../../pypy/rpython/lltypesystem/lltype.py -.. _`rpython/memory/`: ../../pypy/rpython/memory -.. _`rpython/memory/gc/generation.py`: ../../pypy/rpython/memory/gc/generation.py -.. _`rpython/memory/gc/hybrid.py`: ../../pypy/rpython/memory/gc/hybrid.py -.. 
_`rpython/memory/gc/markcompact.py`: ../../pypy/rpython/memory/gc/markcompact.py -.. _`rpython/memory/gc/marksweep.py`: ../../pypy/rpython/memory/gc/marksweep.py -.. _`rpython/memory/gc/semispace.py`: ../../pypy/rpython/memory/gc/semispace.py -.. _`rpython/ootypesystem/`: ../../pypy/rpython/ootypesystem -.. _`rpython/ootypesystem/ootype.py`: ../../pypy/rpython/ootypesystem/ootype.py -.. _`rpython/rint.py`: ../../pypy/rpython/rint.py -.. _`rpython/rlist.py`: ../../pypy/rpython/rlist.py -.. _`rpython/rmodel.py`: ../../pypy/rpython/rmodel.py -.. _`pypy/rpython/rtyper.py`: ../../pypy/rpython/rtyper.py -.. _`pypy/rpython/test/test_llinterp.py`: ../../pypy/rpython/test/test_llinterp.py -.. _`pypy/test_all.py`: ../../pypy/test_all.py -.. _`tool/`: ../../pypy/tool -.. _`tool/algo/`: ../../pypy/tool/algo -.. _`tool/pytest/`: ../../pypy/tool/pytest -.. _`pypy/translator`: -.. _`translator/`: ../../pypy/translator -.. _`translator/backendopt/`: ../../pypy/translator/backendopt -.. _`translator/c/`: ../../pypy/translator/c -.. _`translator/cli/`: ../../pypy/translator/cli -.. _`translator/goal/`: ../../pypy/translator/goal -.. _`pypy/translator/goal/targetnopstandalone.py`: ../../pypy/translator/goal/targetnopstandalone.py -.. _`translator/jvm/`: ../../pypy/translator/jvm -.. _`translator/stackless/`: ../../pypy/translator/stackless -.. _`translator/tool/`: ../../pypy/translator/tool -.. _`translator/js/`: http://codespeak.net/svn/pypy/branch/oo-jit/pypy/translator/js/ diff --git a/pypy/doc/config/objspace.usemodules._ffi.txt b/pypy/doc/config/objspace.usemodules._ffi.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._ffi.txt +++ /dev/null @@ -1,1 +0,0 @@ -Applevel interface to libffi. 
It is more high level than _rawffi, and most importantly it is JIT friendly diff --git a/pypy/doc/config/opt.txt b/pypy/doc/config/opt.txt deleted file mode 100644 --- a/pypy/doc/config/opt.txt +++ /dev/null @@ -1,50 +0,0 @@ -The ``--opt`` or ``-O`` translation option -========================================== - -This meta-option selects a default set of optimization -settings to use during a translation. Usage:: - - translate.py --opt=# - translate.py -O# - -where ``#`` is the desired optimization level. The valid choices are: - - ============= ======================================================== - Level Description - ============= ======================================================== - `--opt=0` all optimizations off; fastest translation `(*)`_ - `--opt=1` non-time-consuming optimizations on `(*)`_ - `--opt=size` minimize the size of the final executable `(*)`_ - `--opt=mem` minimize the run-time RAM consumption (in-progress) - `--opt=2` all optimizations on; good run-time performance - `--opt=3` same as `--opt=2`; remove asserts; gcc profiling `(**)`_ - `--opt=jit` includes the JIT and tweak other optimizations for it - ============= ======================================================== - -.. _`(*)`: - -`(*)`: The levels `0, 1` and `size` use the `Boehm-Demers-Weiser -garbage collector`_ (Debian package ``libgc-dev``). The translation -itself is faster and consumes less memory; the final executable is -smaller but slower. The other levels use one of our built-in `custom -garbage collectors`_. - -.. _`(**)`: - -`(**)`: The level `3` enables gcc profile-driven recompilation when -translating PyPy. - -The exact set of optimizations enabled by each level depends -on the backend. 
Individual translation targets can also -select their own options based on the level: when translating -PyPy, the level `mem` enables the memory-saving object -implementations in the object space; levels `2` and `3` enable -the advanced object implementations that give an increase in -performance; level `3` also enables gcc profile-driven -recompilation. - -The default level is `2`. - - -.. _`Boehm-Demers-Weiser garbage collector`: http://www.hpl.hp.com/personal/Hans_Boehm/gc/ -.. _`custom garbage collectors`: ../garbage_collection.html diff --git a/pypy/doc/config/objspace.usemodules.itertools.txt b/pypy/doc/config/objspace.usemodules.itertools.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.itertools.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the interp-level 'itertools' module. -If not included, a slower app-level version of itertools is used. diff --git a/pypy/doc/config/translation.jit.txt b/pypy/doc/config/translation.jit.txt deleted file mode 100644 --- a/pypy/doc/config/translation.jit.txt +++ /dev/null @@ -1,2 +0,0 @@ -Enable the JIT generator, for targets that have JIT support. -Experimental so far. diff --git a/pypy/doc/ctypes-implementation.txt b/pypy/doc/ctypes-implementation.txt deleted file mode 100644 --- a/pypy/doc/ctypes-implementation.txt +++ /dev/null @@ -1,184 +0,0 @@ - -============================= -PyPy's ctypes implementation -============================= - -Summary -======== - -Terminology: - -* application level code - code written in full Python - -* interpreter level code - code written in RPython, compiled - to something else, say C, part of the interpreter. - -PyPy's ctypes implementation in its current state proves the -feasibility of implementing a module with the same interface and -behavior for PyPy as ctypes for CPython. - -PyPy's implementation internally uses `libffi`_ like CPython's ctypes. -In our implementation as much as possible of the code is written in -full Python, not RPython. 
In CPython's situation, the equivalent would -be to write as little as possible code in C. We essentially favored -rapid experimentation over worrying about speed for this first trial -implementation. This allowed to provide a working implementation with -a large part of ctypes features in 2 months real time. - -We reused the ``ctypes`` package version 1.0.2 as-is from CPython. We -implemented ``_ctypes`` which is a C module in CPython mostly in pure -Python based on a lower-level layer extension module ``_rawffi``. - -.. _`libffi`: http://sources.redhat.com/libffi/ - -Low-level part: ``_rawffi`` -============================ - -This PyPy extension module (``pypy/module/_rawffi``) exposes a simple interface -to create C objects (arrays and structures) and calling functions -in dynamic libraries through libffi. Freeing objects in most cases and making -sure that objects referring to each other are kept alive is responsibility of the higher levels. - -This module uses bindings to libffi which are defined in ``pypy/rlib/libffi.py``. - -We tried to keep this module as small as possible. It is conceivable -that other implementations (e.g. Jython) could use our ctypes -implementation by writing their version of ``_rawffi``. - -High-level parts -================= - -The reused ``ctypes`` package lives in ``lib_pypy/ctypes``. ``_ctypes`` -implementing the same interface as ``_ctypes`` in CPython is in -``lib_pypy/_ctypes``. - -Discussion and limitations -============================= - -Reimplementing ctypes features was in general possible. PyPy supports -pluggable garbage collectors, some of them are moving collectors, this -means that the strategy of passing direct references inside Python -objects to an external library is not feasible (unless the GCs -support pinning, which is not the case right now). The consequence of -this is that sometimes copying instead of sharing is required, this -may result in some semantics differences. 
C objects created with -_rawffi itself are allocated outside of the GC heap, such that they can be -passed to external functions without worries. - -Porting the implementation to interpreter-level should likely improve -its speed. Furthermore the current layering and the current _rawffi -interface require more object allocations and copying than strictly -necessary; this too could be improved. - -The implementation was developed and has only been tested on x86-32 Linux. - -Here is a list of the limitations and missing features of the -current implementation: - -* No support for ``PyXxx`` functions from ``libpython``, for obvious reasons. - -* We copy Python strings instead of having pointers to raw buffers - -* Features we did not get to implement: - - - custom alignment and bit-fields - - - resizing (``resize()`` function) - - - non-native byte-order objects - - - callbacks accepting by-value structures - - - slight semantic differences that ctypes makes - between its primitive types and user subclasses - of its primitive types - -Getting the code and test suites -================================= - -A stable revision of PyPy containing the ctypes implementation can be checked out with subversion from the tag: - -http://codespeak.net/svn/pypy/tag/ctypes-stable - -The various tests and later examples can be run on x86-32 Linux. We tried them -on an up-to-date Ubuntu 7.10 x86-32 system. - -If one goes inside the checkout it is possible to run ``_rawffi`` tests with:: - - $ cd pypy - $ python test_all.py module/_rawffi/ - -The ctypes implementation test suite is derived from the tests for -ctypes 1.0.2, we have skipped some tests corresponding to not -implemented features or implementation details, we have also added -some tests. - -To run the test suite a compiled pypy-c is required with the proper configuration. 
To build the required pypy-c one should inside the checkout:: - - $ cd pypy/translator/goal - $ ./translate.py --text --batch --gc=generation targetpypystandalone.py - --withmod-_rawffi --allworkingmodules - -this should produce a pypy-c executable in the ``goal`` directory. - -To run the tests then:: - - $ cd ../../.. # back to pypy-trunk - $ ./pypy/translator/goal/pypy-c pypy/test_all.py lib/pypy1.2/lib_pypy/pypy_test/ctypes_tests - -There should be 36 skipped tests and all other tests should pass. - -Running application examples -============================== - -`pyglet`_ is known to run. We had some success also with pygame-ctypes which is not maintained anymore and with a snapshot of the experimental pysqlite-ctypes. We will only describe how to run the pyglet examples. - -pyglet -------- - -We tried pyglet checking it out from its repository at revision 1984. -For convenience a tarball of the checkout can also be found at: - -http://codespeak.net/~pedronis/pyglet-r1984.tgz - -From pyglet, the following examples are known to work: - - - opengl.py - - multiple_windows.py - - events.py - - html_label.py - - timer.py - - window_platform_event.py - - fixed_resolution.py - -The pypy-c translated to run the ctypes tests can be used to run the pyglet examples as well. They can be run like e.g.:: - - $ cd pyglet/ - $ PYTHONPATH=. ../ctypes-stable/pypy/translator/goal/pypy-c examples/opengl.py - - -they usually should be terminated with ctrl-c. Refer to the their doc strings for details about how they should behave. - -The following examples don't work for reasons independent from ctypes: - - - image_convert.py needs PIL - - image_display.py needs PIL - - astraea/astraea.py needs PIL - -We did not try the following examples: - - - media_player.py needs avbin or at least a proper sound card setup for - .wav files - - video.py needs avbin - - soundscape needs avbin - -.. 
_`pyglet`: http://pyglet.org/ - - -ctypes configure -================= - -We also released `ctypes-configure`_, which is an experimental package trying to -approach the portability issues of ctypes-based code. - -.. _`ctypes-configure`: http://codespeak.net/~fijal/configure.html diff --git a/pypy/doc/config/objspace.name.txt b/pypy/doc/config/objspace.name.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.name.txt +++ /dev/null @@ -1,16 +0,0 @@ -Determine which `Object Space`_ to use. The `Standard Object Space`_ gives the -normal Python semantics, the others are `Object Space Proxies`_ giving -additional features (except the Flow Object Space which is not intended -for normal usage): - - * thunk_: The thunk object space adds lazy evaluation to PyPy. - * taint_: The taint object space adds soft security features. - * dump_: Using this object spaces results in the dumpimp of all operations - to a log. - -.. _`Object Space`: ../objspace.html -.. _`Object Space Proxies`: ../objspace-proxies.html -.. _`Standard Object Space`: ../objspace.html#standard-object-space -.. _thunk: ../objspace-proxies.html#thunk -.. _taint: ../objspace-proxies.html#taint -.. _dump: ../objspace-proxies.html#dump diff --git a/pypy/doc/config/translation.stackless.txt b/pypy/doc/config/translation.stackless.txt deleted file mode 100644 --- a/pypy/doc/config/translation.stackless.txt +++ /dev/null @@ -1,5 +0,0 @@ -Run the `stackless transform`_ on each generated graph, which enables the use -of coroutines at RPython level and the "stackless" module when translating -PyPy. - -.. _`stackless transform`: ../stackless.html diff --git a/pypy/doc/config/objspace.std.methodcachesizeexp.txt b/pypy/doc/config/objspace.std.methodcachesizeexp.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.methodcachesizeexp.txt +++ /dev/null @@ -1,1 +0,0 @@ -Set the cache size (number of entries) for :config:`objspace.std.withmethodcache`. 
diff --git a/pypy/doc/index-report.txt b/pypy/doc/index-report.txt deleted file mode 100644 --- a/pypy/doc/index-report.txt +++ /dev/null @@ -1,169 +0,0 @@ -============================================ -PyPy - Overview over the EU-reports -============================================ - -Below reports summarize and discuss research and development results -of the PyPy project during the EU funding period (Dez 2004 - March 2007). -They also are very good documentation if you'd like to know in more -detail about motivation and implementation of the various parts -and aspects of PyPy. Feel free to send questions or comments -to `pypy-dev`_, the development list. - -Reports of 2007 -=============== - -The `PyPy EU Final Activity Report`_ summarizes the 28 month EU project -period (Dec 2004-March 2007) on technical, scientific and community levels. -You do not need prior knowledge about PyPy but some technical knowledge about -computer language implementations is helpful. The report contains reflections -and recommendations which might be interesting for other project aiming -at funded Open Source research. *(2007-05-11)* - -`D09.1 Constraint Solving and Semantic Web`_ is a report about PyPy's logic -programming and constraint solving features, as well as the work going on to -tie semantic web technologies and PyPy together. *(2007-05-11)* - -`D14.4 PyPy-1.0 Milestone report`_ (for language developers and researchers) -summarizes research & technical results of the PyPy-1.0 release and discusses -related development process and community aspects. *(2007-05-01)* - -`D08.2 JIT Compiler Architecture`_ is a report about the Architecture and -working of our JIT compiler generator. *(2007-05-01)* - -`D08.1 JIT Compiler Release`_ reports on our successfully including a -JIT compiler for Python and the novel framework we used to -automatically generate it in PyPy 1.0. 
*(2007-04-30)* - -`D06.1 Core Object Optimization Results`_ documents the optimizations -we implemented in the interpreter and object space: dictionary -implementations, method call optimizations, etc. The report is still not final -so we are very interested in any feedback *(2007-04-04)* - -`D14.5 Documentation of the development process`_ documents PyPy's -sprint-driven development process and puts it into the context of agile -methodologies. *(2007-03-30)* - -`D13.1 Integration and Configuration`_ is a report about our build and -configuration toolchain as well as the planned Debian packages. It also -describes the work done to integrate the results of other workpackages into the -rest of the project. *(2007-03-30)* - -`D02.2 Release Scheme`_ lists PyPy's six public releases and explains the release structure, tools, directories and policies for performing PyPy releases. *(2007-03-30)* - -`D01.2-4 Project Organization`_ is a report about the management activities -within the PyPy project and PyPy development process. *(2007-03-28)* - -`D11.1 PyPy for Embedded Devices`_ is a report about the possibilities of using -PyPy technology for programming embedded devices. *(2007-03-26)* - -`D02.3 Testing Tool`_ is a report about the -`py.test`_ testing tool which is part of the `py-lib`_. *(2007-03-23)* - -`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static -checking`_ is a report about the ``aop`` module providing an Aspect Oriented -Programming mechanism for PyPy, and how this can be leveraged to implement a -Design-by-Contract module. It also introduces RPylint static type checker for -RPython code. *(2007-03-22)* - -`D12.1 High-Level-Backends and Feature Prototypes`_ is -a report about our high-level backends and our -several validation prototypes: an information flow security prototype, -a distribution prototype and a persistence proof-of-concept. 
*(2007-03-22)* - -`D14.2 Tutorials and Guide Through the PyPy Source Code`_ is -a report about the steps we have taken to make the project approachable for -newcomers. *(2007-03-22)* - - -`D02.1 Development Tools and Website`_ is a report -about the codespeak_ development environment and additional tool support for the -PyPy development process. *(2007-03-21)* - -`D03.1 Extension Compiler`_ is a report about -PyPy's extension compiler and RCTypes, as well as the effort to keep up with -CPython's changes. *(2007-03-21)* - - -`D07.1 Massive Parallelism and Translation Aspects`_ is a report about -PyPy's optimization efforts, garbage collectors and massive parallelism -(stackless) features. This report refers to the paper `PyPy's approach -to virtual machine construction`_. *(2007-02-28)* - - - -.. _`py-lib`: http://codespeak.net/py/current/doc/ -.. _`py.test`: http://codespeak.net/py/current/doc/test.html -.. _codespeak: http://codespeak.net/ -.. _`pypy-dev`: http://codespeak.net/mailman/listinfo/pypy-dev - - -Reports of 2006 -=============== - -`D14.3 Report about Milestone/Phase 2`_ is the final report about -the second phase of the EU project, summarizing and detailing technical, -research, dissemination and community aspects. Feedback is very welcome! - - -Reports of 2005 -=============== - -`D04.1 Partial Python Implementation`_ contains details about the 0.6 release. -All the content can be found in the regular documentation section. - -`D04.2 Complete Python Implementation`_ contains details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D04.3 Parser and Bytecode Compiler`_ describes our parser and bytecode compiler. - -`D04.4 PyPy as a Research Tool`_ contains details about the 0.8 release. -All the content can be found in the regular documentation section. 
- -`D05.1 Compiling Dynamic Language Implementations`_ is a paper that describes -the translation process, especially the flow object space and the annotator in -detail. - -`D05.2 A Compiled Version of PyPy`_ contains more details about the 0.7 release. -All the content can be found in the regular documentation section. - -`D05.3 Implementation with Translation Aspects`_ -describes how our approach hides away a lot of low level details. - -`D05.4 Encapsulating Low Level Aspects`_ describes how we weave different -properties into our interpreter during the translation process. - -`D14.1 Report about Milestone/Phase 1`_ describes what happened in the PyPy -project during the first year of EU funding (December 2004 - December 2005) - -.. _`PyPy EU Final Activity Report`: http://codespeak.net/pypy/extradoc/eu-report/PYPY-EU-Final-Activity-Report.pdf -.. _`D01.2-4 Project Organization`: http://codespeak.net/pypy/extradoc/eu-report/D01.2-4_Project_Organization-2007-03-28.pdf -.. _`D02.1 Development Tools and Website`: http://codespeak.net/pypy/extradoc/eu-report/D02.1_Development_Tools_and_Website-2007-03-21.pdf -.. _`D02.2 Release Scheme`: http://codespeak.net/svn/pypy/extradoc/eu-report/D02.2_Release_Scheme-2007-03-30.pdf -.. _`D02.3 Testing Tool`: http://codespeak.net/pypy/extradoc/eu-report/D02.3_Testing_Framework-2007-03-23.pdf -.. _`D03.1 Extension Compiler`: http://codespeak.net/pypy/extradoc/eu-report/D03.1_Extension_Compiler-2007-03-21.pdf -.. _`D04.1 Partial Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.1_Partial_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.2 Complete Python Implementation`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.2_Complete_Python_Implementation_on_top_of_CPython.pdf -.. _`D04.3 Parser and Bytecode Compiler`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.3_Report_about_the_parser_and_bytecode_compiler.pdf -.. 
_`D04.4 PyPy as a Research Tool`: http://codespeak.net/svn/pypy/extradoc/eu-report/D04.4_Release_PyPy_as_a_research_tool.pdf -.. _`D05.1 Compiling Dynamic Language Implementations`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf -.. _`D05.2 A Compiled Version of PyPy`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.2_A_compiled,_self-contained_version_of_PyPy.pdf -.. _`D05.3 Implementation with Translation Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.3_Publish_on_implementation_with_translation_aspects.pdf -.. _`D05.4 Encapsulating Low Level Aspects`: http://codespeak.net/svn/pypy/extradoc/eu-report/D05.4_Publish_on_encapsulating_low_level_language_aspects.pdf -.. _`D06.1 Core Object Optimization Results`: http://codespeak.net/svn/pypy/extradoc/eu-report/D06.1_Core_Optimizations-2007-04-30.pdf -.. _`D07.1 Massive Parallelism and Translation Aspects`: http://codespeak.net/pypy/extradoc/eu-report/D07.1_Massive_Parallelism_and_Translation_Aspects-2007-02-28.pdf -.. _`D08.2 JIT Compiler Architecture`: http://codespeak.net/pypy/extradoc/eu-report/D08.2_JIT_Compiler_Architecture-2007-05-01.pdf -.. _`D08.1 JIT Compiler Release`: http://codespeak.net/pypy/extradoc/eu-report/D08.1_JIT_Compiler_Release-2007-04-30.pdf -.. _`D09.1 Constraint Solving and Semantic Web`: http://codespeak.net/pypy/extradoc/eu-report/D09.1_Constraint_Solving_and_Semantic_Web-2007-05-11.pdf -.. _`D10.1 Aspect-Oriented, Design-by-Contract Programming and RPython static checking`: http://codespeak.net/pypy/extradoc/eu-report/D10.1_Aspect_Oriented_Programming_in_PyPy-2007-03-22.pdf -.. _`D11.1 PyPy for Embedded Devices`: http://codespeak.net/pypy/extradoc/eu-report/D11.1_PyPy_for_Embedded_Devices-2007-03-26.pdf -.. _`D12.1 High-Level-Backends and Feature Prototypes`: http://codespeak.net/pypy/extradoc/eu-report/D12.1_H-L-Backends_and_Feature_Prototypes-2007-03-22.pdf -.. 
_`D13.1 Integration and Configuration`: http://codespeak.net/pypy/extradoc/eu-report/D13.1_Integration_and_Configuration-2007-03-30.pdf -.. _`D14.1 Report about Milestone/Phase 1`: http://codespeak.net/svn/pypy/extradoc/eu-report/D14.1_Report_about_Milestone_Phase_1.pdf -.. _`D14.2 Tutorials and Guide Through the PyPy Source Code`: http://codespeak.net/pypy/extradoc/eu-report/D14.2_Tutorials_and_Guide_Through_the_PyPy_Source_Code-2007-03-22.pdf -.. _`D14.3 Report about Milestone/Phase 2`: http://codespeak.net/pypy/extradoc/eu-report/D14.3_Report_about_Milestone_Phase_2-final-2006-08-03.pdf -.. _`D14.4 PyPy-1.0 Milestone report`: http://codespeak.net/pypy/extradoc/eu-report/D14.4_Report_About_Milestone_Phase_3-2007-05-01.pdf -.. _`D14.5 Documentation of the development process`: http://codespeak.net/pypy/extradoc/eu-report/D14.5_Documentation_of_the_development_process-2007-03-30.pdf - - - -.. _`PyPy's approach to virtual machine construction`: http://codespeak.net/svn/pypy/extradoc/talk/dls2006/pypy-vm-construction.pdf diff --git a/pypy/doc/config/objspace.usemodules.marshal.txt b/pypy/doc/config/objspace.usemodules.marshal.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.marshal.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'marshal' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/config/objspace.usemodules.symbol.txt b/pypy/doc/config/objspace.usemodules.symbol.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.symbol.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'symbol' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/objspace.std.withsmallint.txt b/pypy/doc/config/objspace.std.withsmallint.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withsmallint.txt +++ /dev/null @@ -1,6 +0,0 @@ -Use "tagged pointers" to represent small enough integer values: Integers that -fit into 31 bits (respective 63 bits on 64 bit machines) are not represented by -boxing them in an instance of ``W_IntObject``. Instead they are represented as a -pointer having the lowest bit set and the rest of the bits used to store the -value of the integer. This gives a small speedup for integer operations as well -as better memory behaviour. diff --git a/pypy/doc/config/translation.list_comprehension_operations.txt b/pypy/doc/config/translation.list_comprehension_operations.txt deleted file mode 100644 --- a/pypy/doc/config/translation.list_comprehension_operations.txt +++ /dev/null @@ -1,2 +0,0 @@ -Experimental optimization for list comprehensions in RPython. - diff --git a/pypy/doc/cleanup-todo.txt b/pypy/doc/cleanup-todo.txt deleted file mode 100644 --- a/pypy/doc/cleanup-todo.txt +++ /dev/null @@ -1,30 +0,0 @@ - -PyPy cleanup areas -================== - -This is a todo list that lists various areas of PyPy that should be cleaned up -(for whatever reason: less mess, less code duplication, etc). 
- -translation toolchain ---------------------- - - - low level backends should share more code - - all backends should have more consistent interfaces - - geninterp is a hack - - delegate finding type stuff like vtables etc to GC, cleaner interface for rtti, - simplify translator/c/gc.py - - clean up the tangle of including headers in the C backend - - make approach for loading modules more sane, mixedmodule capture - too many platform dependencies especially for pypy-cli - - review pdbplus, especially the graph commands, also in the light of - https://codespeak.net/issue/pypy-dev/issue303 and the fact that - we can have more than one translator/annotator around (with the - timeshifter) - -interpreter ------------ - - - review the things implemented at applevel whether they are performance- - critical - - - review CPython regression test suite, enable running tests, fix bugs diff --git a/pypy/doc/config/objspace.usemodules.sys.txt b/pypy/doc/config/objspace.usemodules.sys.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.sys.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'sys' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/config/translation.verbose.txt b/pypy/doc/config/translation.verbose.txt deleted file mode 100644 --- a/pypy/doc/config/translation.verbose.txt +++ /dev/null @@ -1,1 +0,0 @@ -Print some more information during translation. diff --git a/pypy/doc/config/objspace.usepycfiles.txt b/pypy/doc/config/objspace.usepycfiles.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usepycfiles.txt +++ /dev/null @@ -1,4 +0,0 @@ -If this option is used, then PyPy imports and generates "pyc" files in the -same way as CPython. This is true by default and there is not much reason -to turn it off nowadays. If off, PyPy never produces "pyc" files and -ignores any "pyc" file that might already be present. 
diff --git a/pypy/doc/config/translation.backendopt.print_statistics.txt b/pypy/doc/config/translation.backendopt.print_statistics.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.print_statistics.txt +++ /dev/null @@ -1,2 +0,0 @@ -Debugging option. Print statics about the forest of flowgraphs as they -go through the various backend optimizations. \ No newline at end of file diff --git a/pypy/doc/config/translation.gcremovetypeptr.txt b/pypy/doc/config/translation.gcremovetypeptr.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gcremovetypeptr.txt +++ /dev/null @@ -1,1 +0,0 @@ -If set, save one word in every object. Framework GC only. diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -3,8 +3,102 @@ from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.rlib.objectmodel import we_are_translated from pypy.jit.metainterp.jitexc import JitException +from pypy.jit.metainterp.optimizeopt.optimizer import Optimization -from pypy.jit.metainterp.optimizeopt.optimizer import Optimization + +class CachedField(object): + def __init__(self): + # Cache information for a field descr. It can be in one + # of two states: + # + # 1. 'cached_fields' is a dict mapping OptValues of structs + # to OptValues of fields. All fields on-heap are + # synchronized with the values stored in the cache. + # + # 2. we just did one setfield, which is delayed (and thus + # not synchronized). 'lazy_setfield' is the delayed + # ResOperation. In this state, 'cached_fields' contains + # out-of-date information. More precisely, the field + # value pending in the ResOperation is *not* visible in + # 'cached_fields'. + # + self._cached_fields = {} + self._lazy_setfield = None + self._lazy_setfield_registered = False + + def do_setfield(self, optheap, op): + # Update the state with the SETFIELD_GC operation 'op'. 
+ structvalue = optheap.getvalue(op.getarg(0)) + fieldvalue = optheap.getvalue(op.getarg(1)) + if self.possible_aliasing(optheap, structvalue): + self.force_lazy_setfield(optheap) + assert not self.possible_aliasing(optheap, structvalue) + cached_fieldvalue = self._cached_fields.get(structvalue, None) + if cached_fieldvalue is not fieldvalue: + # common case: store the 'op' as lazy_setfield, and register + # myself in the optheap's _lazy_setfields list + self._lazy_setfield = op + if not self._lazy_setfield_registered: + optheap._lazy_setfields.append(self) + self._lazy_setfield_registered = True + else: + # this is the case where the pending setfield ends up + # storing precisely the value that is already there, + # as proved by 'cached_fields'. In this case, we don't + # need any _lazy_setfield: the heap value is already right. + # Note that this may reset to None a non-None lazy_setfield, + # cancelling its previous effects with no side effect. + self._lazy_setfield = None + + def possible_aliasing(self, optheap, structvalue): + # If lazy_setfield is set and contains a setfield on a different + # structvalue, then we are annoyed, because it may point to either + # the same or a different structure at runtime. + return (self._lazy_setfield is not None + and (optheap.getvalue(self._lazy_setfield.getarg(0)) + is not structvalue)) + + def getfield_from_cache(self, optheap, structvalue): + # Returns the up-to-date field's value, or None if not cached. 
+ if self.possible_aliasing(optheap, structvalue): + self.force_lazy_setfield(optheap) + if self._lazy_setfield is not None: + op = self._lazy_setfield + assert optheap.getvalue(op.getarg(0)) is structvalue + return optheap.getvalue(op.getarg(1)) + else: + return self._cached_fields.get(structvalue, None) + + def remember_field_value(self, structvalue, fieldvalue): + assert self._lazy_setfield is None + self._cached_fields[structvalue] = fieldvalue + + def force_lazy_setfield(self, optheap): + op = self._lazy_setfield + if op is not None: + # This is the way _lazy_setfield is usually reset to None. + # Now we clear _cached_fields, because actually doing the + # setfield might impact any of the stored result (because of + # possible aliasing). + self._cached_fields.clear() + self._lazy_setfield = None + optheap.next_optimization.propagate_forward(op) + # Once it is done, we can put at least one piece of information + # back in the cache: the value of this particular structure's + # field. 
+ structvalue = optheap.getvalue(op.getarg(0)) + fieldvalue = optheap.getvalue(op.getarg(1)) + self.remember_field_value(structvalue, fieldvalue) + + def get_reconstructed(self, optimizer, valuemap): + assert self._lazy_setfield is None + cf = CachedField() + for structvalue, fieldvalue in self._cached_fields.iteritems(): + structvalue2 = structvalue.get_reconstructed(optimizer, valuemap) + fieldvalue2 = fieldvalue .get_reconstructed(optimizer, valuemap) + cf._cached_fields[structvalue2] = fieldvalue2 + return cf + class CachedArrayItems(object): def __init__(self): @@ -20,40 +114,23 @@ """Cache repeated heap accesses""" def __init__(self): - # cached fields: {descr: {OptValue_instance: OptValue_fieldvalue}} + # cached fields: {descr: CachedField} self.cached_fields = {} - self.known_heap_fields = {} + self._lazy_setfields = [] # cached array items: {descr: CachedArrayItems} self.cached_arrayitems = {} - # lazily written setfields (at most one per descr): {descr: op} - self.lazy_setfields = {} - self.lazy_setfields_descrs = [] # keys (at least) of previous dict def reconstruct_for_next_iteration(self, optimizer, valuemap): new = OptHeap() if True: self.force_all_lazy_setfields() - assert not self.lazy_setfields_descrs - assert not self.lazy_setfields else: - new.lazy_setfields_descrs = self.lazy_setfields_descrs - new.lazy_setfields = self.lazy_setfields + assert 0 # was: new.lazy_setfields = self.lazy_setfields for descr, d in self.cached_fields.items(): - newd = {} - new.cached_fields[descr] = newd - for value, fieldvalue in d.items(): - newd[value.get_cloned(optimizer, valuemap)] = \ - fieldvalue.get_cloned(optimizer, valuemap) - - for descr, d in self.known_heap_fields.items(): - newd = {} - new.known_heap_fields[descr] = newd - for value, fieldvalue in d.items(): - newd[value.get_cloned(optimizer, valuemap)] = \ - fieldvalue.get_cloned(optimizer, valuemap) - + new.cached_fields[descr] = d.get_cloneded(optimizer, valuemap) + new.cached_arrayitems = {} for 
descr, d in self.cached_arrayitems.items(): newd = {} @@ -74,30 +151,16 @@ return new def clean_caches(self): + del self._lazy_setfields[:] self.cached_fields.clear() - self.known_heap_fields.clear() self.cached_arrayitems.clear() - def cache_field_value(self, descr, value, fieldvalue, write=False): - if write: - # when seeing a setfield, we have to clear the cache for the same - # field on any other structure, just in case they are aliasing - # each other - d = self.cached_fields[descr] = {} - else: - d = self.cached_fields.setdefault(descr, {}) - d[value] = fieldvalue - - def read_cached_field(self, descr, value): - # XXX self.cached_fields and self.lazy_setfields should probably - # be merged somehow - d = self.cached_fields.get(descr, None) - if d is None: - op = self.lazy_setfields.get(descr, None) - if op is None: - return None - return self.getvalue(op.getarg(1)) - return d.get(value, None) + def field_cache(self, descr): + try: + cf = self.cached_fields[descr] + except KeyError: + cf = self.cached_fields[descr] = CachedField() + return cf def cache_arrayitem_value(self, descr, value, indexvalue, fieldvalue, write=False): d = self.cached_arrayitems.get(descr, None) @@ -157,11 +220,15 @@ self.optimizer.pendingfields = self.force_lazy_setfields_for_guard() return opnum = op.getopnum() - if (opnum == rop.SETFIELD_GC or - opnum == rop.SETFIELD_RAW or - opnum == rop.SETARRAYITEM_GC or - opnum == rop.SETARRAYITEM_RAW or - opnum == rop.DEBUG_MERGE_POINT): + if (opnum == rop.SETFIELD_GC or # handled specially + opnum == rop.SETFIELD_RAW or # no effect on GC struct/array + opnum == rop.SETARRAYITEM_GC or # handled specially + opnum == rop.SETARRAYITEM_RAW or # no effect on GC struct + opnum == rop.STRSETITEM or # no effect on GC struct/array + opnum == rop.UNICODESETITEM or # no effect on GC struct/array + opnum == rop.DEBUG_MERGE_POINT or # no effect whatsoever + opnum == rop.COPYSTRCONTENT or # no effect on GC struct/array + opnum == rop.COPYUNICODECONTENT): # no 
effect on GC struct/array return assert opnum != rop.CALL_PURE if (opnum == rop.CALL or @@ -179,8 +246,8 @@ for fielddescr in effectinfo.write_descrs_fields: self.force_lazy_setfield(fielddescr) try: - del self.cached_fields[fielddescr] - del self.known_heap_fields[fielddescr] + cf = self.cached_fields[fielddescr] + cf._cached_fields.clear() except KeyError: pass for arraydescr in effectinfo.write_descrs_arrays: @@ -194,10 +261,7 @@ # ^^^ we only need to force this field; the other fields # of virtualref_info and virtualizable_info are not gcptrs. return - self.force_all_lazy_setfields() - elif op.is_final() or (not we_are_translated() and - op.getopnum() < 0): # escape() operations - self.force_all_lazy_setfields() + self.force_all_lazy_setfields() self.clean_caches() @@ -205,58 +269,54 @@ assert value.is_constant() newvalue = self.getvalue(value.box) if value is not newvalue: - for d in self.cached_fields.values(): - if value in d: - d[newvalue] = d[value] - # FIXME: Update the other caches too? - - - def force_lazy_setfield(self, descr, before_guard=False): + for cf in self.cached_fields.itervalues(): + if value in cf._cached_fields: + cf._cached_fields[newvalue] = cf._cached_fields[value] + + def force_lazy_setfield(self, descr): try: - op = self.lazy_setfields[descr] + cf = self.cached_fields[descr] except KeyError: return - del self.lazy_setfields[descr] - value = self.getvalue(op.getarg(0)) - fieldvalue = self.getvalue(op.getarg(1)) - try: - heapvalue = self.known_heap_fields[op.getdescr()][value] - if fieldvalue is heapvalue: - return - except KeyError: - pass - self.next_optimization.propagate_forward(op) + cf.force_lazy_setfield(self) + def fixup_guard_situation(self): # hackish: reverse the order of the last two operations if it makes # sense to avoid a situation like "int_eq/setfield_gc/guard_true", # which the backend (at least the x86 backend) does not handle well. 
newoperations = self.optimizer.newoperations - if before_guard and len(newoperations) >= 2: - lastop = newoperations[-1] - prevop = newoperations[-2] - # - is_comparison() for cases like "int_eq/setfield_gc/guard_true" - # - CALL_MAY_FORCE: "call_may_force/setfield_gc/guard_not_forced" - # - is_ovf(): "int_add_ovf/setfield_gc/guard_no_overflow" - opnum = prevop.getopnum() - lastop_args = lastop.getarglist() - if ((prevop.is_comparison() or opnum == rop.CALL_MAY_FORCE - or prevop.is_ovf()) - and prevop.result not in lastop_args): - newoperations[-2] = lastop - newoperations[-1] = prevop + if len(newoperations) < 2: + return + lastop = newoperations[-1] + if (lastop.getopnum() != rop.SETFIELD_GC and + lastop.getopnum() != rop.SETARRAYITEM_GC): + return + # - is_comparison() for cases like "int_eq/setfield_gc/guard_true" + # - CALL_MAY_FORCE: "call_may_force/setfield_gc/guard_not_forced" + # - is_ovf(): "int_add_ovf/setfield_gc/guard_no_overflow" + prevop = newoperations[-2] + opnum = prevop.getopnum() + if not (prevop.is_comparison() or opnum == rop.CALL_MAY_FORCE + or prevop.is_ovf()): + return + if prevop.result in lastop.getarglist(): + return + newoperations[-2] = lastop + newoperations[-1] = prevop def force_all_lazy_setfields(self): - if len(self.lazy_setfields_descrs) > 0: - for descr in self.lazy_setfields_descrs: - self.force_lazy_setfield(descr) - del self.lazy_setfields_descrs[:] + for cf in self._lazy_setfields: + if not we_are_translated(): + assert cf in self.cached_fields.values() + cf.force_lazy_setfield(self) def force_lazy_setfields_for_guard(self): pendingfields = [] - for descr in self.lazy_setfields_descrs: - try: - op = self.lazy_setfields[descr] - except KeyError: + for cf in self._lazy_setfields: + if not we_are_translated(): + assert cf in self.cached_fields.values() + op = cf._lazy_setfield + if op is None: continue # the only really interesting case that we need to handle in the # guards' resume data is that of a virtual object that is 
stored @@ -266,41 +326,27 @@ fieldvalue = self.getvalue(op.getarg(1)) if fieldvalue.is_virtual(): # this is the case that we leave to resume.py - pendingfields.append((descr, value.box, + pendingfields.append((op.getdescr(), value.box, fieldvalue.get_key_box())) else: - self.force_lazy_setfield(descr, before_guard=True) + cf.force_lazy_setfield(self) + self.fixup_guard_situation() return pendingfields - def force_lazy_setfield_if_necessary(self, op, value, write=False): - try: - op1 = self.lazy_setfields[op.getdescr()] - except KeyError: - if write: - self.lazy_setfields_descrs.append(op.getdescr()) - else: - if self.getvalue(op1.getarg(0)) is not value: - self.force_lazy_setfield(op.getdescr()) - def optimize_GETFIELD_GC(self, op): - value = self.getvalue(op.getarg(0)) - self.force_lazy_setfield_if_necessary(op, value) - # check if the field was read from another getfield_gc just before - # or has been written to recently - fieldvalue = self.read_cached_field(op.getdescr(), value) + structvalue = self.getvalue(op.getarg(0)) + cf = self.field_cache(op.getdescr()) + fieldvalue = cf.getfield_from_cache(self, structvalue) if fieldvalue is not None: self.make_equal_to(op.result, fieldvalue) return # default case: produce the operation - value.ensure_nonnull() + structvalue.ensure_nonnull() ###self.optimizer.optimize_default(op) self.emit_operation(op) # then remember the result of reading the field fieldvalue = self.getvalue(op.result) - self.cache_field_value(op.getdescr(), value, fieldvalue) - # keep track of what's on the heap - d = self.known_heap_fields.setdefault(op.getdescr(), {}) - d[value] = fieldvalue + cf.remember_field_value(structvalue, fieldvalue) def optimize_SETFIELD_GC(self, op): if self.has_pure_result(rop.GETFIELD_GC_PURE, [op.getarg(0)], @@ -309,14 +355,8 @@ (op.getdescr().repr_of_descr())) raise BogusPureField # - value = self.getvalue(op.getarg(0)) - fieldvalue = self.getvalue(op.getarg(1)) - cached_fieldvalue = 
self.read_cached_field(op.getdescr(), value) - if fieldvalue is not cached_fieldvalue: - self.force_lazy_setfield_if_necessary(op, value, write=True) - self.lazy_setfields[op.getdescr()] = op - # remember the result of future reads of the field - self.cache_field_value(op.getdescr(), value, fieldvalue, write=True) + cf = self.field_cache(op.getdescr()) + cf.do_setfield(self, op) def optimize_GETARRAYITEM_GC(self, op): value = self.getvalue(op.getarg(0)) diff --git a/pypy/doc/config/translation.gctransformer.txt b/pypy/doc/config/translation.gctransformer.txt deleted file mode 100644 --- a/pypy/doc/config/translation.gctransformer.txt +++ /dev/null @@ -1,1 +0,0 @@ -internal option diff --git a/pypy/doc/config/objspace.timing.txt b/pypy/doc/config/objspace.timing.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.timing.txt +++ /dev/null @@ -1,1 +0,0 @@ -timing of various parts of the interpreter (simple profiling) diff --git a/pypy/doc/config/objspace.std.withtproxy.txt b/pypy/doc/config/objspace.std.withtproxy.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withtproxy.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable `transparent proxies`_. - -.. _`transparent proxies`: ../objspace-proxies.html#tproxy diff --git a/pypy/doc/config/translation.output.txt b/pypy/doc/config/translation.output.txt deleted file mode 100644 --- a/pypy/doc/config/translation.output.txt +++ /dev/null @@ -1,1 +0,0 @@ -Specify file name that the produced executable gets. diff --git a/pypy/doc/discussion/oz-thread-api.txt b/pypy/doc/discussion/oz-thread-api.txt deleted file mode 100644 --- a/pypy/doc/discussion/oz-thread-api.txt +++ /dev/null @@ -1,49 +0,0 @@ -Some rough notes about the Oz threading model -============================================= - -(almost verbatim from CTM) - -Scheduling ----------- - -Fair scheduling through round-robin. - -With priority levels : three queues exist, which manage high, medium, -low priority threads. 
The time slice ratio for these is -100:10:1. Threads inherit the priority of their parent. - -Mozart uses an external timer approach to implement thread preemption. - -Thread ops ----------- - -All these ops are defined in a Thread namespace/module. - -this() -> current thread's name (*not* another thread's name) -state(t) -> return state of t in {runnable, blocked, terminated} -suspend(t) : suspend t -resume(t) : resume execution of t -preempt(t) : preempt t -terminate(t) : terminate t immediately -injectException(t, e) : raise exception e in t -setPriority(t, p) : set t's priority to p - -Interestingly, coroutines can be build upon this thread -API. Coroutines have two ops : spawn and resume. - -spawn(p) -> creates a coroutine with procedure p, returns pid -resume(c) : transfers control from current coroutine to c - -The implementation of these ops in terms of the threads API is as -follows : - -def spawn(p): - in_thread: - pid = Thread.this() - Thread.suspend(pid) - p() - -def resume(cid): - Thread.resume cid - Thread.suspend(Thread.this()) - diff --git a/pypy/doc/faq.txt b/pypy/doc/faq.txt deleted file mode 100644 --- a/pypy/doc/faq.txt +++ /dev/null @@ -1,425 +0,0 @@ -========================== -Frequently Asked Questions -========================== - -.. contents:: - - -General -======= - -------------- -What is PyPy? -------------- - -PyPy is both: - - - a reimplementation of Python in Python, and - - - a framework for implementing interpreters and virtual machines for - programming languages, especially dynamic languages. - -PyPy tries to find new answers about ease of creation, flexibility, -maintainability and speed trade-offs for language implementations. -For further details see our `goal and architecture document`_ . - -.. _`goal and architecture document`: architecture.html - - -.. _`drop in replacement`: - ------------------------------------------- -Is PyPy a drop in replacement for CPython? ------------------------------------------- - -Almost! 
- -The mostly likely stumbling block for any given project is support for -`extension modules`_. PyPy supports a continually growing -number of extension modules, but so far mostly only those found in the -standard library. - -The language features (including builtin types and functions) are very -complete and well tested, so if your project does not use many -extension modules there is a good chance that it will work with PyPy. - -We list the differences we know about in `cpython_differences`_. - -There is also an experimental support for CPython extension modules, so -they'll run without change (from current observation, rather with little -change) on trunk. It has been a part of 1.4 release, but support is still -in alpha phase. - -.. _`extension modules`: cpython_differences.html#extension-modules -.. _`cpython_differences`: cpython_differences.html - --------------------------------- -On what platforms does PyPy run? --------------------------------- - -PyPy is regularly and extensively tested on Linux machines and on Mac -OS X and mostly works under Windows too (but is tested there less -extensively). PyPy needs a CPython running on the target platform to -bootstrap, as cross compilation is not really meant to work yet. -At the moment you need CPython 2.4 (with ctypes) or CPython 2.5 or 2.6 -for the translation process. PyPy's JIT requires an x86 or x86_64 CPU. - - ------------------------------------------------- -Which Python version (2.x?) does PyPy implement? ------------------------------------------------- - -PyPy currently aims to be fully compatible with Python 2.5. That means that -it contains the standard library of Python 2.5 and that it supports 2.5 -features (such as the with statement). - -.. _threading: - -------------------------------------------------- -Do threads work? What are the modules that work? -------------------------------------------------- - -Operating system-level threads basically work. 
If you enable the ``thread`` -module then PyPy will get support for GIL based threading. -Note that PyPy also fully supports `stackless-like -microthreads`_ (although both cannot be mixed yet). - -All pure-python modules should work, unless they rely on ugly -cpython implementation details, in which case it's their fault. -There is an increasing number of compatible CPython extensions working, -including things like wxPython or PIL. This is an ongoing development effort -to bring as many CPython extension modules working as possible. - -.. _`stackless-like microthreads`: stackless.html - - ------------------------------------- -Can I use CPython extension modules? ------------------------------------- - -Yes, but the feature is in alpha state and is available only on trunk -(not in the 1.2 release). However, we'll only ever support well-behaving -CPython extensions. Please consult PyPy developers on IRC or mailing list -for explanations if your favorite module works and how you can help to make -it happen in case it does not. - -We fully support ctypes-based extensions, however. - ------------------------------------------- -How do I write extension modules for PyPy? ------------------------------------------- - -See `Writing extension modules for PyPy`__. - -.. __: extending.html - - -.. _`slower than CPython`: -.. _`how fast is pypy`: - ------------------ -How fast is PyPy? ------------------ - -.. _whysoslow: - -In three words, PyPy is "kind of fast". In more than three -words, the answer to this question is hard to give as a single -number. The fastest PyPy available so far is clearly PyPy -`with a JIT included`_, optimized and translated to C. This -version of PyPy is "kind of fast" in the sense that there are -numerous examples of Python code that run *much faster* than -CPython, up to a large number of times faster. And there are -also examples of code that are just as slow as without the -JIT. 
A PyPy that does not include a JIT has performance that -is more predictable: it runs generally somewhere between 1 and -2 times slower than CPython, in the worst case up to 4 times -slower. - -Obtaining good measurements for the performance when run on -the CLI or JVM is difficult, but the JIT on the CLI `seems to -work nicely`__ too. - -.. __: http://codespeak.net/svn/user/antocuni/phd/thesis/thesis.pdf -.. _`with a JIT included`: jit/index.html - - -.. _`prolog and javascript`: - ----------------------------------------------------------------- -Can PyPy support interpreters for other languages beyond Python? ----------------------------------------------------------------- - -The toolsuite that translates the PyPy interpreter is quite -general and can be used to create optimized versions of interpreters -for any language, not just Python. Of course, these interpreters -can make use of the same features that PyPy brings to Python: -translation to various languages, stackless features, -garbage collection, implementation of various things like arbitrarily long -integers, etc. - -Currently, we have preliminary versions of a JavaScript interpreter -(Leonardo Santagada as his Summer of PyPy project), a `Prolog interpreter`_ -(Carl Friedrich Bolz as his Bachelor thesis), and a `SmallTalk interpreter`_ -(produced during a sprint). `All of them`_ are unfinished at the moment. - -.. _`Prolog interpreter`: http://codespeak.net/svn/pypy/lang/prolog/ -.. _`SmallTalk interpreter`: http://dx.doi.org/10.1007/978-3-540-89275-5_7 -.. _`All of them`: http://codespeak.net/svn/pypy/lang/ - - -Development -=========== - ------------------------------------------------------------ -How do I get into PyPy development? Can I come to sprints? ------------------------------------------------------------ - -Sure you can come to sprints! We always welcome newcomers and try to help them -get started in the project as much as possible (e.g. 
by providing tutorials and -pairing them with experienced PyPy developers). Newcomers should have some -Python experience and read some of the PyPy documentation before coming to a -sprint. - -Coming to a sprint is usually also the best way to get into PyPy development. -If you want to start on your own, take a look at the list of `project -suggestions`_. If you get stuck or need advice, `contact us`_. Usually IRC is -the most immediate way to get feedback (at least during some parts of the day; -many PyPy developers are in Europe) and the `mailing list`_ is better for long -discussions. - -.. _`project suggestions`: project-ideas.html -.. _`contact us`: index.html -.. _`mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev - ----------------------------------------------------------------------- -I am getting strange errors while playing with PyPy, what should I do? ----------------------------------------------------------------------- - -It seems that a lot of strange, unexplainable problems can be magically -solved by removing all the \*.pyc files from the PyPy source tree -(the script `py.cleanup`_ from py/bin will do that for you). -Another thing you can do is removing the directory pypy/_cache -completely. If the error is persistent and still annoys you after this -treatment please send us a bug report (or even better, a fix :-) - -.. _`py.cleanup`: http://codespeak.net/py/current/doc/bin.html - -------------------------------------------------------------- -OSError: ... cannot restore segment prot after reloc... Help? -------------------------------------------------------------- - -On Linux, if SELinux is enabled, you may get errors along the lines of -"OSError: externmod.so: cannot restore segment prot after reloc: Permission -denied." 
This is caused by a slight abuse of the C compiler during -configuration, and can be disabled by running the following command with root -privileges:: - - # setenforce 0 - -This will disable SELinux's protection and allow PyPy to configure correctly. -Be sure to enable it again if you need it! - - -PyPy translation tool chain -=========================== - ----------------------------------------- -Can PyPy compile normal Python programs? ----------------------------------------- - -No, PyPy is not a Python compiler. - -In Python, it is mostly impossible to *prove* anything about the types -that a program will manipulate by doing a static analysis. It should be -clear if you are familiar with Python, but if in doubt see [BRETT]_. - -What could be attempted is static "soft typing", where you would use a -whole bunch of heuristics to guess what types are probably going to show -up where. In this way, you could compile the program into two copies of -itself: a "fast" version and a "slow" version. The former would contain -many guards that allow it to fall back to the latter if needed. That -would be a wholly different project than PyPy, though. (As far as we -understand it, this is the approach that the LLVM__ group would like to -see LLVM used for, so if you feel like working very hard and attempting -something like this, check with them.) - -.. __: http://llvm.org/ - -What PyPy contains is, on the one hand, an non-soft static type -inferencer for RPython, which is a sublanguage that we defined just so -that it's possible and not too hard to do that; and on the other hand, -for the full Python language, we have an interpreter, and a JIT -generator which can produce a Just-In-Time Compiler from the -interpreter. The resulting JIT works for the full Python language in a -way that doesn't need type inference at all. - -For more motivation and details about our approach see also [D05.1]_, -section 3. - -.. 
[BRETT] Brett Cannon, - Localized Type Inference of Atomic Types in Python, - http://www.ocf.berkeley.edu/~bac/thesis.pdf - -.. [D05.1] Compiling Dynamic Language Implementations, - Report from the PyPy project to the E.U., - http://codespeak.net/svn/pypy/extradoc/eu-report/D05.1_Publish_on_translating_a_very-high-level_description.pdf - -.. _`PyPy's RPython`: - ------------------------------- -What is this RPython language? ------------------------------- - -RPython is a restricted subset of the Python language. It is used for -implementing dynamic language interpreters within the PyPy framework. The -restrictions are to ensure that type inference (and so, ultimately, translation -to other languages) of RPython programs is possible. These restrictions only -apply after the full import happens, so at import time arbitrary Python code can -be executed. - -The property of "being RPython" always applies to a full program, not to single -functions or modules (the translation tool chain does a full program analysis). -"Full program" in the context of "being RPython" is all the code reachable from -an "entry point" function. The translation toolchain follows all calls -recursively and discovers what belongs to the program and what not. - -The restrictions that apply to programs to be RPython mostly limit the ability -of mixing types in arbitrary ways. RPython does not allow the usage of two -different types in the same variable. In this respect (and in some others) it -feels a bit like Java. Other features not allowed in RPython are the usage of -special methods (``__xxx__``) except ``__init__`` and ``__del__``, and the -usage of reflection capabilities (e.g. ``__dict__``). - -Most existing standard library modules are not RPython, except for -some functions in ``os``, ``math`` and ``time`` that are natively -supported. In general it is quite unlikely that an existing Python -program is by chance RPython; it is most likely that it would have to be -heavily rewritten. 
-To read more about the RPython limitations read the `RPython description`_. - -.. _`RPython description`: coding-guide.html#restricted-python - ---------------------------------------------------------------- -Does RPython have anything to do with Zope's Restricted Python? ---------------------------------------------------------------- - -No. `Zope's RestrictedPython`_ aims to provide a sandboxed -execution environment for CPython. `PyPy's RPython`_ is the implementation -language for dynamic language interpreters. However, PyPy also provides -a robust `sandboxed Python Interpreter`_. - -.. _`sandboxed Python Interpreter`: sandbox.html -.. _`Zope's RestrictedPython`: http://pypi.python.org/pypi/RestrictedPython - -------------------------------------------------------------------------- -Can I use PyPy and RPython to compile smaller parts of my Python program? -------------------------------------------------------------------------- - -No. That would be possible, and we played with early attempts in that -direction, but there are many delicate issues: for example, how the -compiled and the non-compiled parts exchange data. Supporting this in a -nice way would be a lot of work. - -PyPy is certainly a good starting point for someone that would like to -work in that direction. Early attempts were dropped because they -conflicted with refactorings that we needed in order to progress on the -rest of PyPy; the currently active developers of PyPy have different -priorities. If someone wants to start working in that direction I -imagine that he might get a (very little) bit of support from us, -though. - -Alternatively, it's possible to write a mixed-module, i.e. an extension -module for PyPy in RPython, which you can then import from your Python -program when it runs on top of PyPy. This is similar to writing a C -extension module for CPython in term of investment of effort (without -all the INCREF/DECREF mess, though). 
- ------------------------------------------------------- -What's the ``"NOT_RPYTHON"`` I see in some docstrings? ------------------------------------------------------- - -If you put "NOT_RPYTHON" into the docstring of a function and that function is -found while trying to translate an RPython program, the translation process -stops and reports this as an error. You can therefore mark functions as -"NOT_RPYTHON" to make sure that they are never analyzed. - - -------------------------------------------------------------------- -Couldn't we simply take a Python syntax tree and turn it into Lisp? -------------------------------------------------------------------- - -It's not necessarily nonsense, but it's not really The PyPy Way. It's -pretty hard, without some kind of type inference, to translate, say this -Python:: - - a + b - -into anything significantly more efficient than this Common Lisp:: - - (py:add a b) - -And making type inference possible is what RPython is all about. - -You could make ``#'py:add`` a generic function and see if a given CLOS -implementation is fast enough to give a useful speed (but I think the -coercion rules would probably drive you insane first). -- mwh - --------------------------------------------- -Do I have to rewrite my programs in RPython? --------------------------------------------- - -No. PyPy always runs your code in its own interpreter, which is a -full and compliant Python 2.5 interpreter. RPython_ is only the -language in which parts of PyPy itself are written and extension -modules for it. The answer to whether something needs to be written as -an extension module, apart from the "gluing to external libraries" reason, will -change over time as speed for normal Python code improves. - -------------------------- -Which backends are there? -------------------------- - -Currently, there are backends for C_, the CLI_, and the JVM_. -All of these can translate the entire PyPy interpreter. 
-To learn more about backends take a look at the `translation document`_. - -.. _C: translation.html#the-c-back-end -.. _CLI: cli-backend.html -.. _JVM: translation.html#genjvm -.. _`translation document`: translation.html - ----------------------- -How do I compile PyPy? ----------------------- - -See the `getting-started`_ guide. - -.. _`how do I compile my own interpreters`: - -------------------------------------- -How do I compile my own interpreters? -------------------------------------- - -Start from the example of -`pypy/translator/goal/targetnopstandalone.py`_, which you compile by -typing:: - - python translate.py targetnopstandalone - -You can have a look at intermediate C source code, which is (at the -moment) put in ``/tmp/usession-*/testing_1/testing_1.c``. Of course, -all the functions and stuff used directly and indirectly by your -``entry_point()`` function has to be RPython_. - - -.. _`RPython`: coding-guide.html#rpython -.. _`getting-started`: getting-started.html - -.. include:: _ref.txt - ----------------------------------------------------------- -Why does PyPy draw a Mandelbrot fractal while translating? ----------------------------------------------------------- - -Because it's fun. diff --git a/pypy/doc/config/objspace.usemodules.exceptions.txt b/pypy/doc/config/objspace.usemodules.exceptions.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.exceptions.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'exceptions' module. -This module is essential, included by default and should not be removed. diff --git a/pypy/doc/discussion/gc.txt b/pypy/doc/discussion/gc.txt deleted file mode 100644 --- a/pypy/doc/discussion/gc.txt +++ /dev/null @@ -1,77 +0,0 @@ - -*Note: this things are experimental and are being implemented on the -`io-improvements`_ branch* - -.. 
_`io-improvements`: http://codespeak.net/svn/pypy/branch/io-improvements - -============= -GC operations -============= - -This document tries to gather gc-related issues which are very recent -or in-development. Also, it tries to document needed gc refactorings -and expected performance of certain gc-related operations. - -Problem area -============ - -Since some of our gcs are moving, we at some point decided to simplify -the issue of having care of it by always copying the contents of -data that goes to C level. This yields a performance penalty, also -because some gcs does not move data around anyway. - -So we decided to introduce new operations which will simplify issues -regarding this. - -Pure gc operations -================== - -(All available from rlib.rgc) - -* can_move(p) - returns a flag telling whether pointer p will move. - useful for example when you want to know whether memcopy is safe. - -* malloc_nonmovable(TP, n=None) - tries to allocate non-moving object. - if it succeeds, it return an object, otherwise (for whatever reasons) - returns null pointer. Does not raise! (never) - -Usage patterns -============== - -Usually those functions are used via helpers located in rffi. For things like -os.write - first get_nonmovingbuffer(data) that will give you a pointer -suitable of passing to C and finally free_nonmovingbuffer. - -For os.read like usage - you first call alloc_buffer (that will allocate a -buffer of desired size passable to C) and afterwards create str_from_buffer, -finally calling keep_buffer_alive_until_here. - -String builder -============== - -In Python strings are immutable by design. In RPython this still yields true, -but since we cooperate with lower (C/POSIX) level, which has no notion of -strings, we use buffers. Typical use case is to use list of characters l and -than ''.join(l) in order to get string. This requires a lot of unnecessary -copying, which yields performance penalty for such operations as string -formatting. 
Hence the idea of string builder. String builder would be an -object to which you can append strings or characters and afterwards build it -to a string. Ideally, this set of operations would not contain any copying -whatsoever. - -Low level gc operations for string builder ------------------------------------------- - -* alloc_buffer(T, size) - allocates Array(nolength=True) with possibility - of later becoming of shape T - -* realloc_buffer(buf, newsize) - tries to shrink or enlarge buffer buf. Returns - new pointer (since it might involve copying) - -* build_buffer(T, buf) - creates a type T (previously passed to alloc_buffer) - from buffer. - -Depending on a gc, those might be implemented dumb (realloc always copies) -or using C-level realloc. Might be implemented also in whatever clever way -comes to mind. - diff --git a/pypy/doc/config/translation.taggedpointers.txt b/pypy/doc/config/translation.taggedpointers.txt deleted file mode 100644 --- a/pypy/doc/config/translation.taggedpointers.txt +++ /dev/null @@ -1,3 +0,0 @@ -Enable tagged pointers. This option is mostly useful for the Smalltalk and -Prolog interpreters. For the Python interpreter the option -:config:`objspace.std.withsmallint` should be used. diff --git a/pypy/doc/config/objspace.std.sharesmallstr.txt b/pypy/doc/config/objspace.std.sharesmallstr.txt deleted file mode 100644 diff --git a/pypy/doc/config/objspace.usemodules._locale.txt b/pypy/doc/config/objspace.usemodules._locale.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._locale.txt +++ /dev/null @@ -1,3 +0,0 @@ -Use the '_locale' module. -This module runs _locale written in RPython (instead of ctypes version). -It's not really finished yet; it's enabled by default on Windows. 
diff --git a/pypy/jit/metainterp/compile.py b/pypy/jit/metainterp/compile.py --- a/pypy/jit/metainterp/compile.py +++ b/pypy/jit/metainterp/compile.py @@ -39,7 +39,10 @@ def create_empty_loop(metainterp, name_prefix=''): name = metainterp.staticdata.stats.name_for_new_loop() - return TreeLoop(name_prefix + name) + loop = TreeLoop(name_prefix + name) + loop.call_pure_results = metainterp.call_pure_results + return loop + def make_loop_token(nb_args, jitdriver_sd): loop_token = LoopToken() @@ -86,6 +89,8 @@ """Try to compile a new loop by closing the current history back to the first operation. """ + from pypy.jit.metainterp.optimize import optimize_loop + loop = create_empty_loop(metainterp) loop.inputargs = inputargs for box in loop.inputargs: @@ -105,8 +110,8 @@ loop.inputvalues = inputvalues try: - old_loop_token = jitdriver_sd.warmstate.optimize_loop( - metainterp_sd, old_loop_tokens, loop) + old_loop_token = optimize_loop(metainterp_sd, old_loop_tokens, loop, + jitdriver_sd.warmstate.enable_opts) except InvalidLoop: return None if old_loop_token is not None: @@ -570,6 +575,8 @@ """Try to compile a new bridge leading from the beginning of the history to some existing place. """ + from pypy.jit.metainterp.optimize import optimize_bridge + # The history contains new operations to attach as the code for the # failure of 'resumekey.guard_op'. 
# @@ -586,10 +593,9 @@ else: inline_short_preamble = True try: - target_loop_token = state.optimize_bridge(metainterp_sd, - old_loop_tokens, new_loop, - inline_short_preamble, - retrace) + target_loop_token = optimize_bridge(metainterp_sd, old_loop_tokens, + new_loop, state.enable_opts, + inline_short_preamble, retrace) except InvalidLoop: # XXX I am fairly convinced that optimize_bridge cannot actually raise # InvalidLoop diff --git a/pypy/doc/config/translation.log.txt b/pypy/doc/config/translation.log.txt deleted file mode 100644 --- a/pypy/doc/config/translation.log.txt +++ /dev/null @@ -1,5 +0,0 @@ -Include debug prints in the translation. - -These must be enabled by setting the PYPYLOG environment variable. -The exact set of features supported by PYPYLOG is described in -pypy/translation/c/src/debug.h. diff --git a/pypy/doc/config/translation.profopt.txt b/pypy/doc/config/translation.profopt.txt deleted file mode 100644 --- a/pypy/doc/config/translation.profopt.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use GCCs profile-guided optimizations. This option specifies the the -arguments with which to call pypy-c (and in general the translated -RPython program) to gather profile data. Example for pypy-c: "-c 'from -richards import main;main(); from test import pystone; -pystone.main()'" diff --git a/pypy/doc/config/objspace.usemodules.rbench.txt b/pypy/doc/config/objspace.usemodules.rbench.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rbench.txt +++ /dev/null @@ -1,4 +0,0 @@ -Use the built-in 'rbench' module. -This module contains geninterpreted versions of pystone and richards, -so it is useful to measure the interpretation overhead of the various -pypy-\*. 
diff --git a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt b/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.profile_based_inline_threshold.txt +++ /dev/null @@ -1,2 +0,0 @@ -Weight threshold used to decide whether to inline flowgraphs. -This is for profile-based inlining (:config:`translation.backendopt.profile_based_inline`). diff --git a/pypy/doc/getting-started-dev.txt b/pypy/doc/getting-started-dev.txt deleted file mode 100644 --- a/pypy/doc/getting-started-dev.txt +++ /dev/null @@ -1,425 +0,0 @@ -=============================================================================== -PyPy - Getting Started with the Translation Toolchain and Development Process -=============================================================================== - -.. contents:: -.. sectnum:: - -.. _`try out the translator`: - -Trying out the translator -------------------------- - -The translator is a tool based on the PyPy interpreter which can translate -sufficiently static Python programs into low-level code (in particular it can -be used to translate the `full Python interpreter`_). To be able to use it -you need to (if you want to look at the flowgraphs, which you obviously -should): - - * Download and install Pygame_. - - * Download and install `Dot Graphviz`_ (optional if you have an internet - connection: the flowgraph viewer then connects to - codespeak.net and lets it convert the flowgraph by a graphviz server). - -To start the interactive translator shell do:: - - cd pypy - python bin/translatorshell.py - -Test snippets of translatable code are provided in the file -``pypy/translator/test/snippet.py``, which is imported under the name -``snippet``. For example:: - - >>> t = Translation(snippet.is_perfect_number) - >>> t.view() - -After that, the graph viewer pops up, that lets you interactively inspect the -flow graph. 
To move around, click on something that you want to inspect. -To get help about how to use it, press 'H'. To close it again, press 'Q'. - -Trying out the type annotator -+++++++++++++++++++++++++++++ - -We have a type annotator that can completely infer types for functions like -``is_perfect_number`` (as well as for much larger examples):: - - >>> t.annotate([int]) - >>> t.view() - -Move the mouse over variable names (in red) to see their inferred types. - - -Translating the flow graph to C code -++++++++++++++++++++++++++++++++++++ - -The graph can be turned into C code:: - - >>> t.rtype() - >>> f = t.compile_c() - -The first command replaces the operations with other low level versions that -only use low level types that are available in C (e.g. int). To try out the -compiled version:: - - >>> f(5) - False - >>> f(6) - True - -Translating the flow graph to CLI or JVM code -+++++++++++++++++++++++++++++++++++++++++++++ - -PyPy also contains a `CLI backend`_ and JVM backend which -can translate flow graphs into .NET executables or a JVM jar -file respectively. Both are able to translate the entire -interpreter. You can try out the CLI and JVM backends -from the interactive translator shells as follows:: - - >>> def myfunc(a, b): return a+b - ... - >>> t = Translation(myfunc) - >>> t.annotate([int, int]) - >>> f = t.compile_cli() # or compile_jvm() - >>> f(4, 5) - 9 - -The object returned by ``compile_cli`` or ``compile_jvm`` -is a wrapper around the real -executable: the parameters are passed as command line arguments, and -the returned value is read from the standard output. - -Once you have compiled the snippet, you can also try to launch the -executable directly from the shell. 
You will find the -executable in one of the ``/tmp/usession-*`` directories:: - - # For CLI: - $ mono /tmp/usession-trunk-/main.exe 4 5 - 9 - - # For JVM: - $ java -cp /tmp/usession-trunk-/pypy pypy.Main 4 5 - 9 - -To translate and run for the CLI you must have the SDK installed: Windows -users need the `.NET Framework SDK 2.0`_, while Linux and Mac users -can use Mono_. To translate and run for the JVM you must have a JDK -installed (at least version 5) and ``java``/``javac`` on your path. - -A slightly larger example -+++++++++++++++++++++++++ - -There is a small-to-medium demo showing the translator and the annotator:: - - cd demo - ../pypy/translator/goal/translate.py --view --annotate bpnn.py - -This causes ``bpnn.py`` to display itself as a call graph and class -hierarchy. Clicking on functions shows the flow graph of the particular -function. Clicking on a class shows the attributes of its instances. All -this information (call graph, local variables' types, attributes of -instances) is computed by the annotator. - -To turn this example to C code (compiled to the executable ``bpnn-c``), -type simply:: - - ../pypy/translator/goal/translate.py bpnn.py - - -Translating Full Programs -+++++++++++++++++++++++++ - -To translate full RPython programs, there is the script ``translate.py`` in -``translator/goal``. Examples for this are a slightly changed version of -Pystone:: - - cd pypy/translator/goal - python translate.py targetrpystonedalone - -This will produce the executable "targetrpystonedalone-c". - -The largest example of this process is to translate the `full Python -interpreter`_. There is also an FAQ about how to set up this process for `your -own interpreters`_. - -.. _`your own interpreters`: faq.html#how-do-i-compile-my-own-interpreters - -.. _`start reading sources`: - -Where to start reading the sources ----------------------------------- - -PyPy is made from parts that are relatively independent from each other. 
-You should start looking at the part that attracts you most (all paths are -relative to the PyPy top level directory). You may look at our `directory reference`_ -or start off at one of the following points: - -* `pypy/interpreter`_ contains the bytecode interpreter: bytecode dispatcher - in pyopcode.py_, frame and code objects in eval.py_ and pyframe.py_, - function objects and argument passing in function.py_ and argument.py_, - the object space interface definition in baseobjspace.py_, modules in - module.py_ and mixedmodule.py_. Core types supporting the bytecode - interpreter are defined in typedef.py_. - -* `pypy/interpreter/pyparser`_ contains a recursive descent parser, - and input data files that allow it to parse both Python 2.3 and 2.4 - syntax. Once the input data has been processed, the parser can be - translated by the above machinery into efficient code. - -* `pypy/interpreter/astcompiler`_ contains the compiler. This - contains a modified version of the compiler package from CPython - that fixes some bugs and is translatable. That the compiler and - parser are translatable is new in 0.8.0 and it makes using the - resulting binary interactively much more pleasant. - -* `pypy/objspace/std`_ contains the `Standard object space`_. The main file - is objspace.py_. For each type, the files ``xxxtype.py`` and - ``xxxobject.py`` contain respectively the definition of the type and its - (default) implementation. - -* `pypy/objspace`_ contains a few other object spaces: the thunk_, - trace_ and flow_ object spaces. The latter is a relatively short piece - of code that builds the control flow graphs when the bytecode interpreter - runs in it. - -* `pypy/translator`_ contains the code analysis and generation stuff. - Start reading from translator.py_, from which it should be easy to follow - the pieces of code involved in the various translation phases. 
- -* `pypy/annotation`_ contains the data model for the type annotation that - can be inferred about a graph. The graph "walker" that uses this is in - `pypy/annotation/annrpython.py`_. - -* `pypy/rpython`_ contains the code of the RPython typer. The typer transforms - annotated flow graphs in a way that makes them very similar to C code so - that they can be easy translated. The graph transformations are controlled - by the stuff in `pypy/rpython/rtyper.py`_. The object model that is used can - be found in `pypy/rpython/lltypesystem/lltype.py`_. For each RPython type - there is a file rxxxx.py that contains the low level functions needed for - this type. - -* `pypy/rlib`_ contains the RPython standard library, things that you can - use from rpython. - -.. _optionaltool: - - -Running PyPy's unit tests -------------------------- - -PyPy development always was and is still thorougly test-driven. -We use the flexible `py.test testing tool`_ which you can `install independently -`_ and use indepedently -from PyPy for other projects. - -The PyPy source tree comes with an inlined version of ``py.test`` -which you can invoke by typing:: - - python pytest.py -h - -This is usually equivalent to using an installed version:: - - py.test -h - -If you encounter problems with the installed version -make sure you have the correct version installed which -you can find out with the ``--version`` switch. - -Now on to running some tests. PyPy has many different test directories -and you can use shell completion to point at directories or files:: - - py.test pypy/interpreter/test/test_pyframe.py - - # or for running tests of a whole subdirectory - py.test pypy/interpreter/ - -See `py.test usage and invocations`_ for some more generic info -on how you can run tests. - -Beware trying to run "all" pypy tests by pointing to the root -directory or even the top level subdirectory ``pypy``. It takes -hours and uses huge amounts of RAM and is not recommended. 
- -To run CPython regression tests you can point to the ``lib-python`` -directory:: - - py.test lib-python/2.7.0/test/test_datetime.py - -This will usually take a long time because this will run -the PyPy Python interpreter on top of CPython. On the plus -side, it's usually still faster than doing a full translation -and running the regression test with the translated PyPy Python -interpreter. - -.. _`py.test testing tool`: http://pytest.org -.. _`py.test usage and invocations`: http://pytest.org/usage.html#usage - -Special Introspection Features of the Untranslated Python Interpreter ---------------------------------------------------------------------- - -If you are interested in the inner workings of the PyPy Python interpreter, -there are some features of the untranslated Python interpreter that allow you -to introspect its internals. - -Interpreter-level console -+++++++++++++++++++++++++ - -If you start an untranslated Python interpreter via:: - - python pypy-svn/pypy/bin/py.py - -If you press - on the console you enter the interpreter-level console, a -usual CPython console. You can then access internal objects of PyPy -(e.g. the `object space`_) and any variables you have created on the PyPy -prompt with the prefix ``w_``:: - - >>>> a = 123 - >>>> - *** Entering interpreter-level console *** - >>> w_a - W_IntObject(123) - -The mechanism works in both directions. If you define a variable with the ``w_`` prefix on the interpreter-level, you will see it on the app-level:: - - >>> w_l = space.newlist([space.wrap(1), space.wrap("abc")]) - >>> - *** Leaving interpreter-level console *** - - KeyboardInterrupt - >>>> l - [1, 'abc'] - -.. _`object space`: objspace.html - -Note that the prompt of the interpreter-level console is only '>>>' since -it runs on CPython level. If you want to return to PyPy, press (under -Linux) or , (under Windows). - -You may be interested in reading more about the distinction between -`interpreter-level and app-level`_. - -.. 
_`interpreter-level and app-level`: coding-guide.html#interpreter-level - -.. _`trace example`: - -Tracing bytecode and operations on objects -++++++++++++++++++++++++++++++++++++++++++ - -You can use the trace object space to monitor the interpretation -of bytecodes in connection with object space operations. To enable -it, set ``__pytrace__=1`` on the interactive PyPy console:: - - >>>> __pytrace__ = 1 - Tracing enabled - >>>> a = 1 + 2 - |- <<<< enter a = 1 + 2 @ 1 >>>> - |- 0 LOAD_CONST 0 (W_IntObject(1)) - |- 3 LOAD_CONST 1 (W_IntObject(2)) - |- 6 BINARY_ADD - |- add(W_IntObject(1), W_IntObject(2)) -> W_IntObject(3) - |- 7 STORE_NAME 0 (a) - |- hash(W_StringObject('a')) -> W_IntObject(-468864544) - |- int_w(W_IntObject(-468864544)) -> -468864544 - |-10 LOAD_CONST 2 () - |-13 RETURN_VALUE - |- <<<< leave a = 1 + 2 @ 1 >>>> - -Demos -------- - -The `demo/`_ directory contains examples of various aspects of PyPy, -ranging from running regular Python programs (that we used as compliance goals) -over experimental distribution mechanisms to examples translating -sufficiently static programs into low level code. - -Additional Tools for running (and hacking) PyPy ------------------------------------------------ - -We use some optional tools for developing PyPy. They are not required to run -the basic tests or to get an interactive PyPy prompt but they help to -understand and debug PyPy especially for the translation process. - -graphviz & pygame for flow graph viewing (highly recommended) -+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++ - -graphviz and pygame are both necessary if you -want to look at generated flow graphs: - - graphviz: http://www.graphviz.org/Download.php - - pygame: http://www.pygame.org/download.shtml - -CTypes on Python 2.4 -++++++++++++++++++++++++++++ - -`ctypes`_ is included in CPython 2.5 and higher. CPython 2.4 users needs to -install it if they want to run low-level tests. See -the `download page of ctypes`_. - -.. 
_`download page of ctypes`: http://sourceforge.net/project/showfiles.php?group_id=71702 -.. _`ctypes`: http://starship.python.net/crew/theller/ctypes/ - -.. _`py.test`: - -py.test and the py lib -+++++++++++++++++++++++ - -The `py.test testing tool`_ drives all our testing needs. - -We use the `py library`_ for filesystem path manipulations, terminal -writing, logging and some other support functionality. - -You don't neccessarily need to install these two libraries because -we also ship them inlined in the PyPy source tree. - -Getting involved ------------------ - -PyPy employs an open development process. You are invited to join our -`pypy-dev mailing list`_ or look at the other `contact -possibilities`_. Usually we give out commit rights fairly liberally, so if you -want to do something with PyPy, you can become a committer. We are also doing -coding Sprints which are -separately announced and often happen around Python conferences such -as EuroPython or Pycon. Upcoming events are usually announced on `the blog`_. - -.. _`full Python interpreter`: getting-started-python.html -.. _`the blog`: http://morepypy.blogspot.com -.. _`pypy-dev mailing list`: http://codespeak.net/mailman/listinfo/pypy-dev -.. _`contact possibilities`: index.html - -.. _`py library`: http://pylib.org - -.. _`Spidermonkey`: http://www.mozilla.org/js/spidermonkey/ - -.. _`.NET Framework SDK 2.0`: http://msdn.microsoft.com/netframework/downloads/updates/default.aspx -.. _Mono: http://www.mono-project.com/Main_Page -.. _`CLI backend`: cli-backend.html -.. _clr: clr-module.html - -.. _`Dot Graphviz`: http://www.graphviz.org/ -.. _Pygame: http://www.pygame.org/ -.. _pyopcode.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyopcode.py -.. _eval.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/eval.py -.. _pyframe.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/pyframe.py -.. _function.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/function.py -.. 
_argument.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/argument.py -.. _baseobjspace.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/baseobjspace.py -.. _module.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/module.py -.. _mixedmodule.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/mixedmodule.py -.. _typedef.py: http://codespeak.net/svn/pypy/trunk/pypy/interpreter/typedef.py -.. _Standard object space: objspace.html#the-standard-object-space -.. _objspace.py: ../../pypy/objspace/std/objspace.py -.. _thunk: ../../pypy/objspace/thunk.py -.. _trace: ../../pypy/objspace/trace.py -.. _flow: ../../pypy/objspace/flow/ -.. _translator.py: ../../pypy/translator/translator.py -.. _mailing lists: index.html -.. _documentation: docindex.html -.. _unit tests: coding-guide.html#test-design - -.. _`directory reference`: docindex.html#directory-reference - -.. include:: _ref.txt - diff --git a/pypy/doc/discussion/finalizer-order.txt b/pypy/doc/discussion/finalizer-order.txt deleted file mode 100644 --- a/pypy/doc/discussion/finalizer-order.txt +++ /dev/null @@ -1,166 +0,0 @@ -Ordering finalizers in the SemiSpace GC -======================================= - -Goal ----- - -After a collection, the SemiSpace GC should call the finalizers on -*some* of the objects that have one and that have become unreachable. -Basically, if there is a reference chain from an object a to an object b -then it should not call the finalizer for b immediately, but just keep b -alive and try again to call its finalizer after the next collection. - -This basic idea fails when there are cycles. It's not a good idea to -keep the objects alive forever or to never call any of the finalizers. -The model we came up with is that in this case, we could just call the -finalizer of one of the objects in the cycle -- but only, of course, if -there are no other objects outside the cycle that has a finalizer and a -reference to the cycle. 
- -More precisely, given the graph of references between objects:: - - for each strongly connected component C of the graph: - if C has at least one object with a finalizer: - if there is no object outside C which has a finalizer and - indirectly references the objects in C: - mark one of the objects of C that has a finalizer - copy C and all objects it references to the new space - - for each marked object: - detach the finalizer (so that it's not called more than once) - call the finalizer - -Algorithm ---------- - -During deal_with_objects_with_finalizers(), each object x can be in 4 -possible states:: - - state[x] == 0: unreachable - state[x] == 1: (temporary state, see below) - state[x] == 2: reachable from any finalizer - state[x] == 3: alive - -Initially, objects are in state 0 or 3 depending on whether they have -been copied or not by the regular sweep done just before. The invariant -is that if there is a reference from x to y, then state[y] >= state[x]. - -The state 2 is used for objects that are reachable from a finalizer but -that may be in the same strongly connected component than the finalizer. -The state of these objects goes to 3 when we prove that they can be -reached from a finalizer which is definitely not in the same strongly -connected component. Finalizers on objects with state 3 must not be -called. - -Let closure(x) be the list of objects reachable from x, including x -itself. Pseudo-code (high-level) to get the list of marked objects:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - for y in closure(x): - if state[y] == 0: - state[y] = 2 - elif state[y] == 2: - state[y] = 3 - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -This does the right thing independently on the order in which the -objects_with_finalizers are enumerated. 
First assume that [x1, .., xn] -are all in the same unreachable strongly connected component; no object -with finalizer references this strongly connected component from -outside. Then: - -* when x1 is processed, state[x1] == .. == state[xn] == 0 independently - of whatever else we did before. So x1 gets marked and we set - state[x1] = .. = state[xn] = 2. - -* when x2, ... xn are processed, their state is != 0 so we do nothing. - -* in the final loop, only x1 is marked and state[x1] == 2 so it stays - marked. - -Now, let's assume that x1 and x2 are not in the same strongly connected -component and there is a reference path from x1 to x2. Then: - -* if x1 is enumerated before x2, then x2 is in closure(x1) and so its - state gets at least >= 2 when we process x1. When we process x2 later - we just skip it ("continue" line) and so it doesn't get marked. - -* if x2 is enumerated before x1, then when we process x2 we mark it and - set its state to >= 2 (before x2 is in closure(x2)), and then when we - process x1 we set state[x2] == 3. So in the final loop x2 gets - removed from the "marked" list. - -I think that it proves that the algorithm is doing what we want. 
- -The next step is to remove the use of closure() in the algorithm in such -a way that the new algorithm has a reasonable performance -- linear in -the number of objects whose state it manipulates:: - - marked = [] - for x in objects_with_finalizers: - if state[x] != 0: - continue - marked.append(x) - recursing on the objects y starting from x: - if state[y] == 0: - state[y] = 1 - follow y's children recursively - elif state[y] == 2: - state[y] = 3 - follow y's children recursively - else: - don't need to recurse inside y - recursing on the objects y starting from x: - if state[y] == 1: - state[y] = 2 - follow y's children recursively - else: - don't need to recurse inside y - for x in marked: - assert state[x] >= 2 - if state[x] != 2: - marked.remove(x) - -In this algorithm we follow the children of each object at most 3 times, -when the state of the object changes from 0 to 1 to 2 to 3. In a visit -that doesn't change the state of an object, we don't follow its children -recursively. - -In practice, in the SemiSpace, Generation and Hybrid GCs, we can encode -the 4 states with a single extra bit in the header: - - ===== ============= ======== ==================== - state is_forwarded? bit set? bit set in the copy? - ===== ============= ======== ==================== - 0 no no n/a - 1 no yes n/a - 2 yes yes yes - 3 yes whatever no - ===== ============= ======== ==================== - -So the loop above that does the transition from state 1 to state 2 is -really just a copy(x) followed by scan_copied(). We must also clear the -bit in the copy at the end, to clean up before the next collection -(which means recursively bumping the state from 2 to 3 in the final -loop). 
- -In the MiniMark GC, the objects don't move (apart from when they are -copied out of the nursery), but we use the flag GCFLAG_VISITED to mark -objects that survive, so we can also have a single extra bit for -finalizers: - - ===== ============== ============================ - state GCFLAG_VISITED GCFLAG_FINALIZATION_ORDERING - ===== ============== ============================ - 0 no no - 1 no yes - 2 yes yes - 3 yes no - ===== ============== ============================ diff --git a/pypy/doc/config/objspace.std.withdictmeasurement.txt b/pypy/doc/config/objspace.std.withdictmeasurement.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.withdictmeasurement.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/how-to-release.txt b/pypy/doc/how-to-release.txt deleted file mode 100644 --- a/pypy/doc/how-to-release.txt +++ /dev/null @@ -1,54 +0,0 @@ -Making a PyPy Release -======================= - -Overview ---------- - -As a meta rule setting up issues in the tracker for items here may help not -forgetting things. A set of todo files may also work. - -Check and prioritize all issues for the release, postpone some if necessary, -create new issues also as necessary. A meeting (or meetings) should be -organized to decide what things are priorities, should go in and work for -the release. - -An important thing is to get the documentation into an up-to-date state! - -Release Steps ----------------- - -* at code freeze make a release branch under - http://codepeak.net/svn/pypy/release/x.y(.z). 
IMPORTANT: bump the - pypy version number in module/sys/version.py and in - module/cpyext/include/patchlevel.h, notice that the branch - will capture the revision number of this change for the release; - some of the next updates may be done before or after branching; make - sure things are ported back to the trunk and to the branch as - necessary -* update pypy/doc/contributor.txt (and possibly LICENSE) -* update README -* go to pypy/tool/release and run: - force-builds.py /release/ -* wait for builds to complete, make sure there are no failures -* run pypy/tool/release/make_release.py, this will build necessary binaries - and upload them to pypy.org - - Following binaries should be built, however, we need more buildbots: - JIT: windows, linux, os/x - no JIT: windows, linux, os/x - sandbox: linux, os/x - stackless: windows, linux, os/x - -* write release announcement pypy/doc/release-x.y(.z).txt - the release announcement should contain a direct link to the download page -* update pypy.org (under extradoc/pypy.org), rebuild and commit - -* update http://codespeak.net/pypy/trunk: - code0> + chmod -R yourname:users /www/codespeak.net/htdocs/pypy/trunk - local> cd ..../pypy/doc && py.test - local> cd ..../pypy - local> rsync -az doc codespeak.net:/www/codespeak.net/htdocs/pypy/trunk/pypy/ - -* post announcement on morepypy.blogspot.com -* send announcements to pypy-dev, python-list, - python-announce, python-dev ... diff --git a/pypy/doc/config/objspace.usemodules.select.txt b/pypy/doc/config/objspace.usemodules.select.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.select.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'select' module. -This module is expected to be fully working. 
diff --git a/pypy/doc/config/objspace.std.getattributeshortcut.txt b/pypy/doc/config/objspace.std.getattributeshortcut.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.std.getattributeshortcut.txt +++ /dev/null @@ -1,1 +0,0 @@ -Performance only: track types that override __getattribute__. diff --git a/pypy/doc/config/objspace.usemodules.bz2.txt b/pypy/doc/config/objspace.usemodules.bz2.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.bz2.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'bz2' module. -This module is expected to be working and is included by default. diff --git a/pypy/doc/discussion/emptying-the-malloc-zoo.txt b/pypy/doc/discussion/emptying-the-malloc-zoo.txt deleted file mode 100644 --- a/pypy/doc/discussion/emptying-the-malloc-zoo.txt +++ /dev/null @@ -1,40 +0,0 @@ -.. coding: utf-8 - -Emptying the malloc zoo -======================= - -Around the end-of-the-EU-project time there were two major areas of -obscurity in the memory management area: - - 1. The confusing set of operations that the low-level backend are - expected to implement. - - 2. The related, but slightly different, confusion of the various - "flavours" of malloc: what's the difference between - lltype.malloc(T, flavour='raw') and llmemory.raw_malloc(sizeof(T))? - -At the post-ep2007 sprint, Samuele and Michael attacked the first -problem a bit: making the Boehm GC transformer only require three -simple operations of the backend. This could be extending still -further by having the gc transformer use rffi to insert calls to the -relevant Boehm functions^Wmacros, and then the backend wouldn't need -to know anything about Boehm at all (but... LLVM). - -A potential next step is to work out what we want the "llpython" -interface to memory management to be. - -There are various use cases: - -**lltype.malloc(T) – T is a fixed-size GC container** - - This is the default case. Non-pointers inside the allocated memory - will not be zeroed. 
The object will be managed by the GC, no - deallocation required. - -**lltype.malloc(T, zero=True) – T is a GC container** - - As above, but all fields will be cleared. - -**lltype.malloc(U, raw=True) – U is not a GC container** - - Blah. diff --git a/pypy/doc/config/objspace.usemodules._md5.txt b/pypy/doc/config/objspace.usemodules._md5.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._md5.txt +++ /dev/null @@ -1,5 +0,0 @@ -Use the built-in '_md5' module. -This module is expected to be working and is included by default. -There is also a pure Python version in lib_pypy which is used -if the built-in is disabled, but it is several orders of magnitude -slower. diff --git a/pypy/doc/config/translation.cli.txt b/pypy/doc/config/translation.cli.txt deleted file mode 100644 --- a/pypy/doc/config/translation.cli.txt +++ /dev/null @@ -1,1 +0,0 @@ -.. intentionally empty diff --git a/pypy/doc/config/translation.platform.txt b/pypy/doc/config/translation.platform.txt deleted file mode 100644 --- a/pypy/doc/config/translation.platform.txt +++ /dev/null @@ -1,1 +0,0 @@ -select the target platform, in case of cross-compilation diff --git a/pypy/doc/config/translation.backendopt.mallocs.txt b/pypy/doc/config/translation.backendopt.mallocs.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.mallocs.txt +++ /dev/null @@ -1,29 +0,0 @@ -This optimization enables "malloc removal", which "explodes" -allocations of structures which do not escape from the function they -are allocated in into one or more additional local variables. - -An example. 
Consider this rather unlikely seeming code:: - - class C: - pass - def f(y): - c = C() - c.x = y - return c.x - -Malloc removal will spot that the ``C`` object can never leave ``f`` -and replace the above with code like this:: - - def f(y): - _c__x = y - return _c__x - -It is rare for code to be directly written in a way that allows this -optimization to be useful, but inlining often results in opportunities -for its use (and indeed, this is one of the main reasons PyPy does its -own inlining rather than relying on the C compilers). - -For much more information about this and other optimizations you can -read section 4.1 of the technical report on "Massive Parallelism and -Translation Aspects" which you can find on the `Technical reports page -<../index-report.html>`__. diff --git a/pypy/doc/config/objspace.logbytecodes.txt b/pypy/doc/config/objspace.logbytecodes.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.logbytecodes.txt +++ /dev/null @@ -1,3 +0,0 @@ -Internal option. - -.. internal diff --git a/pypy/doc/config/translation.dump_static_data_info.txt b/pypy/doc/config/translation.dump_static_data_info.txt deleted file mode 100644 --- a/pypy/doc/config/translation.dump_static_data_info.txt +++ /dev/null @@ -1,3 +0,0 @@ -Dump information about static prebuilt constants, to the file -TARGETNAME.staticdata.info in the /tmp/usession-... directory. This file can -be later inspected using the script ``bin/reportstaticdata.py``. diff --git a/pypy/doc/config/objspace.usemodules.zlib.txt b/pypy/doc/config/objspace.usemodules.zlib.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.zlib.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the 'zlib' module. -This module is expected to be working and is included by default. 
diff --git a/pypy/doc/config/translation.backendopt.inline_heuristic.txt b/pypy/doc/config/translation.backendopt.inline_heuristic.txt deleted file mode 100644 --- a/pypy/doc/config/translation.backendopt.inline_heuristic.txt +++ /dev/null @@ -1,4 +0,0 @@ -Internal option. Switch to a different weight heuristic for inlining. -This is for basic inlining (:config:`translation.backendopt.inline`). - -.. internal diff --git a/pypy/doc/distribution.txt b/pypy/doc/distribution.txt deleted file mode 100644 --- a/pypy/doc/distribution.txt +++ /dev/null @@ -1,111 +0,0 @@ - -======================== -lib/distributed features -======================== - -The 'distributed' library is an attempt to provide transparent, lazy -access to remote objects. This is accomplished using -`transparent proxies`_ and in application level code (so as a pure -python module). - -The implementation uses an RPC-like protocol, which accesses -only members of objects, rather than whole objects. This means it -does not rely on objects being pickleable, nor on having the same -source code available on both sides. On each call, only the members -that are used on the client side are retrieved, objects which -are not used are merely references to their remote counterparts. - -As an example, let's imagine we have a remote object, locally available -under the name `x`. Now we call:: - - >>>> x.foo(1, [1,2,3], y) - -where y is some instance of a local, user-created class. - -Under water, x.\_\_getattribute\_\_ is called, with argument 'foo'. In the -\_\_getattribute\_\_ implementation, the 'foo' attribute is requested, and the -remote side replies by providing a bound method. On the client this bound -method appears as a remote reference: this reference is called with a remote -reference to x as self, the integer 1 which is copied as a primitive type, a -reference to a list and a reference to y. 
The remote side receives this call, -processes it as a call to the bound method x.foo, where 'x' is resolved as a -local object, 1 as an immutable primitive, [1,2,3] as a reference to a mutable -primitive and y as a reference to a remote object. If the type of y is not -known on the remote side, it is faked with just about enough shape (XXX?!?) to -be able to perform the required operations. The contents of the list are -retrieved when they're needed. - -An advantage of this approach is that a user can have remote references to -internal interpreter types, like frames, code objects and tracebacks. In a demo -directory there is an example of using this to attach pdb.post\_mortem() to a -remote traceback. Another advantage is that there's a minimal amount of data -transferred over the network. On the other hand, there are a large amount of -packages sent to the remote side - hopefully this will be improved in future. - -The 'distributed' lib is uses an abstract network layer, which means you -can provide custom communication channels just by implementing -two functions that send and receive marshallable objects (no pickle needed!). - -Exact rules of copying ----------------------- - -- Immutable primitives are always transferred - -- Mutable primitives are transferred as a reference, but several operations - (like iter()) force them to be transferred fully - -- Builtin exceptions are transferred by name - -- User objects are always faked on the other side, with enough shape - transferred - -XXX finish, basic interface, example, build some stuff on top of greenlets - -Related work comparison ------------------------ - -There are a lot of attempts to incorporate RPC mechanism into -Python, some of them are listed below: - -* `Pyro`_ - Pyro stands for PYthon Remote Objects, it's a mechanism of - implementing remotely accessible objects in pure python (without modifying - interpreter). 
This is only a remote method call implementation, with - all limitations, so: - - - No attribute access - - - Arguments of calls must be pickleable on one side and unpickleable on - remote side, which means they must share source code, they do not - become remote references - - - Exported objects must inherit from specific class and follow certain - standards, like \_\_init\_\_ shape. - - - Remote tracebacks only as strings - - - Remote calls usually invokes new threads - -* XMLRPC - There are several implementations of xmlrpc protocol in Python, - one even in the standard library. Xmlrpc is cross-language, cross-platform - protocol of communication, which implies great flexibility of tools to - choose, but also implies several limitations, like: - - - No remote tracebacks - - - Only simple types to be passed as function arguments - -* Twisted Perspective Broker - - - involves twisted, which ties user to network stack/programming style - - - event driven programming (might be good, might be bad, but it's fixed) - - - copies object (by pickling), but provides sophisticated layer of - caching to avoid multiple copies of the same object. - - - two way RPC (unlike Pyro) - - - also heavy restrictions on objects - they must subclass certain class - -.. _`Pyro`: http://pyro.sourceforge.net/ -.. _`transparent proxies`: objspace-proxies.html#tproxy diff --git a/pypy/doc/cpython_differences.txt b/pypy/doc/cpython_differences.txt deleted file mode 100644 --- a/pypy/doc/cpython_differences.txt +++ /dev/null @@ -1,225 +0,0 @@ -==================================== -Differences between PyPy and CPython -==================================== - -This page documents the few differences and incompatibilities between -the PyPy Python interpreter and CPython. Some of these differences -are "by design", since we think that there are cases in which the -behaviour of CPython is buggy, and we do not want to copy bugs. 
- -Differences that are not listed here should be considered bugs of -PyPy. - - -Extension modules ------------------ - -List of extension modules that we support: - -* Supported as built-in modules (in `pypy/module/`_): - - __builtin__ - `__pypy__`_ - _ast - _bisect - _codecs - _lsprof - `_minimal_curses`_ - _random - `_rawffi`_ - _ssl - _socket - _sre - _weakref - array - bz2 - cStringIO - `cpyext`_ - crypt - errno - exceptions - fcntl - gc - itertools - marshal - math - md5 - mmap - operator - parser - posix - pyexpat - select - sha - signal - struct - symbol - sys - termios - thread - time - token - unicodedata - zipimport - zlib - - When translated to Java or .NET, the list is smaller; see - `pypy/config/pypyoption.py`_ for details. - - When translated on Windows, a few Unix-only modules are skipped, - and the following module is built instead: - - _winreg - - Extra module with Stackless_ only: - - _stackless - -* Supported by being rewritten in pure Python (possibly using ``ctypes``): - see the `lib_pypy/`_ directory. Examples of modules that we - support this way: ``ctypes``, ``cPickle``, - ``cStringIO``, ``cmath``, ``dbm`` (?), ``datetime``, ``binascii``... - Note that some modules are both in there and in the list above; - by default, the built-in module is used (but can be disabled - at translation time). - -The extension modules (i.e. modules written in C, in the standard CPython) -that are neither mentioned above nor in `lib_pypy/`_ are not available in PyPy. -(You may have a chance to use them anyway with `cpyext`_.) - -.. the nonstandard modules are listed below... -.. _`__pypy__`: __pypy__-module.html -.. _`_rawffi`: ctypes-implementation.html -.. _`_minimal_curses`: config/objspace.usemodules._minimal_curses.html -.. _`cpyext`: http://morepypy.blogspot.com/2010/04/using-cpython-extension-modules-with.html -.. 
_Stackless: stackless.html - - -Differences related to garbage collection strategies ----------------------------------------------------- - -Most of the garbage collectors used or implemented by PyPy are not based on -reference counting, so the objects are not freed instantly when they are no -longer reachable. The most obvious effect of this is that files are not -promptly closed when they go out of scope. For files that are opened for -writing, data can be left sitting in their output buffers for a while, making -the on-disk file appear empty or truncated. - -Fixing this is essentially not possible without forcing a -reference-counting approach to garbage collection. The effect that you -get in CPython has clearly been described as a side-effect of the -implementation and not a language design decision: programs relying on -this are basically bogus. It would anyway be insane to try to enforce -CPython's behavior in a language spec, given that it has no chance to be -adopted by Jython or IronPython (or any other port of Python to Java or -.NET, like PyPy itself). - -This affects the precise time at which __del__ methods are called, which -is not reliable in PyPy (nor Jython nor IronPython). It also means that -weak references may stay alive for a bit longer than expected. This -makes "weak proxies" (as returned by ``weakref.proxy()``) somewhat less -useful: they will appear to stay alive for a bit longer in PyPy, and -suddenly they will really be dead, raising a ``ReferenceError`` on the -next access. Any code that uses weak proxies must carefully catch such -``ReferenceError`` at any place that uses them. - -There are a few extra implications for the difference in the GC. Most -notably, if an object has a __del__, the __del__ is never called more -than once in PyPy; but CPython will call the same __del__ several times -if the object is resurrected and dies again. 
calling it a lot can lead to performance problems.
*more* cases on PyPy
check if we already know the result
-If someone requests a build of PyPy that is not already available on the PyPy -website, and your machine is capable of making such a build, the server may ask -your machine to create it. If enough people participate, with diverse enough -machines, a 'build farm' is created. - -Quick usage instructions -======================== - -For the impatient, that just want to get started, some quick instructions. - -First you'll need to have a checkout of the 'buildtool' package, that can -be found here:: - - https://codespeak.net/svn/pypy/build/buildtool - -To start a compilation, run (from the buildtool root directory):: - - $ ./bin/startcompile.py [options] - -where the options can be found by using --help, and the email address will be -used to send mail to once the compilation is finished. - -To start a build server, to participate in the build farm, do:: - - $ ./bin/buildserver.py - -That's it for the compilation script and build server, if you have your own -project and want to set up your own meta server, you'll have to be a bit more -patient and read the details below... - -Components -========== - -The application consists of 3 main components: a meta server component, a -client component that handles compilations (let's call this a 'build server') -and a small client component to start compile jobs (which we'll call -'requesting clients' for now). - -The server waits for build server to register, and for compile job -requests. When participating clients register, they pass the server information -about what compilations the system can handle (system info), and a set of -options to use for compilation (compile info). - -When now a requesting client requests a compilation job, the server checks -whether a suitable binary is already available based on the system and compile -info, and if so returns that. 
If there isn't one, the server walks through a -list of connected participating clients to see if one of them can handle the -job, and if so dispatches the compilation. If there's no participating client -to handle the job, it gets queued until there is. - -If a client crashes during compilation, the build is restarted, or error -information is sent to the logs and requesting client, depending on the type of -error. As long as no compilation error occurs (read: on disconnects, system -errors, etc.) compilation will be retried until a build is available. - -Once a build is available, the server will send an email to all clients waiting -for the build (it could be that more than one person asked for some build at -the same time!). - -Configuration -============= - -There are several aspects to configuration on this system. Of course, for the -meta server, build server and startcompile components there is configuration -for the host and port to connect to, and there is some additional configuration -for things like which mailhost to use (only applies to the server), but also -there is configuration data passed around to determine what client is picked, -and what the client needs to compile exactly. - -Config file ------------ - -The host/port configuration etc. can be found in the file 'config.py' in the -build tool dir. There are several things that can be configured here, mostly -related to what application to build, and where to build it. Please read the -file carefully when setting up a new build network, or when participating for -compilation, because certain items (e.g. the svnpath_to_url function, or the -client_checkers) can make the system a lot less secure when not configured -properly. 
you will not want to modify it.
- -When starting a build server with PyPy's default configuration, it will connect -to a meta server we have running in codespeak.net. - -Meta Server ------------ - -Also for the server there's no real setup required, and again there's a -dependency on the `py lib`_. Starting it is done by running -'./bin/metaserver'. - -Running a compile job ---------------------- - -Again installation is not required, just run './bin/startcompile.py [options] -' (see --help for the options) to start. Again, you need to have the -`py lib`_ installed. - -Normally the codespeak.net meta server will be used when this script is issued. - -.. _`py lib`: http://codespeak.net/py - -Using the build tool for other projects -======================================= - -The code for the build tool is meant to be generic. Using it for other projects -than PyPy (for which it was originally written) is relatively straight-forward: -just change the configuration, and implement a build client script (probably -highly resembling bin/buildserver.py). - -Note that there is a test project in 'tool/build/testproject' that can serve -as an example. - -Prerequisites --------------- - -Your project can use the build tool if: - - * it can be built from Python - - Of course this is a rather vague requirement: theoretically _anything_ can - be built from Python; it's just a matter of integrating it into the tool - properly... A project that can entirely be built from Python code (like - PyPy) is easier to integrate than something that is built from the command - line, though (although implementing that won't be very hard either, see - the test project for instance). - - * it is located in Subversion - - The build tool makes very little hard-coded assumptions, but having code - in Subversion is one of them. 
and the build client
- - * /buildservers - connected build servers - - this page contains a list of all connected build servers, system - information and what build they're currently working on (if any) - - * /builds - a list of builds - - here you'll find a list of all builds, both done and in-progress and - queued ones, with links to the details pages, the date they were - requested and their status - - * /build/ - build details - - the 'build' (virtual) directory contains pages of information for each - build - each of those pages displays status information, time requested, - time started and finished (if appropriate), links to the zip and logs, - and system and compile information - -There's a build tool status web server for the meta server on codespeak.net -available at http://codespeak.net/pypy/buildstatus/. - -More info -========= - -For more information, bug reports, patches, etc., please send an email to -guido at merlinux.de. - diff --git a/pypy/doc/config/objspace.usemodules.rctime.txt b/pypy/doc/config/objspace.usemodules.rctime.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.rctime.txt +++ /dev/null @@ -1,7 +0,0 @@ -Use the 'rctime' module. - -'rctime' is our `rffi`_ based implementation of the builtin 'time' module. -It supersedes the less complete :config:`objspace.usemodules.time`, -at least for C-like targets (the C and LLVM backends). - -.. _`rffi`: ../rffi.html diff --git a/pypy/doc/config/translation.debug.txt b/pypy/doc/config/translation.debug.txt deleted file mode 100644 --- a/pypy/doc/config/translation.debug.txt +++ /dev/null @@ -1,2 +0,0 @@ -Record extra debugging information during annotation. This leads to slightly -less obscure error messages. 
diff --git a/pypy/doc/discussion/improve-rpython.txt b/pypy/doc/discussion/improve-rpython.txt deleted file mode 100644 --- a/pypy/doc/discussion/improve-rpython.txt +++ /dev/null @@ -1,93 +0,0 @@ -Possible improvements of the rpython language -============================================= - -Improve the interpreter API ---------------------------- - -- Rationalize the modules, and the names, of the different functions needed to - implement a pypy module. A typical rpython file is likely to contain many - `import` statements:: - - from pypy.interpreter.baseobjspace import Wrappable - from pypy.interpreter.gateway import ObjSpace, W_Root, NoneNotWrapped - from pypy.interpreter.argument import Arguments - from pypy.interpreter.typedef import TypeDef, GetSetProperty - from pypy.interpreter.typedef import interp_attrproperty, interp_attrproperty_w - from pypy.interpreter.gateway import interp2app - from pypy.interpreter.error import OperationError - from pypy.rpython.lltypesystem import rffi, lltype - -- A more direct declarative way to write Typedef:: - - class W_Socket(Wrappable): - _typedef_name_ = 'socket' - _typedef_base_ = W_EventualBaseClass - - @interp2app_method("connect", ['self', ObjSpace, W_Root]) - def connect_w(self, space, w_addr): - ... - -- Support for metaclasses written in rpython. For a sample, see the skipped test - `pypy.objspace.std.test.TestTypeObject.test_metaclass_typedef` - -RPython language ----------------- - -- Arithmetic with unsigned integer, and between integer of different signedness, - when this is not ambiguous. At least, comparison and assignment with - constants should be allowed. - -- Allocate variables on the stack, and pass their address ("by reference") to - llexternal functions. For a typical usage, see - `pypy.rlib.rsocket.RSocket.getsockopt_int`. - -- Support context managers and the `with` statement. This could be a workaround - before the previous point is available. 
which need not be
XXX example diff --git a/pypy/doc/config/translation.rweakref.txt b/pypy/doc/config/translation.rweakref.txt deleted file mode 100644 --- a/pypy/doc/config/translation.rweakref.txt +++ /dev/null @@ -1,3 +0,0 @@ -This indicates if the backend and GC policy support RPython-level weakrefs. -Can be tested in an RPython program to select between two implementation -strategies. diff --git a/pypy/doc/interpreter-optimizations.txt b/pypy/doc/interpreter-optimizations.txt deleted file mode 100644 --- a/pypy/doc/interpreter-optimizations.txt +++ /dev/null @@ -1,357 +0,0 @@ -================================== -Standard Interpreter Optimizations -================================== - -.. contents:: Contents - -Introduction -============ - -One of the advantages -- indeed, one of the motivating goals -- of the PyPy -standard interpreter (compared to CPython) is that of increased flexibility and -configurability. - -One example of this is that we can provide several implementations of the same -object (e.g. lists) without exposing any difference to application-level -code. This makes it easy to provide a specialized implementation of a type that -is optimized for a certain situation without disturbing the implementation for -the regular case. - -This document describes several such optimizations. Most of them are not -enabled by default. Also, for many of these optimizations it is not clear -whether they are worth it in practice for a real-world application (they sure -make some microbenchmarks a lot faster and use less memory, which is not saying -too much). If you have any observation in that direction, please let us know! -By the way: alternative object implementations are a great way to get into PyPy -development since you have to know only a rather small part of PyPy to do -them. And they are fun too! - -.. describe other optimizations! 
You can enable this feature with
- -Using ropes is usually not a huge benefit for normal Python programs that use -the typical pattern of appending substrings to a list and doing a -``"".join(l)`` at the end. If ropes are used, there is no need to do that. -A somewhat silly example of things you can do with them is this:: - - $ bin/py.py --objspace-std-withrope - faking - PyPy 0.99.0 in StdObjSpace on top of Python 2.4.4c1 (startuptime: 17.24 secs) - >>>> import sys - >>>> sys.maxint - 2147483647 - >>>> s = "a" * sys.maxint - >>>> s[10:20] - 'aaaaaaaaaa' - - -You can enable this feature with the :config:`objspace.std.withrope` option. - -.. _`"Ropes: An alternative to Strings."`: http://www.cs.ubc.ca/local/reading/proceedings/spe91-95/spe/vol25/issue12/spe986.pdf - -Integer Optimizations ---------------------- - -Caching Small Integers -++++++++++++++++++++++ - -Similar to CPython, it is possible to enable caching of small integer objects to -not have to allocate all the time when doing simple arithmetic. Every time a new -integer object is created it is checked whether the integer is small enough to -be retrieved from the cache. - -This option is enabled by default. - -Integers as Tagged Pointers -+++++++++++++++++++++++++++ - -An even more aggressive way to save memory when using integers is "small int" -integer implementation. It is another integer implementation used for integers -that only needs 31 bits (or 63 bits on a 64 bit machine). These integers -are represented as tagged pointers by setting their lowest bits to distinguish -them from normal pointers. This completely avoids the boxing step, saving -time and memory. - -You can enable this feature with the :config:`objspace.std.withsmallint` option. - -Dictionary Optimizations ------------------------- - -Multi-Dicts -+++++++++++ - -Multi-dicts are a special implementation of dictionaries. It became clear that -it is very useful to *change* the internal representation of an object during -its lifetime. 
If you just enable multi-dicts, special representations are used for empty dictionaries and -for string-keyed dictionaries.
They are -another dictionary representation used together with multidicts. This -representation is used only for module dictionaries. The representation checks on -every setitem whether the key that is used is the name of a builtin. If this is -the case, the dictionary is marked as shadowing that particular builtin. - -To identify calls to builtins easily, a new bytecode (``CALL_LIKELY_BUILTIN``) -is introduced. Whenever it is executed, the globals dictionary is checked -to see whether it masks the builtin (which is possible without a dictionary -lookup). Then the ``__builtin__`` dict is checked in the same way, -to see whether somebody replaced the real builtin with something else. In the -common case, the program didn't do any of these; the proper builtin can then -be called without using any dictionary lookup at all. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - - -List Optimizations ------------------- - -Range-Lists -+++++++++++ - -Range-lists solve the same problem that the ``xrange`` builtin solves poorly: -the problem that ``range`` allocates memory even if the resulting list is only -ever used for iterating over it. Range lists are a different implementation for -lists. They are created only as a result of a call to ``range``. As long as the -resulting list is used without being mutated, the list stores only the start, stop -and step of the range. Only when somebody mutates the list the actual list is -created. This gives the memory and speed behaviour of ``xrange`` and the generality -of use of ``range``, and makes ``xrange`` essentially useless. - -You can enable this feature with the :config:`objspace.std.withrangelist` -option. - - -User Class Optimizations ------------------------- - -Shadow Tracking -+++++++++++++++ - -Shadow tracking is a general optimization that speeds up method calls for user -classes (that don't have special meta-class). 
as long as the instance did not shadow any of its class's attributes.
-We have implemented a pair of bytecodes that alleviate this cost. - -For a given method call ``obj.meth(x, y)``, the standard bytecode looks like -this:: - - LOAD_GLOBAL obj # push 'obj' on the stack - LOAD_ATTR meth # read the 'meth' attribute out of 'obj' - LOAD_GLOBAL x # push 'x' on the stack - LOAD_GLOBAL y # push 'y' on the stack - CALL_FUNCTION 2 # call the 'obj.meth' object with arguments x, y - -We improved this by keeping method lookup separated from method call, unlike -some other approaches, but using the value stack as a cache instead of building -a temporary object. We extended the bytecode compiler to (optionally) generate -the following code for ``obj.meth(x)``:: - - LOAD_GLOBAL obj - LOOKUP_METHOD meth - LOAD_GLOBAL x - LOAD_GLOBAL y - CALL_METHOD 2 - -``LOOKUP_METHOD`` contains exactly the same attribute lookup logic as -``LOAD_ATTR`` - thus fully preserving semantics - but pushes two values onto the -stack instead of one. These two values are an "inlined" version of the bound -method object: the *im_func* and *im_self*, i.e. respectively the underlying -Python function object and a reference to ``obj``. This is only possible when -the attribute actually refers to a function object from the class; when this is -not the case, ``LOOKUP_METHOD`` still pushes two values, but one *(im_func)* is -simply the regular result that ``LOAD_ATTR`` would have returned, and the other -*(im_self)* is a None placeholder. 
- -After pushing the arguments, the layout of the stack in the above -example is as follows (the stack grows upwards): - -+---------------------------------+ -| ``y`` *(2nd arg)* | -+---------------------------------+ -| ``x`` *(1st arg)* | -+---------------------------------+ -| ``obj`` *(im_self)* | -+---------------------------------+ -| ``function object`` *(im_func)* | -+---------------------------------+ - -The ``CALL_METHOD N`` bytecode emulates a bound method call by -inspecting the *im_self* entry in the stack below the ``N`` arguments: -if it is not None, then it is considered to be an additional first -argument in the call to the *im_func* object from the stack. - -You can enable this feature with the :config:`objspace.opcodes.CALL_METHOD` -option. - -.. _`call likely builtin`: - -CALL_LIKELY_BUILTIN -+++++++++++++++++++ - -A often heard "tip" for speeding up Python programs is to give an often used -builtin a local name, since local lookups are faster than lookups of builtins, -which involve doing two dictionary lookups: one in the globals dictionary and -one in the the builtins dictionary. PyPy approaches this problem at the -implementation level, with the introduction of the new ``CALL_LIKELY_BUILTIN`` -bytecode. This bytecode is produced by the compiler for a call whose target is -the name of a builtin. Since such a syntactic construct is very often actually -invoking the expected builtin at run-time, this information can be used to make -the call to the builtin directly, without going through any dictionary lookup. - -However, it can occur that the name is shadowed by a global name from the -current module. To catch this case, a special dictionary implementation for -multidicts is introduced, which is used for the dictionaries of modules. This -implementation keeps track which builtin name is shadowed by it. 
The -``CALL_LIKELY_BUILTIN`` bytecode asks the dictionary whether it is shadowing the -builtin that is about to be called and asks the dictionary of ``__builtin__`` -whether the original builtin was changed. These two checks are cheaper than -full lookups. In the common case, neither of these cases is true, so the -builtin can be directly invoked. - -You can enable this feature with the -:config:`objspace.opcodes.CALL_LIKELY_BUILTIN` option. - -.. more here? - -Overall Effects -=============== - -The impact these various optimizations have on performance unsurprisingly -depends on the program being run. Using the default multi-dict implementation that -simply special cases string-keyed dictionaries is a clear win on all benchmarks, -improving results by anything from 15-40 per cent. - -Another optimization, or rather set of optimizations, that has a uniformly good -effect is the set of three 'method optimizations', i.e. shadow tracking, the -method cache and the LOOKUP_METHOD and CALL_METHOD opcodes. On a heavily -object-oriented benchmark (richards) they combine to give a speed-up of nearly -50%, and even on the extremely un-object-oriented pystone benchmark, the -improvement is over 20%. - -.. waffles about ropes - -When building pypy, all generally useful optimizations are turned on by default -unless you explicitly lower the translation optimization level with the -``--opt`` option. diff --git a/pypy/doc/discussion/distribution-roadmap.txt b/pypy/doc/discussion/distribution-roadmap.txt deleted file mode 100644 --- a/pypy/doc/discussion/distribution-roadmap.txt +++ /dev/null @@ -1,72 +0,0 @@ -Distribution: -============= - -Some random thoughts about automatic (or not) distribution layer. - -What I want to achieve is to make clean approach to perform -distribution mechanism with virtually any distribution heuristic. 
- -First step - RPython level: ---------------------------- - -First (simplest) step is to allow user to write RPython programs with -some kind of remote control over program execution. For start I would -suggest using RMI (Remote Method Invocation) and remote object access -(in case of low level it would be struct access). For the simplicity -it will make some sense to target high-level platform at the beginning -(CLI platform seems like obvious choice), which provides more primitives -for performing such operations. To make attempt easier, I'll provide -some subset of type system to be serializable which can go as parameters -to such a call. - -I take advantage of several assumptions: - -* globals are constants - this allows us to just run multiple instances - of the same program on multiple machines and perform RMI. - -* I/O is explicit - this makes GIL problem not that important. XXX: I've got - to read more about GIL to notice if this is true. - -Second step - doing it a little bit more automatically: -------------------------------------------------------- - -The second step is to allow some heuristic to live and change -calls to RMI calls. This should follow some assumptions (which may vary, -regarding implementation): - -* Not to move I/O to different machine (we can track I/O and side-effects - in RPython code). - -* Make sure all C calls are safe to transfer if we want to do that (this - depends on probably static API declaration from programmer "I'm sure this - C call has no side-effects", we don't want to check it in C) or not transfer - them at all. - -* Perform it all statically, at the time of program compilation. - -* We have to generate serialization methods for some classes, which - we want to transfer (Same engine might be used to allow JSON calls in JS - backend to transfer arbitrary python object). 
- -Third step - Just-in-time distribution: ---------------------------------------- - -The biggest step here is to provide JIT integration into distribution -system. This should allow to make it really useful (probably compile-time -distribution will not work for example for whole Python interpreter, because -of too huge granularity). This is quite unclear for me how to do that -(JIT is not complete and I don't know too much about it). Probably we -take JIT information about graphs and try to feed it to heuristic in some way -to change the calls into RMI. - -Problems to fight with: ------------------------ - -Most problems are to make mechanism working efficiently, so: - -* Avoid too much granularity (copying a lot of objects in both directions - all the time) - -* Make heuristic not eat too much CPU time/memory and all of that. - -* ... diff --git a/pypy/doc/config/objspace.usemodules._sre.txt b/pypy/doc/config/objspace.usemodules._sre.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules._sre.txt +++ /dev/null @@ -1,2 +0,0 @@ -Use the '_sre' module. -This module is expected to be working and is included by default. 
From commits-noreply at bitbucket.org Sat Mar 26 10:15:23 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 10:15:23 +0100 (CET) Subject: [pypy-svn] pypy jit-usable_retrace: resolved merge Message-ID: <20110326091523.4A9C1282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42958:e7e4bc716e07 Date: 2011-03-26 09:39 +0100 http://bitbucket.org/pypy/pypy/changeset/e7e4bc716e07/ Log: resolved merge diff --git a/pypy/jit/metainterp/optimizeopt/__init__.py b/pypy/jit/metainterp/optimizeopt/__init__.py --- a/pypy/jit/metainterp/optimizeopt/__init__.py +++ b/pypy/jit/metainterp/optimizeopt/__init__.py @@ -26,6 +26,7 @@ PARAMETERS['enable_opts'] = ALL_OPTS_NAMES def optimize_loop_1(metainterp_sd, loop, enable_opts, + inline_short_preamble=True, retrace=None): """Optimize loop.operations to remove internal overheadish operations. """ optimizations = [] @@ -44,7 +45,7 @@ optimizations.append(OptSimplify()) if inline_short_preamble: - optimizations = [OptInlineShortPreamble(retraced)] + optimizations + optimizations = [OptInlineShortPreamble(retrace)] + optimizations if unroll: optimize_unroll(metainterp_sd, loop, optimizations) @@ -61,7 +62,7 @@ except KeyError: pass optimize_loop_1(metainterp_sd, bridge, enable_opts, - inline_short_preamble, retraced) + inline_short_preamble, retrace) if __name__ == '__main__': print ALL_OPTS_NAMES From commits-noreply at bitbucket.org Sat Mar 26 10:15:24 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 10:15:24 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: additional passing test Message-ID: <20110326091524.3DBF9282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42959:0b1c95002df7 Date: 2011-03-26 09:51 +0100 http://bitbucket.org/pypy/pypy/changeset/0b1c95002df7/ Log: additional passing test diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- 
a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5558,6 +5558,30 @@ """ self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) + def test_str_equal_nonconst(self): + ops = """ + [i1, i2] + p1 = newstr(1) + strsetitem(p1, 0, i1) + p2 = newstr(1) + strsetitem(p1, 0, i2) + i0 = call(0, p1, p2, descr=strequaldescr) + escape(i0) + jump(i1) + """ + preamble = """ + [i1, i2] + i0 = int_eq(i1, i2) # ord('x') + escape(i0) + jump(i1, i2, i0) + """ + expected = """ + [i1, i2, i0] + escape(i0) + jump(i1, i2, i0) + """ + self.optimize_strunicode_loop_extradescrs(ops, expected, preamble) + def test_str_equal_chars2(self): ops = """ [i1, i2] From commits-noreply at bitbucket.org Sat Mar 26 10:15:30 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 10:15:30 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: this case wount work Message-ID: <20110326091530.D2C292A2039@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42960:203100578cb0 Date: 2011-03-26 10:11 +0100 http://bitbucket.org/pypy/pypy/changeset/203100578cb0/ Log: this case wount work diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5089,6 +5089,20 @@ """ self.optimize_strunicode_loop(ops, expected, expected) + def test_str_slice_len_surviving(self): + ops = """ + [p1, i1, i2, i3] + p2 = call(0, p1, i1, i2, descr=strslicedescr) + i4 = strlen(p2) + jump(p1, i1, i2, i4) + """ + expected = """ + [p1, i1, i2, i3] + i4 = int_sub(i2, i1) + jump(p1, i1, i2, i4) + """ + self.optimize_strunicode_loop(ops, expected, expected) + def test_str_slice_1(self): ops = """ [p1, i1, i2] From commits-noreply at bitbucket.org Sat Mar 26 10:39:02 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 10:39:02 +0100 (CET) Subject: 
[pypy-svn] pypy jit-str_in_preamble: another problematic case Message-ID: <20110326093902.310AD282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42961:325d1d28e59a Date: 2011-03-26 10:38 +0100 http://bitbucket.org/pypy/pypy/changeset/325d1d28e59a/ Log: another problematic case diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5089,7 +5089,7 @@ """ self.optimize_strunicode_loop(ops, expected, expected) - def test_str_slice_len_surviving(self): + def test_str_slice_len_surviving1(self): ops = """ [p1, i1, i2, i3] p2 = call(0, p1, i1, i2, descr=strslicedescr) @@ -5103,6 +5103,32 @@ """ self.optimize_strunicode_loop(ops, expected, expected) + def test_str_slice_len_surviving2(self): + ops = """ + [p1, i1, i2, p2] + i5 = getfield_gc(p2, descr=valuedescr) + escape(i5) + p3 = call(0, p1, i1, i2, descr=strslicedescr) + i4 = strlen(p3) + setfield_gc(p2, i4, descr=valuedescr) + jump(p1, i1, i2, p2) + """ + preamble = """ + [p1, i1, i2, p2] + i5 = getfield_gc(p2, descr=valuedescr) + escape(i5) + i4 = int_sub(i2, i1) + setfield_gc(p2, i4, descr=valuedescr) + jump(p1, i1, i2, p2, i4) + """ + expected = """ + [p1, i1, i2, p2, i5] + escape(i5) + setfield_gc(p2, i5, descr=valuedescr) + jump(p1, i1, i2, p2, i5) + """ + self.optimize_strunicode_loop(ops, expected, preamble) + def test_str_slice_1(self): ops = """ [p1, i1, i2] From commits-noreply at bitbucket.org Sat Mar 26 11:41:39 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 11:41:39 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: workaround allowing the unrolling to fail and produce a preamble calling itself instead of chrasing Message-ID: <20110326104139.63B87282B9C@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42962:5a9236876435 Date: 2011-03-26 11:41 +0100 
http://bitbucket.org/pypy/pypy/changeset/5a9236876435/ Log: workaround allowing the unrolling to fail and produce a preamble calling itself instead of chrasing diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -267,9 +267,17 @@ virtual_state = modifier.get_virtual_state(jump_args) loop.preamble.operations = self.optimizer.newoperations + preamble_optimizer = self.optimizer self.optimizer = self.optimizer.reconstruct_for_next_iteration(jump_args) - inputargs = self.inline(self.cloned_operations, - loop.inputargs, jump_args) + try: + inputargs = self.inline(self.cloned_operations, + loop.inputargs, jump_args) + except KeyError: + debug_print("Unrolling failed.") + loop.preamble.operations = None + jumpop.initarglist(jump_args) + preamble_optimizer.send_extra_operation(jumpop) + return loop.inputargs = inputargs jmp = ResOperation(rop.JUMP, loop.inputargs[:], None) jmp.setdescr(loop.token) diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -199,7 +199,10 @@ print print loop.preamble.inputargs - print '\n'.join([str(o) for o in loop.preamble.operations]) + if loop.preamble.operations: + print '\n'.join([str(o) for o in loop.preamble.operations]) + else: + print 'Failed!' 
print print loop.inputargs print '\n'.join([str(o) for o in loop.operations]) @@ -5101,7 +5104,8 @@ i4 = int_sub(i2, i1) jump(p1, i1, i2, i4) """ - self.optimize_strunicode_loop(ops, expected, expected) + #self.optimize_strunicode_loop(ops, expected, expected) + self.optimize_loop(ops, expected) def test_str_slice_len_surviving2(self): ops = """ @@ -5119,7 +5123,7 @@ escape(i5) i4 = int_sub(i2, i1) setfield_gc(p2, i4, descr=valuedescr) - jump(p1, i1, i2, p2, i4) + jump(p1, i1, i2, p2) """ expected = """ [p1, i1, i2, p2, i5] @@ -5127,7 +5131,8 @@ setfield_gc(p2, i5, descr=valuedescr) jump(p1, i1, i2, p2, i5) """ - self.optimize_strunicode_loop(ops, expected, preamble) + #self.optimize_strunicode_loop(ops, expected, preamble) + self.optimize_loop(ops, preamble) def test_str_slice_1(self): ops = """ diff --git a/pypy/jit/metainterp/test/test_string.py b/pypy/jit/metainterp/test/test_string.py --- a/pypy/jit/metainterp/test/test_string.py +++ b/pypy/jit/metainterp/test/test_string.py @@ -323,6 +323,20 @@ self.meta_interp(f, [6, 7]) self.check_loops(newstr=0, newunicode=0) + def test_str_slice_len_surviving(self): + _str = self._str + longstring = _str("Unrolling Trouble") + mydriver = JitDriver(reds = ['i', 'a', 'sa'], greens = []) + def f(a): + i = sa = a + while i < len(longstring): + mydriver.jit_merge_point(i=i, a=a, sa=sa) + assert a >= 0 and i >= 0 + i = len(longstring[a:i+1]) + sa += i + return sa + assert self.meta_interp(f, [0]) == f(0) + #class TestOOtype(StringTests, OOJitMixin): # CALL = "oosend" From commits-noreply at bitbucket.org Sat Mar 26 11:47:43 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 11:47:43 +0100 (CET) Subject: [pypy-svn] pypy default: Fix running py.test in this directory. Message-ID: <20110326104743.4692D282BA1@codespeak.net> Author: Armin Rigo Branch: Changeset: r42963:771a50208bf8 Date: 2011-03-25 22:54 +0100 http://bitbucket.org/pypy/pypy/changeset/771a50208bf8/ Log: Fix running py.test in this directory. 
diff --git a/dotviewer/conftest.py b/dotviewer/conftest.py --- a/dotviewer/conftest.py +++ b/dotviewer/conftest.py @@ -6,4 +6,6 @@ dest="pygame", default=False, help="allow interactive tests using Pygame") -option = py.test.config.option +def pytest_configure(config): + global option + option = config.option From commits-noreply at bitbucket.org Sat Mar 26 12:06:24 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 12:06:24 +0100 (CET) Subject: [pypy-svn] pypy default: On x86-64, put the correct dfi assembler macros to allow gdb Message-ID: <20110326110624.E9832282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42964:95d7bc777445 Date: 2011-03-24 11:28 +0100 http://bitbucket.org/pypy/pypy/changeset/95d7bc777445/ Log: On x86-64, put the correct dfi assembler macros to allow gdb to debug the function (and walk past it in the backtrace). diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py --- a/pypy/translator/c/gcc/trackgcroot.py +++ b/pypy/translator/c/gcc/trackgcroot.py @@ -1647,6 +1647,7 @@ print >> output, """\ /* See description in asmgcroot.py */ + .cfi_startproc movq\t%rdi, %rdx\t/* 1st argument, which is the callback */ movq\t%rsi, %rcx\t/* 2nd argument, which is gcrootanchor */ movq\t%rsp, %rax\t/* my frame top address */ @@ -1666,6 +1667,7 @@ pushq\t%rcx\t\t\t/* self->prev = gcrootanchor */ movq\t%rsp, 8(%rcx)\t/* gcrootanchor->next = self */ movq\t%rsp, 0(%rax)\t\t\t/* next->prev = self */ + .cfi_def_cfa_offset 80\t/* 9 pushes + the retaddr = 80 bytes */ /* note: the Mac OS X 16 bytes aligment must be respected. 
*/ call\t*%rdx\t\t/* invoke the callback */ @@ -1687,6 +1689,7 @@ /* the return value is the one of the 'call' above, */ /* because %rax (and possibly %rdx) are unmodified */ ret + .cfi_endproc """ _variant(elf64='.size pypy_asm_stackwalk, .-pypy_asm_stackwalk', darwin64='') From commits-noreply at bitbucket.org Sat Mar 26 12:23:35 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 12:23:35 +0100 (CET) Subject: [pypy-svn] pypy default: Kill the test for division_to_rshift, because it's already tested Message-ID: <20110326112335.AE2F736C20C@codespeak.net> Author: Armin Rigo Branch: Changeset: r42965:c8e4be12f7b1 Date: 2011-03-26 12:23 +0100 http://bitbucket.org/pypy/pypy/changeset/c8e4be12f7b1/ Log: Kill the test for division_to_rshift, because it's already tested directly in test_optimizeopt.py. Also, it seems that the trace only contained additions, because the actual divisions were all constant- folded, so the test was not testing much. diff --git a/pypy/module/pypyjit/test/test_pypy_c.py b/pypy/module/pypyjit/test/test_pypy_c.py --- a/pypy/module/pypyjit/test/test_pypy_c.py +++ b/pypy/module/pypyjit/test/test_pypy_c.py @@ -1409,39 +1409,7 @@ i += 1 return long(sa) ''', 93, count_debug_merge_point=False, *tests) - - def test_division_to_rshift(self): - avalues = ('a', 'b', 7, -42, 8) - bvalues = ['b'] + range(-10, 0) + range(1,10) - code = '' - a1, b1, res1 = 10, 20, 0 - a2, b2, res2 = 10, -20, 0 - a3, b3, res3 = -10, -20, 0 - def dd(a, b, aval, bval): - m = {'a': aval, 'b': bval} - if not isinstance(a, int): - a=m[a] - if not isinstance(b, int): - b=m[b] - return a/b - for a in avalues: - for b in bvalues: - code += ' sa += %s / %s\n' % (a, b) - res1 += dd(a, b, a1, b1) - res2 += dd(a, b, a2, b2) - res3 += dd(a, b, a3, b3) - self.run_source(''' - def main(a, b): - i = sa = 0 - while i < 2000: -%s - i += 1 - return sa - ''' % code, 179, ([a1, b1], 2000 * res1), - ([a2, b2], 2000 * res2), - ([a3, b3], 2000 * res3), - 
count_debug_merge_point=False) - + def test_mod(self): avalues = ('a', 'b', 7, -42, 8) bvalues = ['b'] + range(-10, 0) + range(1,10) From commits-noreply at bitbucket.org Sat Mar 26 12:42:08 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 12:42:08 +0100 (CET) Subject: [pypy-svn] pypy default: Fix the test that broke when changing how "x/y" is implemented. Message-ID: <20110326114208.89EE6282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42966:2c490f3f66e3 Date: 2011-03-26 12:41 +0100 http://bitbucket.org/pypy/pypy/changeset/2c490f3f66e3/ Log: Fix the test that broke when changing how "x/y" is implemented. Rewrite it in a way that doesn't use "/" at all. diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -285,7 +285,7 @@ self.match_var(op.res, exp_res) self._assert(len(op.args) == len(exp_args), "wrong number of arguments") for arg, exp_arg in zip(op.args, exp_args): - self._assert(self.match_var(arg, exp_arg), "variable mismatch") + self._assert(self.match_var(arg, exp_arg), "variable mismatch: %r instead of %r" % (arg, exp_arg)) self.match_descr(op.descr, exp_descr) diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -1012,7 +1012,7 @@ def main(n): i = 1 while i < n: - i += len(xrange(i)) / i + i += len(xrange(i+1)) - i return i log = self.run(main, [10000]) @@ -1023,17 +1023,16 @@ guard_true(i10, descr=) # This can be improved if the JIT realized the lookup of i5 produces # a constant and thus can be removed entirely - i12 = int_sub(i5, 1) - i13 = uint_floordiv(i12, i7) + i120 = int_add(i5, 1) + i140 = int_lt(0, i120) + guard_true(i140, descr=) + i13 = uint_floordiv(i5, i7) i15 = int_add(i13, 1) i17 = 
int_lt(i15, 0) - guard_false(i17, descr=) - i18 = int_floordiv(i15, i5) - i19 = int_xor(i15, i5) - i20 = int_mod(i15, i5) - i21 = int_is_true(i20) - i22 = int_add_ovf(i5, i18) - guard_no_overflow(descr=) + guard_false(i17, descr=) + i20 = int_sub(i15, i5) + i21 = int_add_ovf(i5, i20) + guard_no_overflow(descr=) --TICK-- - jump(p0, p1, p2, p3, p4, i22, i6, i7, p8, p9, descr=) + jump(p0, p1, p2, p3, p4, i21, i6, i7, p8, p9, descr=) """) From commits-noreply at bitbucket.org Sat Mar 26 12:52:47 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 12:52:47 +0100 (CET) Subject: [pypy-svn] pypy default: Backout c8e4be12f7b1, whose purpose is actually to test that Message-ID: <20110326115247.4D839282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42967:3cf518b02197 Date: 2011-03-26 12:49 +0100 http://bitbucket.org/pypy/pypy/changeset/3cf518b02197/ Log: Backout c8e4be12f7b1, whose purpose is actually to test that we get the correct results. diff --git a/pypy/module/pypyjit/test/test_pypy_c.py b/pypy/module/pypyjit/test/test_pypy_c.py --- a/pypy/module/pypyjit/test/test_pypy_c.py +++ b/pypy/module/pypyjit/test/test_pypy_c.py @@ -1409,7 +1409,39 @@ i += 1 return long(sa) ''', 93, count_debug_merge_point=False, *tests) - + + def test_division_to_rshift(self): + avalues = ('a', 'b', 7, -42, 8) + bvalues = ['b'] + range(-10, 0) + range(1,10) + code = '' + a1, b1, res1 = 10, 20, 0 + a2, b2, res2 = 10, -20, 0 + a3, b3, res3 = -10, -20, 0 + def dd(a, b, aval, bval): + m = {'a': aval, 'b': bval} + if not isinstance(a, int): + a=m[a] + if not isinstance(b, int): + b=m[b] + return a/b + for a in avalues: + for b in bvalues: + code += ' sa += %s / %s\n' % (a, b) + res1 += dd(a, b, a1, b1) + res2 += dd(a, b, a2, b2) + res3 += dd(a, b, a3, b3) + self.run_source(''' + def main(a, b): + i = sa = 0 + while i < 2000: +%s + i += 1 + return sa + ''' % code, 179, ([a1, b1], 2000 * res1), + ([a2, b2], 2000 * res2), + ([a3, b3], 2000 * res3), + 
count_debug_merge_point=False) + def test_mod(self): avalues = ('a', 'b', 7, -42, 8) bvalues = ['b'] + range(-10, 0) + range(1,10) From commits-noreply at bitbucket.org Sat Mar 26 12:52:47 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 12:52:47 +0100 (CET) Subject: [pypy-svn] pypy default: The purpose of these tests is not to count the number of Message-ID: <20110326115247.D853D282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42968:8266b096adc5 Date: 2011-03-26 12:52 +0100 http://bitbucket.org/pypy/pypy/changeset/8266b096adc5/ Log: The purpose of these tests is not to count the number of operations. diff --git a/pypy/module/pypyjit/test/test_pypy_c.py b/pypy/module/pypyjit/test/test_pypy_c.py --- a/pypy/module/pypyjit/test/test_pypy_c.py +++ b/pypy/module/pypyjit/test/test_pypy_c.py @@ -1430,6 +1430,8 @@ res1 += dd(a, b, a1, b1) res2 += dd(a, b, a2, b2) res3 += dd(a, b, a3, b3) + # The purpose of this test is to check that we get + # the correct results, not really to count operations. self.run_source(''' def main(a, b): i = sa = 0 @@ -1437,11 +1439,10 @@ %s i += 1 return sa - ''' % code, 179, ([a1, b1], 2000 * res1), - ([a2, b2], 2000 * res2), - ([a3, b3], 2000 * res3), - count_debug_merge_point=False) - + ''' % code, sys.maxint, ([a1, b1], 2000 * res1), + ([a2, b2], 2000 * res2), + ([a3, b3], 2000 * res3)) + def test_mod(self): avalues = ('a', 'b', 7, -42, 8) bvalues = ['b'] + range(-10, 0) + range(1,10) @@ -1462,6 +1463,8 @@ res1 += dd(a, b, a1, b1) res2 += dd(a, b, a2, b2) res3 += dd(a, b, a3, b3) + # The purpose of this test is to check that we get + # the correct results, not really to count operations. 
self.run_source(''' def main(a, b): i = sa = 0 @@ -1471,11 +1474,10 @@ %s i += 1 return sa - ''' % code, 450, ([a1, b1], 2000 * res1), - ([a2, b2], 2000 * res2), - ([a3, b3], 2000 * res3), - count_debug_merge_point=False) - + ''' % code, sys.maxint, ([a1, b1], 2000 * res1), + ([a2, b2], 2000 * res2), + ([a3, b3], 2000 * res3)) + def test_dont_trace_every_iteration(self): self.run_source(''' def main(a, b): From commits-noreply at bitbucket.org Sat Mar 26 12:55:22 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 12:55:22 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: typos Message-ID: <20110326115522.39B63282BDE@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42969:39fb10e528d8 Date: 2011-03-26 12:54 +0100 http://bitbucket.org/pypy/pypy/changeset/39fb10e528d8/ Log: typos diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5609,14 +5609,14 @@ p1 = newstr(1) strsetitem(p1, 0, i1) p2 = newstr(1) - strsetitem(p1, 0, i2) + strsetitem(p2, 0, i2) i0 = call(0, p1, p2, descr=strequaldescr) escape(i0) - jump(i1) + jump(i1, i2) """ preamble = """ [i1, i2] - i0 = int_eq(i1, i2) # ord('x') + i0 = int_eq(i1, i2) escape(i0) jump(i1, i2, i0) """ From commits-noreply at bitbucket.org Sat Mar 26 12:56:10 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sat, 26 Mar 2011 12:56:10 +0100 (CET) Subject: [pypy-svn] pypy jit-str_in_preamble: hg merge default Message-ID: <20110326115610.D282A282BDE@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42970:c4c903f389fd Date: 2011-03-26 12:55 +0100 http://bitbucket.org/pypy/pypy/changeset/c4c903f389fd/ Log: hg merge default diff --git a/pypy/module/imp/app_imp.py b/pypy/module/imp/app_imp.py deleted file mode 100644 --- a/pypy/module/imp/app_imp.py +++ /dev/null @@ -1,5 +0,0 @@ - - -def 
load_dynamic(name, pathname, file=None): - """Always raises ah ImportError on pypy""" - raise ImportError('Not implemented') From commits-noreply at bitbucket.org Sat Mar 26 14:11:41 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 14:11:41 +0100 (CET) Subject: [pypy-svn] pypy default: Test and implementation for "i0 > i0" and friends. Message-ID: <20110326131141.9FC91282BDD@codespeak.net> Author: Armin Rigo Branch: Changeset: r42971:b5865efaed32 Date: 2011-03-26 14:11 +0100 http://bitbucket.org/pypy/pypy/changeset/b5865efaed32/ Log: Test and implementation for "i0 > i0" and friends. Not implemented for unsigned comparisons so far (does not seem really useful). diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -816,6 +816,52 @@ """ self.optimize_loop(ops, expected, preamble) + def test_compare_with_itself(self): + ops = """ + [] + i0 = escape() + i1 = int_lt(i0, i0) + guard_false(i1) [] + i2 = int_le(i0, i0) + guard_true(i2) [] + i3 = int_eq(i0, i0) + guard_true(i3) [] + i4 = int_ne(i0, i0) + guard_false(i4) [] + i5 = int_gt(i0, i0) + guard_false(i5) [] + i6 = int_ge(i0, i0) + guard_true(i6) [] + jump() + """ + expected = """ + [] + i0 = escape() + jump() + """ + self.optimize_loop(ops, expected) + + def test_compare_with_itself_uint(self): + py.test.skip("implement me") + ops = """ + [] + i0 = escape() + i7 = uint_lt(i0, i0) + guard_false(i7) [] + i8 = uint_le(i0, i0) + guard_true(i8) [] + i9 = uint_gt(i0, i0) + guard_false(i9) [] + i10 = uint_ge(i0, i0) + guard_true(i10) [] + jump() + """ + expected = """ + [] + i0 = escape() + jump() + """ + self.optimize_loop(ops, expected) diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py 
@@ -205,7 +205,7 @@ v2 = self.getvalue(op.getarg(1)) if v1.intbound.known_lt(v2.intbound): self.make_constant_int(op.result, 1) - elif v1.intbound.known_ge(v2.intbound): + elif v1.intbound.known_ge(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 0) else: self.emit_operation(op) @@ -215,7 +215,7 @@ v2 = self.getvalue(op.getarg(1)) if v1.intbound.known_gt(v2.intbound): self.make_constant_int(op.result, 1) - elif v1.intbound.known_le(v2.intbound): + elif v1.intbound.known_le(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 0) else: self.emit_operation(op) @@ -223,7 +223,7 @@ def optimize_INT_LE(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) - if v1.intbound.known_le(v2.intbound): + if v1.intbound.known_le(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 1) elif v1.intbound.known_gt(v2.intbound): self.make_constant_int(op.result, 0) @@ -233,7 +233,7 @@ def optimize_INT_GE(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) - if v1.intbound.known_ge(v2.intbound): + if v1.intbound.known_ge(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 1) elif v1.intbound.known_lt(v2.intbound): self.make_constant_int(op.result, 0) From commits-noreply at bitbucket.org Sat Mar 26 17:05:31 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:31 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Temporary implementation of CALL_RELEASE_GIL in x86, not actually releasing the Message-ID: <20110326160531.7B2A2282B9E@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42972:57238c41b55f Date: 2011-03-24 10:43 +0100 http://bitbucket.org/pypy/pypy/changeset/57238c41b55f/ Log: Temporary implementation of CALL_RELEASE_GIL in x86, not actually releasing the gil at all nor closing the gc stack so far. 
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -1832,6 +1832,8 @@ self.mc.CMP_bi(FORCE_INDEX_OFS, 0) self.implement_guard(guard_token, 'L') + genop_guard_call_release_gil = genop_guard_call_may_force + def genop_guard_call_assembler(self, op, guard_op, guard_token, arglocs, result_loc): faildescr = guard_op.getdescr() diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -534,23 +534,6 @@ 'int', descr=calldescr) assert res.value == func_ints(*args) - def test_call_to_c_function(self): - from pypy.rlib.libffi import CDLL, types, ArgChain - from pypy.rpython.lltypesystem.ll2ctypes import libc_name - libc = CDLL(libc_name) - c_tolower = libc.getpointer('tolower', [types.uchar], types.sint) - argchain = ArgChain().arg(ord('A')) - assert c_tolower.call(argchain, rffi.INT) == ord('a') - - func_adr = llmemory.cast_ptr_to_adr(c_tolower.funcsym) - funcbox = ConstInt(heaptracker.adr2int(func_adr)) - calldescr = self.cpu.calldescrof_dynamic([types.uchar], types.sint) - res = self.execute_operation(rop.CALL_RELEASE_GIL, - [funcbox, BoxInt(ord('A'))], - 'int', - descr=calldescr) - assert res.value == ord('a') - def test_call_with_const_floats(self): def func(f1, f2): return f1 + f2 @@ -1804,6 +1787,36 @@ assert self.cpu.get_latest_value_int(2) == 10 assert values == [1, 10] + def test_call_to_c_function(self): + from pypy.rlib.libffi import CDLL, types, ArgChain + from pypy.rpython.lltypesystem.ll2ctypes import libc_name + libc = CDLL(libc_name) + c_tolower = libc.getpointer('tolower', [types.uchar], types.sint) + argchain = ArgChain().arg(ord('A')) + assert c_tolower.call(argchain, rffi.INT) == ord('a') + + cpu = self.cpu + func_adr = llmemory.cast_ptr_to_adr(c_tolower.funcsym) + funcbox = ConstInt(heaptracker.adr2int(func_adr)) + 
calldescr = cpu.calldescrof_dynamic([types.uchar], types.sint) + i1 = BoxInt() + i2 = BoxInt() + tok = BoxInt() + faildescr = BasicFailDescr(1) + ops = [ + ResOperation(rop.CALL_RELEASE_GIL, [funcbox, i1], i2, + descr=calldescr), + ResOperation(rop.GUARD_NOT_FORCED, [], None, descr=faildescr), + ResOperation(rop.FINISH, [i2], None, descr=BasicFailDescr(0)) + ] + ops[1].setfailargs([i1, i2]) + looptoken = LoopToken() + self.cpu.compile_loop([i1], ops, looptoken) + self.cpu.set_future_value_int(0, ord('G')) + fail = self.cpu.execute_token(looptoken) + assert fail.identifier == 0 + assert self.cpu.get_latest_value_int(0) == ord('g') + # pure do_ / descr features def test_do_operations(self): diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -355,7 +355,9 @@ self.assembler.regalloc_perform_discard(op, arglocs) def can_merge_with_next_guard(self, op, i, operations): - if op.getopnum() == rop.CALL_MAY_FORCE or op.getopnum() == rop.CALL_ASSEMBLER: + if (op.getopnum() == rop.CALL_MAY_FORCE or + op.getopnum() == rop.CALL_ASSEMBLER or + op.getopnum() == rop.CALL_RELEASE_GIL): assert operations[i + 1].getopnum() == rop.GUARD_NOT_FORCED return True if not op.is_comparison(): @@ -798,6 +800,10 @@ assert guard_op is not None self._consider_call(op, guard_op) + def consider_call_release_gil(self, op, guard_op): + assert guard_op is not None + self._consider_call(op, guard_op) + def consider_call_assembler(self, op, guard_op): descr = op.getdescr() assert isinstance(descr, LoopToken) @@ -1247,7 +1253,9 @@ name = name[len('consider_'):] num = getattr(rop, name.upper()) if (is_comparison_or_ovf_op(num) - or num == rop.CALL_MAY_FORCE or num == rop.CALL_ASSEMBLER): + or num == rop.CALL_MAY_FORCE + or num == rop.CALL_ASSEMBLER + or num == rop.CALL_RELEASE_GIL): oplist_with_guard[num] = value oplist[num] = add_none_argument(value) else: From commits-noreply at bitbucket.org 
Sat Mar 26 17:05:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:32 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: A test for CALL_RELEASE_GIL with a callback. Message-ID: <20110326160532.271D1282B9E@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42973:affd22c6dcab Date: 2011-03-24 11:00 +0100 http://bitbucket.org/pypy/pypy/changeset/affd22c6dcab/ Log: A test for CALL_RELEASE_GIL with a callback. diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -1817,6 +1817,69 @@ assert fail.identifier == 0 assert self.cpu.get_latest_value_int(0) == ord('g') + def test_call_to_c_function_with_callback(self): + from pypy.rlib.libffi import CDLL, types, ArgChain, clibffi + from pypy.rpython.lltypesystem.ll2ctypes import libc_name + libc = CDLL(libc_name) + types_size_t = clibffi.cast_type_to_ffitype(rffi.SIZE_T) + c_qsort = libc.getpointer('qsort', [types.pointer, types_size_t, + types_size_t, types.pointer], + types.void) + class Glob(object): + pass + glob = Glob() + class X(object): + pass + # + def callback(p1, p2): + glob.lst.append(X()) + return rffi.cast(rffi.INT, 1) + CALLBACK = lltype.Ptr(lltype.FuncType([lltype.Signed, + lltype.Signed], rffi.INT)) + fn = llhelper(CALLBACK, callback) + S = lltype.Struct('S', ('x', rffi.INT), ('y', rffi.INT)) + raw = lltype.malloc(S, flavor='raw') + argchain = ArgChain() + argchain = argchain.arg(rffi.cast(lltype.Signed, raw)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 2)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 4)) + argchain = argchain.arg(rffi.cast(lltype.Signed, fn)) + glob.lst = [] + c_qsort.call(argchain, lltype.Void) + assert len(glob.lst) > 0 + del glob.lst[:] + + cpu = self.cpu + func_adr = llmemory.cast_ptr_to_adr(c_qsort.funcsym) + funcbox = ConstInt(heaptracker.adr2int(func_adr)) + calldescr = cpu.calldescrof_dynamic([types.pointer, 
types_size_t, + types_size_t, types.pointer], + types.void) + i0 = BoxInt() + i1 = BoxInt() + i2 = BoxInt() + i3 = BoxInt() + tok = BoxInt() + faildescr = BasicFailDescr(1) + ops = [ + ResOperation(rop.CALL_RELEASE_GIL, [funcbox, i0, i1, i2, i3], None, + descr=calldescr), + ResOperation(rop.GUARD_NOT_FORCED, [], None, descr=faildescr), + ResOperation(rop.FINISH, [], None, descr=BasicFailDescr(0)) + ] + ops[1].setfailargs([]) + looptoken = LoopToken() + self.cpu.compile_loop([i0, i1, i2, i3], ops, looptoken) + self.cpu.set_future_value_int(0, rffi.cast(lltype.Signed, raw)) + self.cpu.set_future_value_int(1, 2) + self.cpu.set_future_value_int(2, 4) + self.cpu.set_future_value_int(3, rffi.cast(lltype.Signed, fn)) + assert glob.lst == [] + fail = self.cpu.execute_token(looptoken) + assert fail.identifier == 0 + assert len(glob.lst) > 0 + lltype.free(raw, flavor='raw') + # pure do_ / descr features def test_do_operations(self): From commits-noreply at bitbucket.org Sat Mar 26 17:05:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:32 +0100 (CET) Subject: [pypy-svn] pypy default: Move this hack into an official interface. Message-ID: <20110326160532.E1364282B9E@codespeak.net> Author: Armin Rigo Branch: Changeset: r42974:688735e962a3 Date: 2011-03-26 15:17 +0100 http://bitbucket.org/pypy/pypy/changeset/688735e962a3/ Log: Move this hack into an official interface. 
diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -305,6 +305,14 @@ stackcounter = StackCounter() stackcounter._freeze_() +def llexternal_use_eci(compilation_info): + """Return a dummy function that, if called in a RPython program, + adds the given ExternalCompilationInfo to it.""" + eci = ExternalCompilationInfo(post_include_bits=['#define PYPY_NO_OP()']) + eci = eci.merge(compilation_info) + return llexternal('PYPY_NO_OP', [], lltype.Void, + compilation_info=eci, sandboxsafe=True, _nowrapper=True) + # ____________________________________________________________ # Few helpers for keeping callback arguments alive # this makes passing opaque objects possible (they don't even pass diff --git a/pypy/jit/backend/x86/support.py b/pypy/jit/backend/x86/support.py --- a/pypy/jit/backend/x86/support.py +++ b/pypy/jit/backend/x86/support.py @@ -38,11 +38,7 @@ if sys.platform == 'win32': ensure_sse2_floats = lambda : None else: - _sse2_eci = ExternalCompilationInfo( + ensure_sse2_floats = rffi.llexternal_use_eci(ExternalCompilationInfo( compile_extra = ['-msse2', '-mfpmath=sse', '-DPYPY_CPU_HAS_STANDARD_PRECISION'], - separate_module_sources = ['void PYPY_NO_OP(void) {}'], - ) - ensure_sse2_floats = rffi.llexternal('PYPY_NO_OP', [], lltype.Void, - compilation_info=_sse2_eci, - sandboxsafe=True) + )) From commits-noreply at bitbucket.org Sat Mar 26 17:05:35 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:35 +0100 (CET) Subject: [pypy-svn] pypy default: Trying to fix issue677: remove the hard-coded endianness from dtoa.c, Message-ID: <20110326160535.A16E7282BE8@codespeak.net> Author: Armin Rigo Branch: Changeset: r42975:572b3296ccd8 Date: 2011-03-26 16:07 +0100 http://bitbucket.org/pypy/pypy/changeset/572b3296ccd8/ Log: Trying to fix issue677: remove the hard-coded endianness from dtoa.c, and instead generate a stub .c file that 
defines the correct endianness and #includes dtoa.c. diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -17,7 +17,6 @@ ^pypy/doc/.+\.html$ ^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ -^pypy/translator/c/src/dtoa.o$ ^pypy/translator/c/src/libffi_msvc/.+\.obj$ ^pypy/translator/c/src/libffi_msvc/.+\.dll$ ^pypy/translator/c/src/libffi_msvc/.+\.lib$ @@ -64,4 +63,4 @@ ^pypy/doc/image/parsing_example.+\.png$ ^compiled ^.git/ -^release/ \ No newline at end of file +^release/ diff --git a/pypy/rlib/rdtoa.py b/pypy/rlib/rdtoa.py --- a/pypy/rlib/rdtoa.py +++ b/pypy/rlib/rdtoa.py @@ -5,16 +5,37 @@ from pypy.rpython.lltypesystem import lltype, rffi from pypy.rlib import jit from pypy.rlib.rstring import StringBuilder -import py +import py, sys cdir = py.path.local(pypydir) / 'translator' / 'c' include_dirs = [cdir] +# set the word endianness based on the host's endianness +if sys.byteorder == 'little': + source_file = [] +elif sys.byteorder == 'big': + source_file = ['#define WORDS_BIGENDIAN'] +else: + raise AssertionError(sys.byteorder) + +# ...and the C double's endianness +if float.__getformat__('double') == 'IEEE, little-endian': + source_file.append('#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754') +elif float.__getformat__('double') == 'IEEE, big-endian': + source_file.append('#define DOUBLE_IS_BIG_ENDIAN_IEEE754') +else: + raise AssertionError(float.__getformat__()) + +source_file.append('#include "src/dtoa.c"') +source_file = '\n\n'.join(source_file) + +# ____________________________________________________________ + eci = ExternalCompilationInfo( include_dirs = [cdir], includes = ['src/dtoa.h'], libraries = [], - separate_module_files = [cdir / 'src' / 'dtoa.c'], + separate_module_sources = [source_file], export_symbols = ['_PyPy_dg_strtod', '_PyPy_dg_dtoa', '_PyPy_dg_freedtoa', diff --git a/pypy/translator/c/src/dtoa.c b/pypy/translator/c/src/dtoa.c --- a/pypy/translator/c/src/dtoa.c +++ b/pypy/translator/c/src/dtoa.c @@ -116,7 +116,6 @@ /* Begin 
PYPY hacks */ /* #include "Python.h" */ -#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754 #define HAVE_UINT32_T #define HAVE_INT32_T #define HAVE_UINT64_T From commits-noreply at bitbucket.org Sat Mar 26 17:05:38 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:38 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: Add test_compile_framework_close_stack. In-progress. Message-ID: <20110326160538.748E0282BDD@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42976:97fedc576412 Date: 2011-03-26 15:20 +0000 http://bitbucket.org/pypy/pypy/changeset/97fedc576412/ Log: Add test_compile_framework_close_stack. In-progress. I don't understand so far how it passes, given that the functionality is not implemented. diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -236,7 +236,7 @@ def __init__(self): self.callbacks = {} -def _make_wrapper_for(TP, callable, callbackholder, aroundstate=None): +def _make_wrapper_for(TP, callable, callbackholder=None, aroundstate=None): """ Function creating wrappers for callbacks. 
Note that this is cheating as we assume constant callbacks and we just memoize wrappers """ @@ -247,7 +247,8 @@ else: errorcode = TP.TO.RESULT._example() callable_name = getattr(callable, '__name__', '?') - callbackholder.callbacks[callable] = True + if callbackholder is not None: + callbackholder.callbacks[callable] = True args = ', '.join(['a%d' % i for i in range(len(TP.TO.ARGS))]) source = py.code.Source(r""" def wrapper(%s): # no *args - no GIL for mallocing the tuple diff --git a/pypy/jit/codewriter/jtransform.py b/pypy/jit/codewriter/jtransform.py --- a/pypy/jit/codewriter/jtransform.py +++ b/pypy/jit/codewriter/jtransform.py @@ -754,10 +754,10 @@ from pypy.rpython.lltypesystem.rffi import size_and_sign, sizeof from pypy.rlib.rarithmetic import intmask assert not self._is_gc(op.args[0]) - size1, unsigned1 = size_and_sign(op.args[0].concretetype) size2, unsigned2 = size_and_sign(op.result.concretetype) if size2 >= sizeof(lltype.Signed): return # the target type is LONG or ULONG + size1, unsigned1 = size_and_sign(op.args[0].concretetype) # def bounds(size, unsigned): if unsigned: diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -571,3 +571,60 @@ def test_compile_framework_minimal_size_in_nursery(self): self.run('compile_framework_minimal_size_in_nursery') + + def define_compile_framework_close_stack(self): + from pypy.rlib.libffi import CDLL, types, ArgChain, clibffi + from pypy.rpython.lltypesystem.ll2ctypes import libc_name + from pypy.rpython.annlowlevel import llhelper + # + class Glob(object): + pass + glob = Glob() + class X(object): + pass + # + def callback(p1, p2): + for i in range(100): + glob.lst.append(X()) + return rffi.cast(rffi.INT, 1) + CALLBACK = lltype.Ptr(lltype.FuncType([lltype.Signed, + lltype.Signed], rffi.INT)) + # + @dont_look_inside + def alloc1(): + return llmemory.raw_malloc(16) + 
@dont_look_inside + def free1(p): + llmemory.raw_free(p) + # + def f42(n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s): + length = len(glob.lst) + raw = alloc1() + argchain = ArgChain() + fn = llhelper(CALLBACK, rffi._make_wrapper_for(CALLBACK, callback)) + argchain = argchain.arg(rffi.cast(lltype.Signed, raw)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 2)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 8)) + argchain = argchain.arg(rffi.cast(lltype.Signed, fn)) + glob.c_qsort.call(argchain, lltype.Void) + free1(raw) + check(len(glob.lst) > length) + del glob.lst[:] + n -= 1 + return n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s + # + def before(n, x): + libc = CDLL(libc_name) + types_size_t = clibffi.cast_type_to_ffitype(rffi.SIZE_T) + c_qsort = libc.getpointer('qsort', [types.pointer, types_size_t, + types_size_t, types.pointer], + types.void) + glob.c_qsort = c_qsort + glob.lst = [] + return (n, None, None, None, None, None, None, + None, None, None, None, None) + # + return before, f42, None + + def test_compile_framework_close_stack(self): + self.run('compile_framework_close_stack') From commits-noreply at bitbucket.org Sat Mar 26 17:05:38 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:38 +0100 (CET) Subject: [pypy-svn] pypy jitypes2: merge heads Message-ID: <20110326160538.D8708282BA1@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42977:be09dafcca93 Date: 2011-03-26 15:20 +0000 http://bitbucket.org/pypy/pypy/changeset/be09dafcca93/ Log: merge heads From commits-noreply at bitbucket.org Sat Mar 26 17:05:39 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sat, 26 Mar 2011 17:05:39 +0100 (CET) Subject: [pypy-svn] pypy default: Allow this file to be imported if 'float' has no '__getformat__', Message-ID: <20110326160539.B466D282BA1@codespeak.net> Author: Armin Rigo Branch: Changeset: r42978:fbc54ca1d7c2 Date: 2011-03-26 16:05 +0000 http://bitbucket.org/pypy/pypy/changeset/fbc54ca1d7c2/ Log: Allow this 
file to be imported if 'float' has no '__getformat__', as is the case in pypy 1.4.1. diff --git a/pypy/rlib/rdtoa.py b/pypy/rlib/rdtoa.py --- a/pypy/rlib/rdtoa.py +++ b/pypy/rlib/rdtoa.py @@ -11,21 +11,17 @@ include_dirs = [cdir] # set the word endianness based on the host's endianness +# and the C double's endianness (which should be equal) +if hasattr(float, '__getformat__'): + assert float.__getformat__('double') == 'IEEE, %s-endian' % sys.byteorder if sys.byteorder == 'little': - source_file = [] + source_file = ['#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754'] elif sys.byteorder == 'big': - source_file = ['#define WORDS_BIGENDIAN'] + source_file = ['#define WORDS_BIGENDIAN', + '#define DOUBLE_IS_BIG_ENDIAN_IEEE754'] else: raise AssertionError(sys.byteorder) -# ...and the C double's endianness -if float.__getformat__('double') == 'IEEE, little-endian': - source_file.append('#define DOUBLE_IS_LITTLE_ENDIAN_IEEE754') -elif float.__getformat__('double') == 'IEEE, big-endian': - source_file.append('#define DOUBLE_IS_BIG_ENDIAN_IEEE754') -else: - raise AssertionError(float.__getformat__()) - source_file.append('#include "src/dtoa.c"') source_file = '\n\n'.join(source_file) From commits-noreply at bitbucket.org Sat Mar 26 17:34:04 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 17:34:04 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: add benchmark results with various combinations of features enabled. Message-ID: <20110326163404.4860536C055@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3419:54fe72df6e46 Date: 2011-03-26 17:33 +0100 http://bitbucket.org/pypy/extradoc/changeset/54fe72df6e46/ Log: add benchmark results with various combinations of features enabled. 
diff --git a/talk/icooolps2011/benchmarks/paper-no-map-no-versions.json b/talk/icooolps2011/benchmarks/paper-no-map-no-versions.json new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/benchmarks/paper-no-map-no-versions.json @@ -0,0 +1,1 @@ +{"options": "", "results": [["crypto_pyaes", "RawResult", {"changed_times": [0.90968012809800003, 0.26170802116399999, 0.25785112381000003, 0.271646976471, 0.24586701393099999, 0.24329781532299999, 0.26593494415300001, 0.244066953659, 0.241585969925, 0.241628885269, 0.24107789993299999, 0.241670131683, 0.24883198738100001, 0.25121092796299999, 0.24328303337099999, 0.270493030548, 0.24111700057999999, 0.24127578735399999, 0.240916013718, 0.24128103256200001, 0.241383075714, 0.24211096763600001, 0.239951133728, 0.240896940231, 0.269706010818, 0.240354061127, 0.24149084091199999, 0.240961074829, 0.24031996726999999, 0.242077827454, 0.24312996864299999, 0.242409944534, 0.243405103683, 0.24757409095800001, 0.290009975433, 0.24828910827600001, 0.24272799491899999, 0.242501974106, 0.242834091187, 0.241096019745, 0.240152835846, 0.240782976151, 0.24129986763, 0.27049803733799999, 0.240900039673, 0.24061393737799999, 0.24026703834499999, 0.24046397209199999, 0.24000310897800001, 0.24015092849700001], "base_times": [2.8431730270400002, 2.8596279621099998, 2.84830594063, 2.86815619469, 2.9670741558099998, 2.9265959262800001, 2.92723703384, 2.9616861343399998, 2.8669378757500001, 2.8540661334999999, 2.8663120269800002, 2.8459079265599998, 2.8853631019599999, 2.8537850379899998, 2.8458979129799999, 2.8518140316, 2.8497431278200001, 2.8493149280500001, 2.8408098220800002, 2.84876990318, 2.8433921337100001, 2.8501889705700001, 2.84734416008, 2.84982514381, 2.84102082253, 2.84824991226, 2.84714984894, 2.8557260036500001, 2.8698899745899999, 2.8883328437800002, 2.8446230888400001, 2.85510110855, 2.8452558517500002, 2.8569431304899999, 2.8603079319, 2.8478608131400001, 2.9129819870000002, 2.8943500518800001, 2.8452489376100001, 
2.8411419391599999, 2.84658002853, 2.8635709285700002, 2.8744399547600001, 2.9570639133499999, 2.86628007889, 2.8421561718000001, 2.9031870365099999, 2.9766941070600001, 2.8686139583600001, 2.8507399559], "pypy_op_count": 14713}], ["django", "RawResult", {"changed_times": [0.409361839294, 0.39968419075, 0.40850090980499998, 0.43737792968799999, 0.39969611167899999, 0.40908908844000003, 0.40129613876300002, 0.40819787979099997, 0.39684891700699998, 0.40701103210400003, 0.39622306823699999, 0.40736603736900001, 0.39663887023900002, 0.412101984024, 0.39635610580399999, 0.40888404846199999, 0.40793585777300001, 0.40636897087099999, 0.39608311653099998, 0.40685009956399998, 0.395928859711, 0.40767192840599997, 0.39801812171899997, 0.41140508651699997, 0.40022993087800002, 0.41680383682299998, 0.42870593071000002, 0.40220189094499997, 0.46484398841899999, 0.39915013313300002, 0.40977406501800001, 0.39693999290499998, 0.40603685378999999, 0.39754915237400001, 0.40675592422500001, 0.39628291130100002, 0.40655803680399999, 0.397660017014, 0.406586170197, 0.405714035034, 0.40726804733299998, 0.39518284797699998, 0.40726399421699999, 0.39690613746600001, 0.40632605552700002, 0.396065950394, 0.40693998336800002, 0.395999908447, 0.40625214576699997, 0.39572000503499999], "base_times": [0.99359703064000005, 0.99389290809599995, 0.99427080154400005, 0.99465894699099999, 0.99388504028299995, 0.99396109581000003, 0.99484896659900002, 0.99852204322799998, 0.99389815330499998, 0.99433708190900005, 0.99418401718100002, 0.99542903900099999, 0.99477505683900003, 1.00037693977, 0.99475002288799996, 0.99459695816000004, 0.99590921401999999, 1.01496505737, 1.0459671020500001, 1.04703593254, 1.0422430038499999, 1.0054759979200001, 1.0127670764900001, 1.00261497498, 1.0007750987999999, 1.00448012352, 1.00702595711, 1.0008399486499999, 0.99569511413599998, 0.99400901794399998, 0.99406409263600004, 0.99468493461600005, 0.99344110488899995, 0.99440884590099998, 0.99411320686299998, 
0.99357295036299997, 0.99348998069799999, 0.99420499801600004, 0.99391007423400002, 0.99506902694699995, 0.99334502220200005, 0.99503016471899997, 0.99399304389999998, 0.99507188797000001, 0.99316120147700004, 0.99476599693300005, 0.99433612823499995, 0.99457788467399999, 0.99464392662000001, 0.99476480483999996], "pypy_op_count": 13106}], ["fannkuch", "RawResult", {"changed_times": [0.53282713890099997, 0.39407396316499999, 0.39206409454300001, 0.38817501068100002, 0.38837003707899997, 0.38854622840899999, 0.38604807853700002, 0.38786697387699998, 0.38789105415300001, 0.38778018951400001, 0.38464999198900002, 0.38669800758400003, 0.38773012161300002, 0.38786101341200002, 0.38868999481200001, 0.38628578186000001, 0.38350701332100001, 0.38983893394500002, 0.38299393653899999, 0.38437294960000001, 0.38608789444000002, 0.38481783866899999, 0.38570213317899998, 0.39002513885500001, 0.386386156082, 0.38542413711500001, 0.38518500328100003, 0.383751869202, 0.38267397880600001, 0.38622903823900001, 0.38364982604999998, 0.38752388954200001, 0.38348603248599999, 0.384949922562, 0.38246607780500003, 0.38364696502700002, 0.382498025894, 0.383237838745, 0.38446092605600002, 0.388746023178, 0.38745093345600001, 0.38526391983000002, 0.41176295280500003, 0.408805847168, 0.38728690147400002, 0.39274883270299998, 0.38636088371299998, 0.38640189170799999, 0.38474988937400001, 0.38589191436800002], "base_times": [2.0043020248399999, 2.0073390007, 1.9952139854399999, 2.0008580684699999, 1.98554611206, 1.97409701347, 1.9786939620999999, 2.0790672302200002, 2.03013920784, 2.0991339683499999, 2.0163910388900002, 1.9821600913999999, 1.98520112038, 1.98102498055, 1.99343204498, 1.9887850284599999, 1.99591588974, 1.97612094879, 1.9794700145699999, 1.9827270507800001, 1.9733400344800001, 1.97881388664, 1.9812049865700001, 1.9722619056699999, 1.9824829101599999, 1.9844620227800001, 1.9743411541, 1.9831020832099999, 1.9845609664899999, 1.97199606895, 1.9822628498099999, 1.9870131015800001, 
1.97464108467, 1.9844839572899999, 1.9838709831200001, 1.97589802742, 1.97996997833, 1.9799330234500001, 1.9721341133100001, 1.9780488014199999, 1.9803719520600001, 1.9708559513099999, 1.9771769046800001, 1.9796018600500001, 1.9717090129899999, 1.9781260490399999, 1.98689484596, 1.99278998375, 1.98370909691, 1.9814100265500001], "pypy_op_count": 4169}], ["go", "RawResult", {"changed_times": [0.61724591255200001, 0.70614504814099999, 0.60431313514700002, 0.56376600265499999, 0.62421798706099996, 0.59825491905200001, 0.62490892410300003, 0.58459997177099998, 0.53595900535600005, 0.53137493133500002, 0.54526805877700002, 0.64253807067900004, 0.52755808830299999, 0.52279210090600003, 0.61580610275299996, 0.60318899154700001, 0.61505889892599996, 0.57422089576699997, 0.59658789634700005, 0.59845900535600005, 0.59890508651700003, 0.51722002029400005, 0.54849410057100001, 0.52876210212700003, 0.514042139053, 0.53425788879400005, 0.51968383789100003, 0.51041507720900003, 0.51750707626299997, 0.56681394576999999, 0.51937294006300005, 0.54086399078400005, 0.51348996162399996, 0.517016172409, 0.56075811386100005, 0.50965595245399997, 0.53467798232999997, 0.50504302978500004, 0.53375720977800001, 0.52729606628400005, 0.50850987434399997, 0.49878597259500002, 0.51408100128199996, 0.53317594528199996, 0.50536012649499995, 0.50333714485199998, 0.51512408256499997, 0.51808619499200004, 0.51351904869099996, 0.53780102729799995], "base_times": [0.94755005836499995, 0.94896602630600002, 0.94580793380700001, 0.94905400276200003, 0.94746804237399995, 0.94798612594599996, 0.94428610801699997, 0.95008301734900003, 0.94411087036100005, 0.94858789444000002, 0.95008897781400004, 0.95165205001800002, 0.94537496566799994, 0.95032000541700001, 0.95284795761100005, 0.94787907600400001, 0.95058917999299997, 0.94769811630199996, 0.94919896125799996, 0.94800806045499997, 0.94718313217200001, 0.98078894615199996, 0.95006299018899998, 0.94940996170000003, 0.95056200027500004, 0.94796395301799996, 
0.94641494750999999, 0.94851684570299999, 0.94812989234900003, 0.946643829346, 0.95064401626600004, 0.94859600067100003, 0.950106859207, 0.94876503944400004, 0.94864702224700004, 0.94942378997800003, 0.95094609260600005, 0.94845390319800005, 0.94986391067499998, 0.94734883308399997, 0.94881701469400004, 0.94665908813499999, 0.95552802085900002, 0.95052385330199995, 0.94808387756300005, 0.94853401184099995, 0.95030283927899994, 0.95462107658399997, 0.95332312583900003, 0.95663905143700001], "pypy_op_count": 87266}], ["html5lib", "RawResult", {"changed_times": [16.261095047000001, 12.1599109173, 11.678818941099999, 11.649614095700001, 12.0981030464, 11.715470075600001, 12.0015318394, 11.596802949900001, 11.2248840332, 11.198246002199999, 11.1396479607, 11.323916196800001, 11.1689560413, 11.163488149599999, 11.2186272144, 11.1584410667, 11.1322851181, 10.996668100400001, 11.198470115699999, 11.183460950900001, 11.2130110264, 11.886582136199999, 12.131786823300001, 11.515090942400001, 11.222639083900001, 11.1527180672, 11.0736188889, 11.0619471073, 10.988390922500001, 11.063099145900001, 11.1647350788, 11.384175062200001, 11.213179111500001, 11.0771570206, 11.1129570007, 10.9827890396, 11.1152429581, 11.1721689701, 10.9488430023, 11.2089259624, 10.987019062, 11.4920239449, 11.702784061399999, 11.1414158344, 10.923774957699999, 11.076424121900001, 11.109690904600001, 11.0931081772, 10.983553886399999, 11.135076999700001], "base_times": [14.4300708771, 14.583441972699999, 14.792320012999999, 14.6171739101, 14.4988467693, 14.9017119408, 15.0939700603, 14.9997739792, 14.8922901154, 14.6031079292, 14.504671096799999, 14.680979967100001, 14.656867027300001, 14.602684974700001, 14.6278529167, 14.7529799938, 14.713897943499999, 14.8105311394, 14.734623909, 14.628256797800001, 14.743352890000001, 14.657305002199999, 14.6423289776, 14.704689979599999, 14.694815158800001, 14.6426570415, 14.605273008299999, 14.9715600014, 14.6226658821, 14.7121901512, 14.736593008, 
14.616090059299999, 14.741003990199999, 14.598248958599999, 14.5658650398, 14.788245916399999, 14.7834279537, 14.705070018800001, 14.7029640675, 14.5806789398, 14.733406066900001, 15.037626981700001, 14.965485096, 14.861964941, 14.8931558132, 14.589006185500001, 14.708989858600001, 14.8662919998, 14.924582004499999, 14.869343042400001], "pypy_op_count": 172738}], ["meteor-contest", "RawResult", {"changed_times": [0.397058010101, 0.31222486496000001, 0.30470013618500003, 0.30254507064800001, 0.30421304702800001, 0.30730509758000002, 0.30748391151400001, 0.30145502090499998, 0.30126309394799999, 0.30186700820899998, 0.29999780654899999, 0.30109715461699998, 0.32023191451999999, 0.29969692230200001, 0.30040502548199999, 0.30065608024599999, 0.30075788497900002, 0.30641603469799999, 0.298620939255, 0.29850816726700002, 0.29853796958899997, 0.29826688766499998, 0.298577070236, 0.29814004898099999, 0.29943299293499998, 0.29891991615300001, 0.298165082932, 0.30019903183000002, 0.29888296127300001, 0.30334305763199998, 0.30935788154600002, 0.30365204811099999, 0.29853391647299998, 0.298191070557, 0.30730795860299998, 0.29867315292399998, 0.29787182807899998, 0.29819393158000002, 0.29812002182000003, 0.29748177528399999, 0.30592608451800002, 0.30640101432799999, 0.30461001396199999, 0.29782390594500002, 0.29851198196399997, 0.297985076904, 0.29792809486400001, 0.29788279533399997, 0.29954814910900002, 0.29781603813200003], "base_times": [0.34838223457299999, 0.34886598587000001, 0.34866976738, 0.34914493560799997, 0.34892392158500002, 0.34897184371899997, 0.34874105453499998, 0.348833084106, 0.34855294227599998, 0.34869408607500002, 0.35135698318500003, 0.349362134933, 0.349206924438, 0.34881305694600001, 0.352702140808, 0.349253892899, 0.34810900688200003, 0.34919714927700002, 0.34905695915200002, 0.34929203987099999, 0.35006213188200003, 0.34877514839200002, 0.34930610656700001, 0.34935307502700003, 0.34853196144100002, 0.35073018074000001, 0.34898900985699999, 
0.348687887192, 0.34926390647900002, 0.34951996803300001, 0.34894990921000002, 0.34928989410400002, 0.348195791245, 0.34861302375800002, 0.34957909584000002, 0.34903597831700001, 0.34931612014800001, 0.34873199462900001, 0.34908890724199998, 0.34929084777800001, 0.35125994682299999, 0.34869885444600002, 0.34865093231200001, 0.34934186935400002, 0.34947490692099997, 0.34992098808299998, 0.34933805465700002, 0.34940505027800001, 0.34924411773699998, 0.35305714607200001], "pypy_op_count": 6803}], ["pyflate-fast", "RawResult", {"changed_times": [2.7892501354200001, 2.81428313255, 2.7594170570399998, 2.8596301078800002, 2.8098950386000001, 2.79582881927, 2.8297259807600001, 2.9023971557600001, 2.8184750080100001, 2.8439729213699998, 2.9278090000199999, 2.9684200286900002, 2.9002227783199999, 2.9356980323799999, 3.0197591781600002, 3.2985301017799999, 3.00327801704, 3.14128494263, 2.93946099281, 2.8438420295700002, 2.8750038146999999, 2.83465504646, 2.8873970508600002, 2.8306438922899999, 2.8273618221299999, 2.9049830436700002, 2.8429479599, 2.86829996109, 2.8764369487799999, 2.8884871005999999, 2.8163690567000002, 2.8612251281700001, 2.8340430259699998, 2.94311404228, 2.87245893478, 2.9002301692999999, 3.0006260871900001, 2.9453620910599998, 2.8981261253400001, 2.93873715401, 2.80990791321, 2.8824269771600002, 2.8040461540199999, 2.9164080619799999, 2.83879303932, 2.8158490657800002, 3.0377941131599999, 3.0034391880000002, 2.8983750343299999, 2.8742489814800001], "base_times": [3.20239710808, 3.2851829528800001, 3.3033690452600002, 3.25438094139, 3.1806712150599998, 3.22766089439, 3.1690571308100002, 3.2391319274899999, 3.24552893639, 3.2414500713300001, 3.21615004539, 3.2073690891300002, 3.26479291916, 3.2651319503799998, 3.2642319202399999, 3.2732741832699999, 3.24680185318, 4.1558468341800001, 3.3647878169999998, 3.3489861488299999, 3.3357849121099998, 3.2745559215500002, 3.2354571819300002, 3.18098187447, 3.2074930667900001, 3.2279529571499999, 3.1906809806799998, 
3.24963402748, 3.1782009601599999, 3.1669700145699999, 3.1640291214, 3.17319083214, 3.16554999352, 3.19478797913, 3.1732139587399999, 3.1655089855199998, 3.1706278324100001, 3.1658771038100002, 3.1661009788499999, 3.1638181209599998, 3.1724820136999998, 3.1914839744600001, 3.1756591796899998, 3.1584219932600002, 3.1598799228700001, 3.1623260974899998, 3.1672358512900001, 3.1677899360700001, 3.1730349063899999, 3.20204305649], "pypy_op_count": 34269}], ["raytrace-simple", "RawResult", {"changed_times": [1.73262095451, 1.7112429142000001, 1.7115881443000001, 1.7059481143999999, 1.7211890220599999, 1.70691204071, 1.76205778122, 1.73319506645, 1.7145311832400001, 1.6997001171099999, 1.7010910510999999, 1.717056036, 1.70808196068, 1.82129788399, 1.6985220909100001, 1.6981899738299999, 1.70037913322, 1.7169890403700001, 1.71297621727, 1.70048308372, 1.7680749893200001, 1.6982471942899999, 1.6960709094999999, 1.7025699615500001, 1.69845485687, 1.6962928772000001, 1.7055609226199999, 1.7190349102, 1.70597481728, 1.75849199295, 1.69964694977, 1.69898509979, 1.78078103065, 1.6959869861600001, 1.6965489387499999, 1.71496701241, 1.69671702385, 1.6973099708599999, 1.6970541477200001, 1.6958220005, 1.70453715324, 1.71490097046, 1.7014169693000001, 1.6966300010699999, 1.69708299637, 1.7079961299899999, 1.6944689750699999, 1.7149081230200001, 1.7047410011299999, 1.6980168819399999], "base_times": [2.70346713066, 2.70947599411, 2.7158110141799998, 2.71175098419, 2.7108299732200001, 2.7076630592300002, 2.70842194557, 2.7088060379000001, 2.7056019306199999, 2.7068400383000002, 2.7064809799199998, 2.71110296249, 2.7139241695399998, 2.71315908432, 2.7047669887499999, 2.7143340110800001, 2.7202367782599999, 2.7363440990400001, 2.7045259475700001, 2.7077820301100002, 2.7101051807399998, 2.7058789730099999, 2.7080590724900002, 2.7069141864800002, 2.7032549381300002, 2.7078340053600001, 2.7146408557899999, 2.7089159488700001, 2.7106828689600002, 2.7044599056199998, 2.7020061016099999, 
2.7065110206599998, 2.7078759670300001, 2.70477390289, 2.7068409919700001, 2.7055530548100002, 2.7047748565699998, 2.7025859355900002, 2.7364628314999999, 2.7081980705299999, 2.70742607117, 2.7107248306299998, 2.7036969661699999, 2.7091271877300001, 2.7090950012200001, 2.7064878940599999, 2.7170219421400001, 2.7107899189000002, 2.71460294724, 2.7067561149600001], "pypy_op_count": 59284}], ["richards", "RawResult", {"changed_times": [0.45202112197900002, 0.42947602272000002, 0.42763590812699998, 0.425966024399, 0.42685508727999999, 0.42552399635299998, 0.42684698104899998, 0.44417214393600002, 0.42493796348599999, 0.42340397834799998, 0.42353701591499998, 0.42347097396900002, 0.42352604865999999, 0.42338705062900001, 0.42590904235799998, 0.42330503463699998, 0.42370104789700003, 0.42312502861000001, 0.42597603797900002, 0.42494797706600002, 0.42284703254700001, 0.42292404174800002, 0.42328500747699999, 0.422554969788, 0.426184177399, 0.42345690727200003, 0.42149114608799998, 0.42362189292899999, 0.42172002792399998, 0.42114090919500002, 0.42194080352800001, 0.42189478874199998, 0.42157793045000003, 0.42195200920100001, 0.421849012375, 0.42390108108500002, 0.42155480384799998, 0.421267986298, 0.42001795768700001, 0.42183709144600001, 0.42241597175599999, 0.42008614540099998, 0.42037081718399999, 0.42071795463599998, 0.42058992385900001, 0.42197108268700001, 0.42147111892700001, 0.42049002647400002, 0.42103791236900001, 0.41992688178999998], "base_times": [0.348742961884, 0.34398007392899999, 0.34170007705700001, 0.343639850616, 0.34330916404700001, 0.345504045486, 0.34513807296799998, 0.34365606308000002, 0.34516596794100002, 0.34796619415300001, 0.34372591972400002, 0.35078692436199999, 0.34651207923900001, 0.34183096885699998, 0.344089984894, 0.35141682624800002, 0.34654712676999999, 0.34858798980700001, 0.34706401824999999, 0.34679889678999998, 0.343952894211, 0.34561705589300001, 0.35027098655700001, 0.34220910072299998, 0.349514007568, 0.34791398048400002, 
0.34430599212599999, 0.34272408485400002, 0.34231090545699999, 0.34883093833899997, 0.34666919708299998, 0.348744153976, 0.34603691101099998, 0.34201407432600001, 0.343272924423, 0.34394192695600001, 0.34105587005600002, 0.34224200248699999, 0.34430599212599999, 0.34348201751700003, 0.34811687469500002, 0.34051203727700002, 0.34049201011699998, 0.34295606613200003, 0.34316110611, 0.34472513198900001, 0.34576106071500001, 0.34455490112300002, 0.34601092338599998, 0.341138124466], "pypy_op_count": 25560}], ["spambayes", "RawResult", {"changed_times": [0.364380121231, 0.44119691848800002, 0.41675400733899998, 0.41085791587800002, 0.386422157288, 0.31945586204499998, 0.39294505119299999, 0.29344987869299999, 0.29942297935500001, 0.27105689048800002, 0.30189108848599999, 0.254878997803, 0.23610687255900001, 0.30035305023199999, 0.481290102005, 0.39843392372100001, 0.26692605018600002, 0.23347496986399999, 0.28728699684100001, 0.22171378135700001, 0.22206902504000001, 0.30090498924300002, 0.227879047394, 0.21359992027300001, 0.218796014786, 0.26997900009199999, 0.24744987487799999, 0.221409082413, 0.209238052368, 0.236274003983, 0.25710320472699999, 0.28479599952700002, 0.204644918442, 0.20791101455700001, 0.208427906036, 0.27565217018100002, 0.23077392578100001, 0.222128868103, 0.216094017029, 0.22877502441399999, 0.19906902313200001, 0.20046997070299999, 0.22600197792099999, 0.27111887931799999, 0.19655299186700001, 0.19667291641199999, 0.19611716270400001, 0.226536035538, 0.19816303253199999, 0.21359205245999999], "base_times": [0.30083990097000002, 0.30107092857399997, 0.301158905029, 0.30125212669399998, 0.30099701881399998, 0.30041384697000001, 0.30106282234199999, 0.30104899406399999, 0.301003217697, 0.301099061966, 0.30334615707399998, 0.30100893974300003, 0.30095696449300002, 0.30104303360000001, 0.30135512351999999, 0.30050110816999998, 0.30136203765899999, 0.30115795135500001, 0.30083012580899998, 0.30126714706399998, 0.30079507827800001, 0.30091309547400003, 
0.30103015899699997, 0.30150794982899998, 0.30061888694799999, 0.300558805466, 0.30093193054200001, 0.30112004280100002, 0.30096697807299999, 0.30083203315700002, 0.30114006996199999, 0.30109095573400002, 0.30080103874199998, 0.30101490020799998, 0.30102705955499998, 0.30132293701200003, 0.30128097534199999, 0.301703214645, 0.30121803283699999, 0.30156207084699999, 0.30127501487699998, 0.30118989944500002, 0.30148100852999998, 0.30227804184000001, 0.30163002014200002, 0.30123686790499998, 0.301606893539, 0.301479816437, 0.30178403854399999, 0.30203700065599998], "pypy_op_count": 79856}], ["spectral-norm", "RawResult", {"changed_times": [0.10959196090700001, 0.042332172393800001, 0.040981054305999999, 0.039587974548300001, 0.040713071823100001, 0.0411989688873, 0.038578987121600003, 0.040179014205899997, 0.038127183914200002, 0.038395881652800001, 0.038568973541299997, 0.037137985229500001, 0.037296056747399997, 0.036942958831800002, 0.037352085113500001, 0.037146091461200001, 0.0367410182953, 0.037198066711400003, 0.037118911743199999, 0.037197113037100001, 0.037024974822999997, 0.037132978439300003, 0.0367619991302, 0.0372228622437, 0.0369851589203, 0.036921024322500003, 0.0372228622437, 0.037124156951900002, 0.037063837051399998, 0.036730051040600002, 0.037120103836100003, 0.037787914276100003, 0.037085056304899999, 0.037086009979200002, 0.037244081497200002, 0.036707162857099997, 0.037101030349699998, 0.037016868591300003, 0.036715030670200002, 0.037021160125700003, 0.037648916244500001, 0.036882162094100002, 0.037200927734400001, 0.03688788414, 0.0368630886078, 0.036957979202299998, 0.0372130870819, 0.036842107772799997, 0.036978006362900002, 0.038713932037399999], "base_times": [0.52954912185699998, 0.52811813354500003, 0.527950048447, 0.52731108665500004, 0.52899098396300004, 0.52680087089500005, 0.52559590339700002, 0.52714109420800004, 0.52924609184299998, 0.52887797355699995, 0.534622907639, 0.529407024384, 0.52711510658299998, 0.52651882171599995, 
0.52681398391699996, 0.52387809753400005, 0.52621483802799995, 0.52459812164300001, 0.53056693077100003, 0.52271103858900003, 0.54028511047399996, 0.52738118171699999, 0.53087997436500001, 0.526041984558, 0.53446507453900005, 0.52807283401500005, 0.54005789756800004, 0.52472209930400004, 0.52734208107000002, 0.52826714515700002, 0.52903914451599998, 0.52639102935799997, 0.52782988548300003, 0.53036499023399997, 0.52859020233200005, 0.52875494957000002, 0.52858805656399999, 0.52843213081399998, 0.52754306793200001, 0.52720117569000002, 0.52665996551500005, 0.52881693839999999, 0.52722406387300003, 0.52798104286199998, 0.52870011329699995, 0.52709317207299999, 0.52946305275000005, 0.52820086479200001, 0.52963709831200001, 0.53435611724900001], "pypy_op_count": 3298}], ["telco", "RawResult", {"changed_times": [0.93205800000000005, 0.83205200000000001, 0.77204799999999996, 0.80805099999999996, 0.77204799999999996, 0.81205099999999997, 0.792049, 0.76404799999999995, 0.75204700000000002, 0.75204700000000002, 0.74804700000000002, 0.74804700000000002, 0.75204700000000002, 0.74004599999999998, 0.75604700000000002, 0.73604599999999998, 0.75204700000000002, 0.74004599999999998, 0.73604599999999998, 0.73604599999999998, 0.75204700000000002, 0.74404700000000001, 0.74804700000000002, 0.73604599999999998, 0.74404599999999999, 0.73604599999999998, 0.73204599999999997, 0.73204599999999997, 0.74804700000000002, 0.76804700000000004, 0.74404700000000001, 0.74404599999999999, 0.73204599999999997, 0.73204599999999997, 0.73204599999999997, 0.74804700000000002, 0.73204499999999995, 0.73204599999999997, 0.72804599999999997, 0.72804599999999997, 0.74404700000000001, 0.73204499999999995, 0.72804599999999997, 0.73204599999999997, 0.74404599999999999, 0.73204599999999997, 0.72804599999999997, 0.73604599999999998, 0.74404599999999999, 0.73204599999999997], "base_times": [1.21, 1.21, 1.21, 1.21, 1.22, 1.21, 1.2, 1.22, 1.21, 1.2, 1.2, 1.21, 1.21, 1.21, 1.22, 1.2, 1.21, 1.21, 1.21, 1.21, 1.21, 
1.22, 1.2, 1.21, 1.22, 1.21, 1.2, 1.21, 1.21, 1.21, 1.22, 1.2, 1.21, 1.21, 1.22, 1.2, 1.21, 1.21, 1.21, 1.21, 1.2, 1.21, 1.21, 1.21, 1.21, 1.21, 1.21, 1.21, 1.2, 1.22], "pypy_op_count": 36786}], ["twisted_names", "RawResult", {"changed_times": [0.0077881619937694704, 0.0079113924050632917, 0.0078988941548183249, 0.0075987841945288756, 0.0081234768480909821, 0.0081366965012205049, 0.0078125, 0.0074906367041198503, 0.0075815011372251705, 0.0078247261345852897, 0.0075930144267274107, 0.0074962518740629685, 0.0080775444264943458, 0.0074962518740629685, 0.007955449482895784, 0.0074682598954443615, 0.0074682598954443615, 0.0076863950807071479, 0.0074349442379182153, 0.0074404761904761901, 0.0076511094108645756, 0.007446016381236039, 0.0074183976261127599, 0.0076804915514592934, 0.0073964497041420114, 0.0073964497041420114, 0.0076511094108645756, 0.0074349442379182153, 0.0074074074074074077, 0.0084459459459459464, 0.0094876660341555973, 0.0091659028414298807, 0.0086505190311418692, 0.0078186082877247844, 0.0075528700906344415, 0.0074682598954443615, 0.0076982294072363358, 0.007874015748031496, 0.0075187969924812026, 0.0075872534142640367, 0.0076335877862595417, 0.0073691967575534268, 0.0073583517292126564, 0.007621951219512195, 0.0074404761904761901, 0.007385524372230428, 0.007575757575757576, 0.007385524372230428, 0.0073800738007380072, 0.0073746312684365781], "base_times": [0.0096061479346781949, 0.0096061479346781949, 0.0096153846153846159, 0.0096246390760346481, 0.0095969289827255271, 0.0096246390760346481, 0.0095969289827255271, 0.0096618357487922701, 0.0097465886939571145, 0.0095693779904306216, 0.0096153846153846159, 0.0095877277085330784, 0.0096153846153846159, 0.0096061479346781949, 0.0096246390760346481, 0.0096246390760346481, 0.0096061479346781949, 0.0095969289827255271, 0.009643201542912247, 0.0096246390760346481, 0.0095877277085330784, 0.0096339113680154135, 0.0095969289827255271, 0.0096246390760346481, 0.0095877277085330784, 0.0095969289827255271, 
0.0096153846153846159, 0.0095969289827255271, 0.0096153846153846159, 0.009643201542912247, 0.0095969289827255271, 0.0096061479346781949, 0.0095969289827255271, 0.0099009900990099011, 0.0095969289827255271, 0.0095877277085330784, 0.0095785440613026813, 0.0096153846153846159, 0.0096525096525096523, 0.0096153846153846159, 0.0095877277085330784, 0.0095785440613026813, 0.010131712259371834, 0.0096899224806201549, 0.0095877277085330784, 0.0096153846153846159, 0.0096246390760346481, 0.0096153846153846159, 0.0096993210475266739, 0.0095877277085330784], "pypy_op_count": 59966}]], "branch": "trunk", "revision": 0} \ No newline at end of file diff --git a/talk/icooolps2011/benchmarks/benchmarks.gnumeric b/talk/icooolps2011/benchmarks/benchmarks.gnumeric new file mode 100644 index 0000000000000000000000000000000000000000..6fde41d4cf7bb53c74b65b8279243590c39a4838 GIT binary patch [cut] diff --git a/talk/icooolps2011/benchmarks/paper-no-version.json b/talk/icooolps2011/benchmarks/paper-no-version.json new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/benchmarks/paper-no-version.json @@ -0,0 +1,1 @@ +{"options": "", "results": [["crypto_pyaes", "RawResult", {"changed_times": [0.85660719871500002, 0.25109004974400001, 0.26645398139999998, 0.26426911354100002, 0.24821281433100001, 0.22526907920799999, 0.22687101364100001, 0.230090141296, 0.23683094978300001, 0.26407194137599999, 0.241759061813, 0.228569030762, 0.23997998237599999, 0.24881386756900001, 0.23091697692900001, 0.22572493553199999, 0.23083400726299999, 0.23484611511199999, 0.27470088005100002, 0.22409009933499999, 0.22573399543799999, 0.22727704048200001, 0.22345495224, 0.22484588623000001, 0.22653484344499999, 0.23255515098599999, 0.23176980018599999, 0.256299972534, 0.224644899368, 0.232671976089, 0.25687217712400001, 0.22961306572000001, 0.22844290733299999, 0.22646903991699999, 0.239099979401, 0.38659214973400002, 0.25287604332000002, 0.305903911591, 0.22831511497500001, 0.23446798324599999, 
0.23727679252600001, 0.24697995185900001, 0.26321220397900003, 0.30946302413900001, 0.23587203025799999, 0.28367996215800001, 0.25468206405600002, 0.22411704063400001, 0.224369049072, 0.233715057373], "base_times": [2.95118713379, 2.9746890068099998, 2.92976403236, 2.9452641010299998, 2.9651250839199998, 2.9688098430599998, 2.9744441509200001, 2.97583580017, 2.9489080905899998, 2.91677689552, 2.9242839813199999, 2.9339048862500001, 2.9542081356000001, 2.9471797943100002, 3.0127420425400002, 2.9698240757000001, 3.0563960075400001, 2.95825481415, 2.9417591095, 2.95723295212, 2.9287509918199999, 2.9516119957, 2.9591209888500001, 2.9289331436200001, 3.0025730133100001, 2.9620790481600001, 2.9323070049300002, 2.95103597641, 2.9657509326899998, 2.96019887924, 2.9291179180100002, 2.9258801937099999, 2.9316611290000001, 2.9406158924099999, 2.94563484192, 2.9362061023699999, 2.93843007088, 2.9407470226300001, 2.9225850105300002, 2.94923400879, 2.9675500392899998, 3.0097930431400002, 2.9922630786900002, 2.9343569278700001, 2.9358370304100001, 2.93531298637, 2.942029953, 2.9586639404300001, 2.9446339607200001, 2.9721648693099998], "pypy_op_count": 14046}], ["django", "RawResult", {"changed_times": [0.407593011856, 0.41135716438300002, 0.40730595588700003, 0.41726803779600002, 0.38440489769000002, 0.38599610328700001, 0.380656003952, 0.37635588645899998, 0.37482500076300002, 0.38165783882100002, 0.370150089264, 0.40519595146199999, 0.36679697036699999, 0.375, 0.36616206169100002, 0.363984107971, 0.37545204162599999, 0.36812806129499998, 0.363806009293, 0.37058019638099998, 0.38762021064800001, 0.37914204597500001, 0.40493202209500001, 0.41710805892899999, 0.41025805473299998, 0.429105997086, 0.392134904861, 0.36547994613599999, 0.36286902427700002, 0.37187910079999997, 0.37227296829200002, 0.370716810226, 0.36868095397900003, 0.391187906265, 0.36856198310900001, 0.382675886154, 0.378391981125, 0.37215113639800002, 0.37785315513599999, 0.376590967178, 0.36604809761000001, 
0.37839484214800001, 0.36568999290499998, 0.36654806137099999, 0.37874388694799999, 0.36946606636000001, 0.379332065582, 0.36711096763599999, 0.40703082084699999, 0.40616917610199998], "base_times": [1.0226759910600001, 1.0134680271100001, 1.0094950199099999, 1.0109300613400001, 1.14689588547, 1.1010918617200001, 1.0280420780199999, 1.0567359924299999, 1.00924396515, 0.99798607826200003, 1.0025069713600001, 1.05320596695, 1.03612494469, 0.99561715126000006, 0.99825000762899996, 1.0139150619499999, 0.99813008308399997, 1.0111601352699999, 1.0119559764899999, 1.0103020668, 1.0020368099200001, 1.0086209773999999, 0.99736905097999995, 0.997425079346, 0.99537086486799997, 0.99607181549099999, 0.99438095092800005, 1.0023310184500001, 0.99667096138, 0.99634790420499997, 0.99447798728900005, 0.99665188789400005, 0.99618005752600003, 1.00025486946, 0.99415612220799998, 0.99805307388300002, 0.99549102783200005, 0.99680113792400005, 0.996330976486, 1.0003499984699999, 0.99467611312899995, 0.995177030563, 0.99604010582000002, 0.99532794952400006, 0.99472689628599997, 0.99493598938000005, 0.99426794052099998, 0.99490714073200004, 0.99573802948000001, 0.99564909935000001], "pypy_op_count": 12609}], ["fannkuch", "RawResult", {"changed_times": [0.54783511161800003, 0.40379786491399999, 0.401130914688, 0.40693402290300001, 0.40010213851900001, 0.398326158524, 0.39701294899, 0.396836042404, 0.39628291130100002, 0.39734387397799997, 0.39560008049000001, 0.39564681053200002, 0.39718198776199998, 0.40202188491800001, 0.400449991226, 0.39852714538599998, 0.39627003669700001, 0.39705395698500001, 0.39464116096500002, 0.39573907852200002, 0.39660811424300002, 0.39483594894399998, 0.40028595924400001, 0.39557194709799998, 0.39540195465099998, 0.39352893829300001, 0.39560508727999999, 0.40281701087999999, 0.39322996139499999, 0.39455413818399998, 0.39709711074800003, 0.39430522918700001, 0.39379692077599998, 0.393843889236, 0.39732885360699999, 0.393801927567, 0.394963979721, 
0.39444684982299999, 0.39454388618500003, 0.422821998596, 0.39703083038300002, 0.393558979034, 0.39373493194600001, 0.39363002777099998, 0.39424920082100001, 0.39391207695000002, 0.394250154495, 0.39628601074199998, 0.393650054932, 0.394150972366], "base_times": [1.96565008163, 1.96358084679, 1.9657340049700001, 1.9662079811099999, 1.9633557796500001, 1.9605541229200001, 1.9689118862199999, 1.99235606194, 1.99616289139, 2.0379829406700001, 1.98708796501, 1.9701840877500001, 1.9738221168500001, 2.0010590553299998, 2.1197910308800001, 2.0885350704199999, 1.97554397583, 2.0195069313, 2.0578560829199999, 2.0299439430200001, 2.0169620513900002, 1.96816086769, 1.9554419517499999, 1.9587059021, 1.9706389903999999, 1.96200299263, 1.9557890892000001, 1.9645969867699999, 1.9544169902799999, 1.9552989005999999, 1.96198511124, 1.9532461166399999, 1.95332384109, 1.96204090118, 1.95293807983, 1.9540951252000001, 1.9616918563800001, 1.9545300006899999, 1.9541399478899999, 1.9610979556999999, 1.9537088870999999, 1.95520186424, 1.96393203735, 1.9539270401, 1.95707201958, 1.9638590812700001, 1.9524040222200001, 1.95817089081, 1.9639239311200001, 1.95282506943], "pypy_op_count": 4169}], ["go", "RawResult", {"changed_times": [0.55670309066799994, 0.69546198844899998, 0.50835895538300002, 0.61876583099399995, 0.50758814811700004, 0.54629898071299998, 0.52005887031599995, 0.564098834991, 0.48538398742700001, 0.494337081909, 0.51523685455299995, 0.57939195632899998, 0.45562791824299997, 0.55300307273899996, 0.49721884727499999, 0.41302704811099999, 0.41061592102099997, 0.47412204742399999, 0.45734405517600002, 0.42186212539700002, 0.54384589195300004, 0.46552205085800002, 0.47689890861500001, 0.44314694404600002, 0.436156988144, 0.44991993904100003, 0.430601119995, 0.51746106147799997, 0.46658205986000001, 0.50872802734400002, 0.42195010185199999, 0.406759977341, 0.41242504119899998, 0.41428184509299998, 0.43706083297699999, 0.44568800926199997, 0.427667140961, 0.427916049957, 
0.40931391715999998, 0.46223688125599999, 0.39105701446500002, 0.39657497405999997, 0.40410494804399999, 0.41864991188, 0.44163799285900002, 0.44532203674300003, 0.41887688636800002, 0.39597201347400002, 0.39941596984900002, 0.432480096817], "base_times": [0.94275093078600003, 0.94737291336100005, 0.94530296325700003, 0.94302606582600002, 0.94317698478699996, 0.94366908073400002, 0.94220113754299994, 0.94911313056900004, 0.94787907600400001, 0.952267169952, 0.95295310020400004, 0.94901013374300003, 0.94355821609500001, 0.94918394088699998, 0.94301795959500001, 0.94415211677599997, 0.94486212730399999, 0.94190502166699996, 0.94672203064000005, 0.94616103172300003, 0.94336009025599998, 0.94350886344899998, 0.94104003906199996, 0.95141983032199995, 0.94677495956400004, 0.96536707878100003, 0.94324088096600001, 0.93978691101099998, 0.94566988944999997, 0.94016504287699998, 0.95200586319000002, 0.95005297660800003, 0.94289088249200004, 0.94460988044700001, 0.94383192062400001, 0.94846010208099996, 0.94728279113799996, 0.94078707694999997, 0.94379806518599996, 0.94446706771900002, 0.94111990928599998, 0.94424080848699998, 0.95194506645200005, 0.94808506965600003, 0.94305801391599997, 0.94678306579600002, 0.94263887405400004, 0.95195889472999995, 0.94346690177899994, 0.941094875336], "pypy_op_count": 129099}], ["html5lib", "RawResult", {"changed_times": [14.8199660778, 10.706737995099999, 10.304239988300001, 10.1642229557, 10.143066883099999, 10.0018801689, 10.475807189899999, 10.0920028687, 9.9912219047500006, 9.8719079494499997, 9.7611570358299993, 10.0030069351, 10.0168190002, 9.7681109905200003, 9.9395668506600003, 9.9272329807300004, 9.9540989398999997, 9.8718581199600006, 9.9119620323199999, 9.7996621131899992, 10.045171976100001, 9.8548738956499999, 9.8529460430100002, 9.8699140548699997, 9.9297790527299998, 9.8516998290999993, 9.8401410579699995, 9.8348610401199998, 9.9134180545799993, 9.8594779968299999, 9.8355488777200009, 9.8422799110400003, 9.8731150627099993, 
9.9001691341400004, 9.8656699657400004, 9.8242979049700008, 9.8268721103699992, 9.8630769252799997, 9.8425250053400006, 9.9070279598200006, 9.8454899787900008, 9.8203971386000006, 9.7860939502700006, 9.7712509632100009, 9.7909479141200002, 9.8059241771700005, 9.7955400943799997, 9.8054561615000004, 9.8150610923800006, 9.8305990695999999], "base_times": [14.4665699005, 14.787790060000001, 14.601052045799999, 14.744522094700001, 14.751811027500001, 14.872506141700001, 14.6056640148, 14.5326700211, 14.552961826300001, 14.482889890699999, 14.520477056500001, 14.782648801800001, 14.7576990128, 14.526720047, 14.517261982000001, 14.5054080486, 14.466678142499999, 14.613055944399999, 14.6220519543, 14.594491958600001, 14.5681650639, 14.5162010193, 14.5017650127, 14.628161907200001, 14.630822181699999, 14.584771871599999, 14.5714728832, 14.547231912599999, 14.529563188599999, 14.6472890377, 14.642200946799999, 14.569794178, 14.561717033400001, 14.5249350071, 14.506607770900001, 14.666437864300001, 14.623661994900001, 14.6292998791, 14.5649518967, 14.5096549988, 14.515206813800001, 14.6548969746, 14.607022046999999, 14.581348180799999, 14.5426449776, 14.5474069118, 14.5219600201, 14.4813439846, 14.613568067599999, 14.5961921215], "pypy_op_count": 166028}], ["meteor-contest", "RawResult", {"changed_times": [0.39125895500199998, 0.305256843567, 0.29775500297500002, 0.29627394676199997, 0.29607701301599998, 0.29897022247299998, 0.30167293548599999, 0.292908906937, 0.29461503028899999, 0.29311490058900003, 0.29411005973799997, 0.29288792610199998, 0.312241077423, 0.29409599304200001, 0.29273295402499999, 0.29297900199900001, 0.294744968414, 0.305186986923, 0.29204797744799998, 0.29193305969200001, 0.29223799705499998, 0.29134392738300002, 0.29090404510500001, 0.29158687591600002, 0.291989088058, 0.29211592674300002, 0.29190587997400003, 0.29213595390300001, 0.29530119895899998, 0.29241394996600001, 0.29196000099199998, 0.29344511032100001, 0.30158686637900001, 
0.29143214225800002, 0.29167699813800002, 0.29213809966999998, 0.29569697380100002, 0.293179988861, 0.302819013596, 0.29169201850900001, 0.29149794578600002, 0.30438613891600003, 0.29123783111599999, 0.29173803329499998, 0.29136610031100002, 0.29133296012900001, 0.29149794578600002, 0.29206991195699999, 0.29118394851700002, 0.29199099540700002], "base_times": [0.34765696525599998, 0.34734988212599999, 0.34765100479099997, 0.35128021240200002, 0.34793090820299999, 0.34800696373000001, 0.34878993034400002, 0.34669995307899998, 0.34712195396399997, 0.34708309173599999, 0.34988093376200002, 0.34765815734900002, 0.34691905975300003, 0.34740209579499998, 0.34747791290300001, 0.34716796875, 0.34861993789700002, 0.34709882736199998, 0.34724092483500002, 0.34740400314300002, 0.34688115119899998, 0.34738016128499999, 0.34689903259299998, 0.34762477874800002, 0.34657502174400001, 0.35003590583799998, 0.36109685897799998, 0.34675884246799998, 0.34708809852599998, 0.34734010696399997, 0.34675812721299998, 0.34714412689200003, 0.35013604164099998, 0.34707999229399999, 0.34788894653300001, 0.34718084335299998, 0.34700298309299998, 0.34704089164700003, 0.34711098671000001, 0.34686899185199999, 0.34945011138900001, 0.34751892089800002, 0.34723496437099999, 0.34709191322299998, 0.34717702865599998, 0.34727191925000001, 0.34770107269299999, 0.34721708297699999, 0.34715199470500002, 0.34774684905999997], "pypy_op_count": 6996}], ["pyflate-fast", "RawResult", {"changed_times": [2.9336280822799998, 2.7260229587599998, 2.69612598419, 2.7473921775800001, 2.6622281074499998, 2.6487939357800001, 2.6407780647300001, 2.6393201351200002, 2.6538500785800001, 2.6305968761399998, 2.7002441883100001, 2.6253590583799999, 2.67698216438, 2.6386201381699999, 2.6217720508600002, 2.65109586716, 2.6514439582799998, 2.6154839992499999, 2.6436219215399999, 2.6367909908299998, 2.6433351039900002, 2.6556339263900002, 2.64738106728, 2.6475849151599999, 2.6414918899500002, 2.6471059322400001, 2.64943504333, 
2.6386830806699999, 2.6418809890700001, 2.6165759563400002, 2.6333000659899999, 2.62011599541, 2.64301490784, 2.62280106544, 2.6208510398899998, 2.6472628116600001, 2.64691090584, 2.6264300346399998, 2.6229889392899999, 2.6373569965399999, 2.6247351169600002, 2.6223349571200001, 2.6257328987099999, 2.62431001663, 2.6357960701000001, 2.6368370056199999, 2.6350429058099998, 2.6130440235100001, 2.6352710723900001, 2.6277658939399999], "base_times": [3.1858839988700001, 3.2330210208899999, 3.2228751182600002, 3.2038969993599999, 3.2087049484299999, 3.2155289649999999, 3.2001509666399999, 3.21138501167, 3.2287571430200002, 3.2224068641699999, 3.2240669727300002, 3.2130041122400002, 3.1999988555900001, 3.2111039161699999, 3.1994287967699999, 3.2080030441299998, 3.2097260952000002, 3.2443308830299999, 3.2008681297299999, 3.2264659404799998, 3.2006170749699998, 3.1997480392500002, 3.2113089561499999, 3.1964259147599998, 3.2031409740400001, 3.2646939754500002, 3.2095251083399998, 3.1986999511700001, 3.2019040584599998, 3.2138681411699999, 3.21081399918, 3.2096848487899998, 3.2060360908500001, 3.2242438793199999, 3.2055530548100002, 3.2049050331100002, 3.2005050182299999, 3.21747493744, 3.2119069099400002, 3.2022020816799999, 3.20810699463, 3.2195401191699999, 3.2008290290799999, 3.2268478870399999, 3.1975791454300002, 3.2149081230199998, 3.22310090065, 3.205078125, 3.2130398750300002, 3.2354979515100002], "pypy_op_count": 30517}], ["raytrace-simple", "RawResult", {"changed_times": [1.42172884941, 1.4523768425000001, 1.4166131019599999, 1.43039894104, 1.41783189774, 1.4185981750500001, 1.46636295319, 1.4397389888800001, 1.42330217361, 1.41751694679, 1.4113578796399999, 1.41164684296, 1.4322988987, 1.4254009723700001, 1.41054987907, 1.4191510677300001, 1.4134058952299999, 1.4123980999000001, 1.4495639801, 1.41085386276, 1.4700739383700001, 1.4080970287300001, 1.40893101692, 1.4085099697100001, 1.40990281105, 1.4086458683, 1.42362308502, 1.4067831039400001, 1.4118680954, 
1.4424681663500001, 1.40993595123, 1.41482210159, 1.4099900722500001, 1.4298179149600001, 1.40712118149, 1.4114291667900001, 1.4100201129900001, 1.40861296654, 1.4086999893200001, 1.40995907784, 1.40953087807, 1.40687513351, 1.4568691253699999, 1.4097790718100001, 1.40708613396, 1.41853809357, 1.4097981452899999, 1.40812301636, 1.4068689346300001, 1.435079813], "base_times": [2.6929850578300001, 2.7008759975399999, 2.6969349384300001, 2.6945369243599999, 2.70288705826, 2.6972000598900001, 2.6927421093000001, 2.7058789730099999, 2.7004828453099998, 2.6940310001399999, 2.6985640525800001, 2.6984570026400001, 2.6975989341700002, 2.70705103874, 2.70004582405, 2.7040450573000001, 2.7167949676499998, 2.7481319904300001, 2.7030627727500001, 2.7222740650200001, 2.7097630500799998, 2.7017130851700002, 2.7049491405500001, 2.7019391059900002, 2.6930129528000002, 2.70267915726, 2.6979541778599998, 2.6964271068599999, 2.70317316055, 2.7043521404300002, 2.6998660564399999, 2.7004528045699998, 2.6969769001000001, 2.6994950771299999, 2.7030401229900001, 2.69969916344, 2.6992092132600001, 2.7012040615099999, 2.7047250270799998, 2.6986148357399999, 2.7002339363100001, 2.7016398906700001, 2.6966798305499999, 2.69913887978, 2.69826197624, 2.7472999095900001, 2.70126199722, 2.6996040344200001, 2.69653892517, 2.7003519535099998], "pypy_op_count": 49019}], ["richards", "RawResult", {"changed_times": [0.38921499252300001, 0.40848493576, 0.37612414360000002, 0.36995291709900002, 0.37589097023000001, 0.37279701232899998, 0.37260603904700001, 0.38784599304200001, 0.37052488326999999, 0.37013506889300002, 0.37260913848900001, 0.37150597572299998, 0.37098598480200001, 0.37117815017700001, 0.37773084640499999, 0.381129026413, 0.37756896019000002, 0.39857006073000001, 0.38322901725800002, 0.39928102493299999, 0.37151002883899997, 0.37961983680700001, 0.39602184295699999, 0.37826395034799998, 0.388334989548, 0.37239098548900001, 0.37134599685699998, 0.39841699600199998, 0.36968493461599999, 
0.37443280220000003, 0.38061380386400001, 0.36869883537300002, 0.36913299560500001, 0.36869812011699998, 0.37549090385400002, 0.384240865707, 0.37975287437400002, 0.38205003738400001, 0.39467287063599998, 0.376003026962, 0.38235497474699998, 0.37459301948500001, 0.38037991523699999, 0.38209605216999998, 0.37744593620299999, 0.417228937149, 0.39831590652499999, 0.40871381759600001, 0.39757204055799999, 0.38842010498000001], "base_times": [0.36929011344899998, 0.36319303512599999, 0.36975717544600001, 0.35680294036900001, 0.36748695373500001, 0.38347291946399997, 0.38819885253899999, 0.37669301033000002, 0.37662100791899999, 0.42647099494899998, 0.503195047379, 0.42110991477999998, 0.42115807533299998, 0.37781286239599998, 0.39006710052499999, 0.46600699424699998, 0.38637280464200002, 0.36604809761000001, 0.46971583366399999, 0.394068956375, 0.35752010345500002, 0.37415003776599998, 0.36700391769399998, 0.36034679412800003, 0.369906902313, 0.40583395957899998, 0.42960190773000001, 0.36803388595600001, 0.36554813385000001, 0.35456085205100002, 0.36160111427300001, 0.36386299133299999, 0.348219156265, 0.35932707786599999, 0.40295696258500002, 0.365478038788, 0.35353589057899998, 0.39382195472699999, 0.43966794013999999, 0.357973098755, 0.35619902610800003, 0.35322117805499997, 0.407044887543, 0.366625070572, 0.38970613479600003, 0.37186503410299998, 0.388589859009, 0.36379194259600001, 0.36491799354600002, 0.36309003829999997], "pypy_op_count": 21483}], ["spambayes", "RawResult", {"changed_times": [0.37085604667700001, 0.449714899063, 0.43818902969399998, 0.45964503288300002, 0.39759683609000002, 0.327363014221, 0.373461008072, 0.33009004592899999, 0.288415908813, 0.25380206108100001, 0.29265499114999999, 0.31383991241499998, 0.25973296165499998, 0.276574134827, 0.463959932327, 0.42528295516999998, 0.31899809837300003, 0.24085998535200001, 0.27500414848299998, 0.21268701553300001, 0.20349502563499999, 0.27293515205399999, 0.217705011368, 0.20174884796100001, 
0.27232503890999998, 0.23572087287900001, 0.243690013885, 0.25267601013199997, 0.203569889069, 0.19889497756999999, 0.27288603782699999, 0.247258901596, 0.21203303337099999, 0.24201416969299999, 0.21479105949399999, 0.232662916183, 0.26705980300900001, 0.23874998092700001, 0.19264793395999999, 0.191231012344, 0.192067146301, 0.222138881683, 0.20613908767700001, 0.22350716590899999, 0.25135397911099999, 0.19591498374899999, 0.18741703033400001, 0.18515801429699999, 0.21708202362099999, 0.23711204528800001], "base_times": [0.33422708511400001, 0.32084798812900001, 0.31727600097699998, 0.34619307517999998, 0.31283521652200003, 0.41850996017499997, 0.38115787506100002, 0.43639492988599998, 0.46454811096199999, 0.39815282821699999, 0.33398604392999998, 0.39315700530999997, 0.30895090103099998, 0.310653924942, 0.31494808197000002, 0.30306100845299999, 0.30477905273400002, 0.44053101539599998, 0.34001016616800001, 0.42033720016499998, 0.33272194862400001, 0.33113598823500001, 0.33127808570900003, 0.34944200515700002, 0.33303189277599998, 0.34277606010400002, 0.341919183731, 0.35174393653899999, 0.35507202148400002, 0.31040811538700003, 0.35218214988699997, 0.31959795951800002, 0.34789395332299999, 0.31583809852599998, 0.31663799285900002, 0.32636308670000003, 0.40610194206200001, 0.347922086716, 0.36162686348, 0.35591888427700002, 0.41992783546399998, 0.33234691619899998, 0.36476802825900001, 0.32945489883399998, 0.422051906586, 0.37588191032399998, 0.34078192710900002, 0.33825683593799999, 0.31020188331600002, 0.308086872101], "pypy_op_count": 74938}], ["spectral-norm", "RawResult", {"changed_times": [0.11298799514799999, 0.048493862152099998, 0.047081947326699999, 0.044140100479099997, 0.045753002166700002, 0.0442399978638, 0.0418028831482, 0.046712875366200002, 0.044949054718000002, 0.046372890472400001, 0.041565895080599997, 0.038805007934599997, 0.039693832397499997, 0.040109872817999999, 0.040932178497299999, 0.0415549278259, 0.044153928756700003, 
0.040276050567600002, 0.0395691394806, 0.0391969680786, 0.038546800613399997, 0.0392980575562, 0.038042068481400002, 0.0373330116272, 0.038211107254000003, 0.041094064712500003, 0.038785934448199999, 0.039836883544899997, 0.039690971374499999, 0.0383570194244, 0.039561986923200003, 0.0389280319214, 0.038964986801099999, 0.0391237735748, 0.040608882904100002, 0.039892911911000001, 0.043267011642499999, 0.045034885406499997, 0.0438969135284, 0.044118881225600003, 0.044566154480000002, 0.044809103012099998, 0.043088912963900002, 0.039181947708099997, 0.039462089538599997, 0.0385558605194, 0.041587114334099999, 0.0440001487732, 0.043517112731900003, 0.046321153640699997], "base_times": [0.52543711662299997, 0.53290581703200002, 0.53550910949700004, 0.54762792587300002, 0.584457874298, 0.51257610321000002, 0.51202011108400003, 0.53016495704699995, 0.52482008934000002, 0.51149320602399995, 0.51185202598599999, 0.50427293777500004, 0.49358701705899999, 0.49250912666300001, 0.49934816360500001, 0.49671888351400001, 0.51557683944699995, 0.52167820930499997, 0.51248502731300005, 0.50570583343499997, 0.509876966476, 0.52948999404899999, 0.54375600814799996, 0.50895595550499995, 0.50666904449500005, 0.50097918510399997, 0.50491905212400001, 0.50903892517100002, 0.50355887413, 0.50248503685000001, 0.49978804588300002, 0.51629614830000004, 0.51909399032600001, 0.50487399101300001, 0.50982809066799994, 0.50312399864199997, 0.50102281570399998, 0.50176215171799998, 0.51172208785999995, 0.52313280105600002, 0.50979900360099994, 0.49791312217700001, 0.496323108673, 0.49450111389200002, 0.49474310874900002, 0.49678802490200002, 0.493948936462, 0.49794697761500001, 0.50429987907399998, 0.50727891922000001], "pypy_op_count": 3298}], ["telco", "RawResult", {"changed_times": [1.092068, 0.91205700000000001, 0.91205700000000001, 0.89205500000000004, 0.87205500000000002, 0.89605599999999996, 0.904057, 0.85605299999999995, 0.85205299999999995, 0.84805399999999997, 0.83605200000000002, 
0.83605099999999999, 0.84405399999999997, 0.876054, 0.89205599999999996, 0.85605399999999998, 0.83205200000000001, 0.83205200000000001, 0.85605299999999995, 0.84005300000000005, 0.84405200000000002, 0.84805299999999995, 0.86405399999999999, 0.82805200000000001, 0.82805200000000001, 0.85205399999999998, 0.82805099999999998, 0.820052, 0.82405099999999998, 0.86005399999999999, 0.82005099999999997, 0.86805399999999999, 0.82405200000000001, 0.82405099999999998, 0.82405200000000001, 0.83605200000000002, 0.82405099999999998, 0.83205200000000001, 0.83605300000000005, 0.82005099999999997, 0.85205399999999998, 0.82005099999999997, 0.84005300000000005, 0.82405099999999998, 0.82405200000000001, 0.82805099999999998, 0.83605200000000002, 0.83605300000000005, 0.82405099999999998, 0.83605200000000002], "base_times": [1.22, 1.22, 1.21, 1.22, 1.22, 1.22, 1.21, 1.21, 1.22, 1.2, 1.22, 1.21, 1.21, 1.21, 1.21, 1.22, 1.21, 1.21, 1.21, 1.21, 1.21, 1.22, 1.21, 1.22, 1.22, 1.22, 1.21, 1.21, 1.23, 1.22, 1.2, 1.22, 1.21, 1.22, 1.21, 1.21, 1.21, 1.21, 1.21, 1.22, 1.24, 1.23, 1.21, 1.23, 1.22, 1.22, 1.22, 1.23, 1.21, 1.22], "pypy_op_count": 38249}], ["twisted_names", "RawResult", {"changed_times": [0.0072358900144717797, 0.0069492703266157054, 0.0070224719101123594, 0.0072621641249092234, 0.0071839080459770114, 0.0071174377224199285, 0.0070472163495419312, 0.0070126227208976155, 0.0074682598954443615, 0.007052186177715092, 0.0069156293222683261, 0.0072939460247994168, 0.0068728522336769758, 0.0073691967575534268, 0.007215007215007215, 0.0074404761904761901, 0.0068587105624142658, 0.007199424046076314, 0.008539709649871904, 0.0083125519534497094, 0.0074349442379182153, 0.0085689802913453302, 0.0082850041425020712, 0.0084745762711864406, 0.0078926598263614842, 0.0082850041425020712, 0.0073367571533382242, 0.0068775790921595595, 0.0070028011204481795, 0.0070028011204481795, 0.0072254335260115606, 0.0068587105624142658, 0.0068870523415977963, 0.0068587105624142658, 0.0071123755334281651, 
0.0071787508973438618, 0.0069637883008356544, 0.0068917987594762234, 0.0068587105624142658, 0.0070224719101123594, 0.0071174377224199285, 0.0068775790921595595, 0.0069252077562326868, 0.007102272727272727, 0.0068259385665529011, 0.0071073205401563609, 0.0068306010928961746, 0.0068446269678302529, 0.0071225071225071226, 0.0073583517292126564], "base_times": [0.0095785440613026813, 0.0096061479346781949, 0.0096061479346781949, 0.0096246390760346481, 0.009643201542912247, 0.0096153846153846159, 0.0095877277085330784, 0.0096339113680154135, 0.009643201542912247, 0.0096899224806201549, 0.0096525096525096523, 0.0096153846153846159, 0.0096711798839458421, 0.0096061479346781949, 0.0096339113680154135, 0.0096339113680154135, 0.0096339113680154135, 0.0096525096525096523, 0.0096339113680154135, 0.0096153846153846159, 0.0096525096525096523, 0.0096246390760346481, 0.0096525096525096523, 0.0096618357487922701, 0.0096153846153846159, 0.0096339113680154135, 0.0096246390760346481, 0.0096525096525096523, 0.0095969289827255271, 0.0096339113680154135, 0.0096153846153846159, 0.009643201542912247, 0.0096153846153846159, 0.0096618357487922701, 0.0096246390760346481, 0.0096339113680154135, 0.0096339113680154135, 0.0096153846153846159, 0.0096153846153846159, 0.0096339113680154135, 0.009643201542912247, 0.0096711798839458421, 0.0096246390760346481, 0.0096153846153846159, 0.0096153846153846159, 0.0096153846153846159, 0.0095693779904306216, 0.0096339113680154135, 0.0096061479346781949, 0.0096618357487922701], "pypy_op_count": 59543}]], "branch": "trunk", "revision": 0} \ No newline at end of file diff --git a/talk/icooolps2011/benchmarks/paper-no-map.json b/talk/icooolps2011/benchmarks/paper-no-map.json new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/benchmarks/paper-no-map.json @@ -0,0 +1,1 @@ +{"options": "", "results": [["crypto_pyaes", "RawResult", {"changed_times": [0.80070614814800001, 0.24446511268599999, 0.21440696716300001, 0.21005606651299999, 0.191078901291, 
0.189896821976, 0.192066192627, 0.21261715888999999, 0.190570116043, 0.18950009345999999, 0.18856883049000001, 0.189228057861, 0.19276595115699999, 0.19563889503500001, 0.188343048096, 0.19165682792700001, 0.21585011482200001, 0.18937397003199999, 0.18807387351999999, 0.18882012367199999, 0.18847298622100001, 0.18661999702500001, 0.185885906219, 0.18708920478800001, 0.18743515014600001, 0.18803501129200001, 0.21667003631599999, 0.18657207488999999, 0.18675780296300001, 0.187005996704, 0.189768791199, 0.19051098823500001, 0.19009208679199999, 0.18730592727699999, 0.18733811378500001, 0.21488809585599999, 0.186767101288, 0.186673879623, 0.18684887886000001, 0.18790793418900001, 0.18985700607299999, 0.18768811225900001, 0.186553001404, 0.189339876175, 0.216083049774, 0.187035083771, 0.18761587142899999, 0.184970140457, 0.186705112457, 0.18705582618700001], "base_times": [2.8761439323400002, 2.8909349441500001, 2.88596701622, 2.8887419700599999, 2.8626790046699999, 2.8431601524399999, 2.8557169437400001, 2.8636648654900001, 2.818557024, 2.8199019432100001, 2.8230979442600002, 2.8212609291100001, 2.8217089176200001, 2.8451290130600002, 2.8186268806500001, 2.8221819400800001, 2.8221209049199998, 2.81760311127, 2.8180530071300001, 2.8321051597600002, 2.8173627853399998, 2.8177630901300001, 2.82012104988, 2.81740999222, 2.8193418979599998, 2.8263080120100001, 2.82064890862, 2.81820297241, 2.8220720291100001, 2.81714200974, 2.81961297989, 2.8539819717400001, 2.8188560008999999, 2.8181068897200001, 2.8248088359799999, 2.8201129436499999, 2.8196671009099998, 2.8228759765600002, 2.8181128501899999, 2.8209750652299999, 2.8236408233599999, 2.8188800811800001, 2.8190958499900001, 2.823595047, 2.8195970058399999, 2.8208651542699998, 2.8229200840000002, 2.8206491470300001, 2.8207530975299999, 2.8221518993400001], "pypy_op_count": 12321}], ["django", "RawResult", {"changed_times": [0.169461011887, 0.157292842865, 0.167876005173, 0.16859984397899999, 0.157007932663, 
0.16661214828500001, 0.16709780693099999, 0.158396959305, 0.16764497756999999, 0.16780114173899999, 0.15792322158800001, 0.16705608367899999, 0.16804099082900001, 0.158174991608, 0.167434930801, 0.167335987091, 0.158180952072, 0.16768193244900001, 0.16715288162200001, 0.15923190116899999, 0.16807389259300001, 0.166764974594, 0.15653395652800001, 0.165813922882, 0.16592597961399999, 0.156168937683, 0.16561484336900001, 0.165637969971, 0.15627408027600001, 0.16550803184499999, 0.16584014892599999, 0.15660881996199999, 0.165728092194, 0.16585898399400001, 0.15640401840199999, 0.16607403755200001, 0.165366888046, 0.15688419342000001, 0.16626811027499999, 0.16564798355099999, 0.156342029572, 0.166208982468, 0.165621042252, 0.15766906738299999, 0.165865182877, 0.165719032288, 0.156305074692, 0.16582894325299999, 0.16564798355099999, 0.15607190132099999], "base_times": [0.99104809761000001, 0.99019002914400001, 0.98981404304499998, 0.98979902267499997, 0.99461102485700004, 1.00590491295, 0.99995803832999997, 0.995641946793, 0.99006295204200001, 0.99033617973300003, 0.98937892913799996, 0.99356412887599999, 0.98993802070600001, 0.99835586547900002, 0.99995708465599997, 0.98998999595600001, 0.99045300483700005, 0.99191999435400002, 0.98934102058399997, 0.99088382720900003, 0.99093604087800002, 0.99022388458300004, 0.99039411544800005, 0.98973512649499995, 0.98986792564399995, 0.99014210701000005, 0.98989295959500001, 0.990818977356, 0.98961305618299999, 0.99155688285800003, 0.98957419395400004, 0.99049687385600005, 0.991128921509, 0.99027013778700002, 0.99024200439499999, 0.99034094810499995, 0.99034786224399995, 0.99037384986900001, 0.99356508254999998, 0.98970007896400003, 0.99044799804700001, 0.99251890182500002, 0.99049592018099997, 0.99095106124900001, 0.99109697341900005, 0.98958420753499998, 0.98957490921000002, 0.99025392532300005, 0.99014616012599999, 0.99147486686700004], "pypy_op_count": 9505}], ["fannkuch", "RawResult", {"changed_times": [0.52387905120800005, 
0.38838100433299999, 0.389425992966, 0.38214898109399997, 0.38263297080999997, 0.38314104080200001, 0.38060712814300002, 0.38057994842499998, 0.380237102509, 0.37955498695399997, 0.38240909576400001, 0.379508972168, 0.38250398635900001, 0.38622903823900001, 0.38931012153599998, 0.39118909835799998, 0.383769989014, 0.38249492645299998, 0.381869792938, 0.37905693054200001, 0.38123798370400003, 0.38011598587000001, 0.37776613235500001, 0.37988996505700001, 0.378247976303, 0.37826800346400002, 0.37909913063, 0.38092207908600001, 0.37733602523799997, 0.37835693359400002, 0.379323959351, 0.378121852875, 0.37631583213800002, 0.379039049149, 0.378340005875, 0.38104391098000001, 0.38127088546799998, 0.38100600242600002, 0.37706613540599998, 0.37875008583100001, 0.37791609764099998, 0.37844800949099999, 0.37935900688200003, 0.37850189209000001, 0.37847399711599999, 0.379024982452, 0.37935996055600002, 0.37818908691399999, 0.37788605689999999, 0.38137888908399997], "base_times": [1.89367079735, 1.89767789841, 1.89520311356, 1.89131212234, 1.8964869976000001, 1.8933100700400001, 1.89653587341, 1.89336800575, 1.89290499687, 1.8968389034299999, 1.88807201385, 1.89131402969, 1.89660310745, 1.8883712291700001, 1.89316296577, 1.8990199565899999, 1.89097595215, 1.8341188430799999, 1.8367011547100001, 1.8353900909400001, 1.83232498169, 1.8348801135999999, 1.8327097892799999, 1.83133506775, 1.8352148532899999, 1.8340818882000001, 1.83196520805, 1.83504509926, 1.8337571620899999, 1.8321568965899999, 1.85116481781, 1.8379881382000001, 1.8337421417199999, 1.83944416046, 1.83492207527, 1.8336958885200001, 1.83386111259, 1.83364701271, 1.8323760032700001, 1.8355028629300001, 1.83366513252, 1.8313841819800001, 1.83526396751, 1.8332481384299999, 1.83197116852, 1.8381581306500001, 1.83404302597, 1.8376998901399999, 1.83504390717, 1.8367159366600001], "pypy_op_count": 4169}], ["go", "RawResult", {"changed_times": [0.38098311424300002, 0.42700386047400002, 0.30936503410299998, 0.300760030746, 
0.33371305465700002, 0.29512786865200002, 0.32638502121000001, 0.298727989197, 0.267471075058, 0.29366803169299999, 0.272849082947, 0.37071084976200003, 0.25533413887, 0.257368087769, 0.33718800544700001, 0.252283096313, 0.250221967697, 0.273751020432, 0.26554107665999999, 0.27126598358199999, 0.24909901618999999, 0.251616954803, 0.255005121231, 0.25034594535799998, 0.244755983353, 0.24308109283400001, 0.26249217987099999, 0.27031993865999998, 0.25340199470500002, 0.25624299049400001, 0.26856517791700002, 0.24330210685699999, 0.24602603912400001, 0.27459907531700001, 0.28719305992100003, 0.23903799057, 0.241873979568, 0.26826500892600003, 0.24237203598000001, 0.24613595008899999, 0.26453518867499998, 0.237096071243, 0.23868608474700001, 0.237158060074, 0.24065089225799999, 0.23887395858800001, 0.23681402206400001, 0.25986003875699998, 0.23849296569799999, 0.23707795143099999], "base_times": [0.93691492080700001, 0.93829798698400002, 0.93567395210299997, 0.93827009201, 0.93328309059100001, 0.93655514717099997, 0.93545007705700001, 0.94270396232599996, 0.93520712852499999, 0.93657517433199999, 0.94970107078599997, 0.93767499923700004, 0.93678498268099997, 0.94195103645300005, 0.94256520271300004, 0.94166398048400002, 0.93724298477199997, 0.93795490264900006, 0.94096207618700001, 0.93670392036399996, 0.93974900245699999, 0.93673920631399998, 0.93699407577500005, 0.94122409820599995, 0.940443992615, 0.93337392806999997, 0.93855500221300003, 0.93602800369299999, 0.93665480613700003, 0.93470001220700005, 0.93879508972199999, 0.94126296043400004, 0.93706107139600003, 0.94096899032600001, 0.93657302856400004, 0.93728089332599995, 0.94052004814099999, 0.93431997299199998, 0.94318103790300001, 0.93863201141399999, 0.93873286247300003, 0.93390798568699995, 0.94470000266999998, 0.939548969269, 0.93657493591300001, 0.93600392341600003, 0.93603301048300003, 0.94028210639999998, 0.93971800804100003, 0.93484497070299999], "pypy_op_count": 63979}], ["html5lib", "RawResult", 
{"changed_times": [11.1823220253, 7.7326879501299999, 7.3597109317799996, 7.0179579258000002, 6.9909389018999999, 6.8228709697700003, 6.9214730262800002, 6.8393721580499998, 6.7466871738399998, 6.7032911777499997, 6.66130614281, 6.8551890850100001, 6.6614270210299997, 6.6187050342599996, 6.7008531093599997, 6.6650779247300003, 6.6702318191499996, 6.6326270103500002, 6.7283911705000001, 6.6403000354800001, 6.8122789859799999, 6.6162760257700004, 6.6475908756299997, 6.6290700435599996, 6.7487308978999998, 6.65472507477, 6.6219398975399999, 6.6210329532600003, 6.6486320495599998, 6.6354758739499999, 6.6304919719699997, 6.6075370311699997, 6.6708731651299997, 6.6109941005700001, 6.6679408550300003, 6.6067049503300002, 6.6070420741999998, 6.62789797783, 6.6048879623400003, 6.6482529640200001, 6.6356680393199996, 6.62024402618, 6.6268410682700001, 6.60153079033, 6.6115880012500003, 6.6211459636700001, 6.62899804115, 6.5713610649099996, 6.57791996002, 6.5746579170199997], "base_times": [14.3756690025, 14.588768958999999, 14.542870044700001, 14.5313310623, 14.514137029600001, 14.680436134300001, 14.638053894, 14.5883600712, 14.5617930889, 14.6932079792, 14.5285651684, 14.6771299839, 14.637875080100001, 14.559569120400001, 14.5244939327, 14.5072751045, 14.493720054600001, 14.662851095200001, 14.631731987, 14.8587107658, 14.5584938526, 14.5348029137, 14.5132460594, 14.6536669731, 14.6300861835, 14.604119062400001, 14.621867895099999, 14.5418081284, 14.515989065199999, 14.6539049149, 14.6713581085, 14.5698750019, 14.550272941599999, 14.519284963600001, 14.524355888400001, 14.673545837400001, 14.6487028599, 14.6070568562, 14.555631876, 14.5273010731, 14.513236045799999, 14.640599012399999, 14.618520975099999, 14.634526967999999, 14.568900108299999, 14.5590200424, 14.516989946400001, 14.4869439602, 14.678426980999999, 14.601620197300001], "pypy_op_count": 123698}], ["meteor-contest", "RawResult", {"changed_times": [0.389729976654, 0.30653190612800002, 0.30098009109500001, 
0.29808712005600002, 0.29854393005399998, 0.305530071259, 0.30291104316700002, 0.29515194892899999, 0.29600310325599999, 0.29558587074300002, 0.29507207870500002, 0.29624199867200002, 0.31439614296000001, 0.29568696022000002, 0.29538607597400002, 0.30414581298799997, 0.29694294929499998, 0.295091867447, 0.29515194892899999, 0.29495692253099998, 0.29502797126800001, 0.29475212097199999, 0.29505491256700001, 0.294774055481, 0.29433202743499998, 0.29526400566099997, 0.30004000663800001, 0.29482817649800003, 0.30602288246199999, 0.29294109344500002, 0.29591894149800002, 0.30043578147900002, 0.29402518272400002, 0.30506992340099998, 0.293848991394, 0.29354810714700003, 0.29276084899900001, 0.294133901596, 0.29360413551300002, 0.29375696182299998, 0.29432392120400003, 0.30099511146500002, 0.294914007187, 0.294219017029, 0.29416489601099999, 0.30054616928099998, 0.294075012207, 0.29369592666599997, 0.29366111755399998, 0.29602789878800001], "base_times": [0.34793114662199998, 0.347922086716, 0.34756803512599999, 0.34772706031799999, 0.34797191619899998, 0.34859108924900001, 0.34831404686, 0.34751319885300003, 0.34788608551, 0.34809017181399998, 0.35126709938, 0.34787893295299999, 0.34783411026, 0.34863495826699997, 0.34818983077999999, 0.34882092475900001, 0.34761095047000001, 0.34802293777499999, 0.34821581840499999, 0.347784996033, 0.34780478477499999, 0.34858989715599997, 0.34791779518100002, 0.34797191619899998, 0.37295889854399999, 0.35242199897799997, 0.34825491905200001, 0.34745788574199998, 0.34793901443500003, 0.34815001487699998, 0.347759962082, 0.347604990005, 0.34786200523400002, 0.347841024399, 0.34818601608299998, 0.348232984543, 0.34745812416100003, 0.34798908233600001, 0.34835290908799998, 0.34737396240200002, 0.35109686851499999, 0.34818506240800001, 0.348004102707, 0.34782600402800001, 0.348240852356, 0.34802198410000001, 0.34789705276499999, 0.34830498695399997, 0.348255157471, 0.34893512725800002], "pypy_op_count": 6693}], ["pyflate-fast", "RawResult", 
{"changed_times": [1.77924084663, 1.7499718666099999, 1.7553339004499999, 1.77309989929, 1.73506999016, 1.73821187019, 1.7857708931, 1.7435629367800001, 1.7530391216300001, 1.7554440498399999, 1.7483851909599999, 1.7532851696, 1.7102479934699999, 1.7559821605699999, 1.7220840454099999, 1.72474503517, 1.7562379836999999, 1.72131514549, 1.7181310653699999, 1.72855496407, 1.74309301376, 1.72148799896, 1.7168669700600001, 1.7234990596799999, 1.72578811646, 1.73958301544, 1.70735192299, 1.7177979946099999, 1.72203207016, 1.7393941879299999, 1.7177820205700001, 1.7095940113100001, 1.7220859527600001, 1.75940990448, 1.71599507332, 1.7418971061699999, 1.7417228221900001, 1.7429790496799999, 1.7330031395000001, 1.7521979808799999, 1.7293059825899999, 1.7236788272900001, 1.74909305573, 1.72528004646, 1.73950982094, 1.7200028896299999, 1.74826383591, 1.72047710419, 1.7398540973700001, 1.7208921909299999], "base_times": [3.19568920135, 3.2313849925999998, 3.3100800514199999, 3.3316271305099998, 3.2652699947400001, 3.25343608856, 3.2147419452700001, 3.2222428321800001, 3.2211480140700002, 3.2460720539099999, 3.2167119979900001, 3.2129678726200002, 3.2051751613600001, 3.2165479659999998, 3.2282938957199998, 3.2154080867800001, 3.2234318256400001, 3.2816908359500001, 3.2110531330100001, 3.2252879142799999, 3.2141721248600001, 3.21491789818, 3.2171380519900001, 3.2112758159600001, 3.2129719257399998, 3.23816394806, 3.2312932014500002, 3.2158501148199998, 3.2248339653000002, 3.21015405655, 3.2103090286299998, 3.2301108837100001, 3.2142219543500001, 3.2345170974699999, 3.21375393867, 3.2142491340600001, 3.21567988396, 3.2130990028399999, 3.2259600162500002, 3.2109870910599998, 3.2319450378400001, 3.2421219348900001, 3.2158629894300002, 3.2190780639600001, 3.2185781001999998, 3.2217268943800002, 3.2166821956599998, 3.2219679355599999, 3.2297730445899999, 3.2674329280899999], "pypy_op_count": 25917}], ["raytrace-simple", "RawResult", {"changed_times": [0.336019039154, 
0.33502411842300001, 0.33585810661299997, 0.33099198341399999, 0.32965302467300001, 0.389579057693, 0.337717056274, 0.37618088722199999, 0.32250213623000001, 0.33570599556000003, 0.32359910011300003, 0.32325315475499999, 0.32395005226099999, 0.323508024216, 0.32349205017100002, 0.32281303405799999, 0.32240200042700001, 0.32346606254600002, 0.32351708412199998, 0.32211399078399999, 0.32435011863699997, 0.31972503662099999, 0.32125711440999999, 0.32106280326800002, 0.32382512092600002, 0.322746992111, 0.32273793220500002, 0.32166314125099998, 0.34030103683500001, 0.369266033173, 0.320456027985, 0.31931900978099997, 0.32179522514300002, 0.32028007507299999, 0.31924414634699999, 0.32179403305100002, 0.322135925293, 0.333813905716, 0.31983995437599999, 0.31937813758900002, 0.31884217262300002, 0.318766117096, 0.32013797760000001, 0.31986403465300001, 0.31865096092200001, 0.328010082245, 0.31907296180700001, 0.31895804405200001, 0.31893086433399997, 0.31869602203399999], "base_times": [2.7810299396499998, 2.7041540145899998, 2.75475311279, 2.7045049667400001, 2.75886893272, 2.7108929157300001, 2.7105798721299998, 2.70560097694, 2.7007820606199999, 2.7023158073400002, 2.7036619186399999, 2.7012720107999999, 2.7080771923100002, 2.7075979709600002, 2.70902681351, 2.7337849140200001, 2.7205278873399998, 2.7335181236300001, 2.7064399719200001, 2.7056748866999998, 2.7067968845400001, 2.7085909843399998, 2.7099859714500001, 2.75483298302, 2.7000720500900002, 2.75279092789, 2.7100269794499998, 2.70810413361, 2.70530700684, 2.7048008441900002, 2.7046880722000002, 2.7031819820399998, 2.7028548717500001, 2.7042701244399998, 2.7050521373700001, 2.7015271186800001, 2.7104179859199999, 2.7071681022599998, 2.7061531543699999, 2.70490193367, 2.7101418972000002, 2.75673794746, 2.7135739326500001, 2.7070910930599998, 2.7065651416800001, 2.7019891738899999, 2.7066369056699999, 2.70338106155, 2.7088000774399998, 2.71031498909], "pypy_op_count": 29242}], ["richards", "RawResult", 
{"changed_times": [0.079975128173799995, 0.054801940918000003, 0.056576013565100003, 0.052304029464700003, 0.052218914031999998, 0.0511269569397, 0.064401149749800005, 0.051860094070400001, 0.050696849822999997, 0.051559209823600002, 0.050716876983600001, 0.051267147064199999, 0.0522649288177, 0.050228118896500001, 0.049854993820199998, 0.050549030304000001, 0.050457954406699999, 0.0502300262451, 0.0510759353638, 0.052322149276700002, 0.050407886505100002, 0.050394058227500003, 0.050194025039700003, 0.051253080367999998, 0.050671100616500001, 0.050168991088900002, 0.050063133239699999, 0.051898956298800002, 0.050014972686799997, 0.0495669841766, 0.050374984741200002, 0.0496740341187, 0.051039218902599999, 0.050569772720299999, 0.0495622158051, 0.049981832504300001, 0.050631999969499998, 0.049989938736000002, 0.049149990081800002, 0.048947095870999997, 0.048960924148600003, 0.048808097839400003, 0.0493609905243, 0.049360036849999997, 0.048986911773700001, 0.049046039581300001, 0.048752069473299998, 0.050184965133700001, 0.049013853073100001, 0.049018144607500001], "base_times": [0.36576080322299998, 0.36790394783000002, 0.36396622657799999, 0.36132907867399999, 0.36440110206600002, 0.36524105072000002, 0.36377906799300003, 0.36550617217999998, 0.36175894737199998, 0.36562514305100002, 0.36367201805100002, 0.36227989196799998, 0.363169908524, 0.36447310447699999, 0.36764693260199999, 0.36525583267200001, 0.36775493621799998, 0.36778402328499998, 0.36435198783900002, 0.36510801315300001, 0.362571001053, 0.36416816711400002, 0.36345291137699998, 0.36438107490499999, 0.36563396453899999, 0.36416983604399999, 0.362910985947, 0.36602306365999998, 0.36392211914099998, 0.36199998855600002, 0.36394095420799999, 0.36691188812300002, 0.36393785476700002, 0.36612701415999999, 0.36289906501800001, 0.36232995986900002, 0.36090111732500002, 0.364420890808, 0.36394906044000003, 0.361227989197, 0.36387896537800002, 0.36320590972900002, 0.36275315284699999, 0.36340713500999999, 
0.36164999008199999, 0.36467385292100002, 0.36805486679100002, 0.36289596557600001, 0.36394786834699999, 0.365718841553], "pypy_op_count": 13466}], ["spambayes", "RawResult", {"changed_times": [0.29526209831200001, 0.38006997108500001, 0.33018302917499998, 0.36366391181899999, 0.29423999786400001, 0.26969599723799997, 0.31427311897299998, 0.28821587562599998, 0.203219175339, 0.233620882034, 0.24104809761000001, 0.20336890220600001, 0.232273101807, 0.19085001945499999, 0.42926096916200002, 0.28806519508400003, 0.20585989952100001, 0.18089318275499999, 0.22238802909899999, 0.16893601417500001, 0.16616892814600001, 0.220479011536, 0.180377006531, 0.16054391861, 0.215197086334, 0.183736085892, 0.18489217758199999, 0.193224906921, 0.15582609176600001, 0.16804194450400001, 0.21996998786899999, 0.195140838623, 0.15270996093799999, 0.17979001998899999, 0.15489506721499999, 0.18037295341500001, 0.193116903305, 0.16343092918400001, 0.159971952438, 0.14782285690300001, 0.17313408851600001, 0.14948415756200001, 0.158791065216, 0.180076122284, 0.16927504539499999, 0.144157886505, 0.14244699478100001, 0.143519878387, 0.18556189537000001, 0.14216208457900001], "base_times": [0.29886007309000001, 0.29901695251499999, 0.298616886139, 0.29834985733000002, 0.29854989051800002, 0.29814195632899998, 0.29830312728899999, 0.29862213134799998, 0.299257040024, 0.29829502105700001, 0.29837298393200001, 0.29873704910299997, 0.29843211174000001, 0.29907298088099998, 0.29820203781100002, 0.29881405830399999, 0.29845905303999998, 0.29935598373400002, 0.29840397834799998, 0.29851102828999998, 0.29995799064599998, 0.30102396011400001, 0.298897981644, 0.29899787902800001, 0.29958200454700001, 0.298629045486, 0.29906296729999998, 0.29873585700999999, 0.298842906952, 0.29817581176800001, 0.29846906662, 0.29850506782500003, 0.29797315597500001, 0.298891067505, 0.29849910736099999, 0.29878592491099998, 0.298606157303, 0.29909110069299999, 0.29859304428099998, 0.298247098923, 0.29934310913099998, 
0.29897117614699997, 0.29898595809900003, 0.29898214340200002, 0.298913955688, 0.29883098602300001, 0.29860901832600001, 0.29920983314499999, 0.29926896095299999, 0.29866409301800001], "pypy_op_count": 61657}], ["spectral-norm", "RawResult", {"changed_times": [0.102906227112, 0.039610862731900003, 0.038799047470099997, 0.038051128387500001, 0.038481950759899998, 0.038103103637700003, 0.035501003265399997, 0.038250207900999998, 0.036159992218000002, 0.0359559059143, 0.036381006240799998, 0.035027980804399998, 0.034610033035299997, 0.035240888595599998, 0.035089969635000003, 0.0350589752197, 0.035221815109300003, 0.035317897796600002, 0.035022974014299997, 0.034958124160799998, 0.035120010376000002, 0.034971952438399997, 0.034876823425299998, 0.035159111022900001, 0.035193920135499997, 0.034881114959699998, 0.035170078277599999, 0.035334825515699997, 0.034914970397900001, 0.0352718830109, 0.034760951995800003, 0.035120964050299998, 0.035072088241599998, 0.035100936889599997, 0.0349960327148, 0.0348551273346, 0.035269021987900002, 0.034928083419799999, 0.035053968429599999, 0.035284996032700001, 0.034821987152099998, 0.0350239276886, 0.034815073013300002, 0.034934043884299999, 0.034979820251500003, 0.035027027130100002, 0.035109043121299997, 0.034989118575999997, 0.035156965255700001, 0.036545991897600001], "base_times": [0.48320317268399998, 0.48130106925999999, 0.48081088066099997, 0.48078799247699999, 0.48163819313, 0.48205304145799999, 0.48261499404899999, 0.48477101325999999, 0.48043608665499998, 0.48111701011699998, 0.48086094856299999, 0.48102521896400002, 0.48187494277999998, 0.47980594635000001, 0.48229002952599997, 0.478718996048, 0.48013806343100002, 0.47903895378099998, 0.48454594612099999, 0.48125410079999997, 0.48194909095799998, 0.48057389259299998, 0.48293781280499998, 0.47997498512300002, 0.488119125366, 0.48146200180100002, 0.48207902908299999, 0.48173379898099999, 0.48159503936800002, 0.48306012153599998, 0.48202705383299999, 0.48256206512499999, 
0.48084712028499998, 0.48221206664999999, 0.48215079307600001, 0.48163104057299999, 0.48037314414999999, 0.48194503784199999, 0.48099803924599999, 0.48156309127800001, 0.48258996009799998, 0.481215953827, 0.48318195342999998, 0.48317313194299999, 0.48323607444799999, 0.48114085197399997, 0.482251882553, 0.48231697082500002, 0.48298287391700001, 0.48834586143499997], "pypy_op_count": 3208}], ["telco", "RawResult", {"changed_times": [0.32802100000000001, 0.208013, 0.18401100000000001, 0.21601300000000001, 0.18001200000000001, 0.21201300000000001, 0.200013, 0.180011, 0.16400999999999999, 0.172011, 0.16400999999999999, 0.176011, 0.16400999999999999, 0.16001000000000001, 0.172011, 0.16001000000000001, 0.16001000000000001, 0.16400999999999999, 0.16001000000000001, 0.172011, 0.16001000000000001, 0.16400999999999999, 0.16001000000000001, 0.15601000000000001, 0.15601000000000001, 0.15601000000000001, 0.16001000000000001, 0.16400999999999999, 0.15601000000000001, 0.17601, 0.15201000000000001, 0.15201000000000001, 0.15601000000000001, 0.15600900000000001, 0.15601000000000001, 0.16400999999999999, 0.15200900000000001, 0.15601000000000001, 0.15601000000000001, 0.15600900000000001, 0.15601000000000001, 0.15601000000000001, 0.15600900000000001, 0.15601000000000001, 0.16801099999999999, 0.15600900000000001, 0.15601000000000001, 0.15601000000000001, 0.15600900000000001, 0.15601000000000001], "base_times": [1.21, 1.2, 1.21, 1.21, 1.2, 1.2, 1.21, 1.2, 1.21, 1.1899999999999999, 1.21, 1.2, 1.21, 1.2, 1.21, 1.2, 1.21, 1.21, 1.2, 1.2, 1.2, 1.21, 1.2, 1.2, 1.21, 1.21, 1.2, 1.2, 1.21, 1.2, 1.21, 1.2, 1.2, 1.22, 1.2, 1.2, 1.21, 1.21, 1.2, 1.2, 1.21, 1.2, 1.2, 1.22, 1.2, 1.1899999999999999, 1.21, 1.21, 1.2, 1.21], "pypy_op_count": 20272}], ["twisted_names", "RawResult", {"changed_times": [0.0053561863952865559, 0.005837711617046118, 0.0054229934924078091, 0.0054884742041712408, 0.0053619302949061663, 0.0056369785794813977, 0.0053850296176628969, 0.0056022408963585435, 0.0053533190578158455, 
0.005485463521667581, 0.0053276505061267982, 0.0054377379010331706, 0.0052910052910052907, 0.0056085249579360631, 0.0052882072977260709, 0.005434782608695652, 0.0052966101694915252, 0.0054318305268875608, 0.005279831045406547, 0.0054200542005420054, 0.005279831045406547, 0.0052882072977260709, 0.005411255411255411, 0.0052854122621564482, 0.0054764512595837896, 0.0052826201796090863, 0.0054614964500273077, 0.0052910052910052907, 0.0054318305268875608, 0.0052966101694915252, 0.005246589716684155, 0.0053937432578209281, 0.005263157894736842, 0.0053937432578209281, 0.0052770448548812663, 0.0054406964091403701, 0.005235602094240838, 0.0054734537493158182, 0.0052521008403361349, 0.0052659294365455505, 0.0053908355795148251, 0.0053304904051172707, 0.0054054054054054057, 0.005263157894736842, 0.0053792361484669175, 0.005263157894736842, 0.0052714812862414339, 0.0053937432578209281, 0.0052273915316257188, 0.0054200542005420054], "base_times": [0.0096246390760346481, 0.0096525096525096523, 0.0096246390760346481, 0.0097087378640776691, 0.009643201542912247, 0.0096805421103581795, 0.0096339113680154135, 0.0097181729834791061, 0.0096618357487922701, 0.009643201542912247, 0.0096618357487922701, 0.0096246390760346481, 0.009643201542912247, 0.0096618357487922701, 0.0096525096525096523, 0.0096525096525096523, 0.0096153846153846159, 0.0096246390760346481, 0.0096339113680154135, 0.0096246390760346481, 0.0096711798839458421, 0.0096339113680154135, 0.0096246390760346481, 0.0096525096525096523, 0.0096618357487922701, 0.0096711798839458421, 0.0096339113680154135, 0.0096525096525096523, 0.0096061479346781949, 0.0096711798839458421, 0.0096525096525096523, 0.0096899224806201549, 0.009643201542912247, 0.0098619329388560158, 0.0097370983446932822, 0.0096525096525096523, 0.0096339113680154135, 0.0096153846153846159, 0.0096339113680154135, 0.0096246390760346481, 0.0096618357487922701, 0.0096246390760346481, 0.0096618357487922701, 0.0096618357487922701, 0.0096618357487922701, 
0.0096339113680154135, 0.0096525096525096523, 0.010060362173038229, 0.010319917440660475, 0.010288065843621399], "pypy_op_count": 42722}]], "branch": "trunk", "revision": 0} \ No newline at end of file diff --git a/talk/icooolps2011/benchmarks/paper-baseline.json b/talk/icooolps2011/benchmarks/paper-baseline.json new file mode 100644 --- /dev/null +++ b/talk/icooolps2011/benchmarks/paper-baseline.json @@ -0,0 +1,1 @@ +{"options": "", "results": [["crypto_pyaes", "RawResult", {"changed_times": [0.72780013084399997, 0.20544290542599999, 0.16985917091399999, 0.200357913971, 0.182709217072, 0.16569614410399999, 0.18768596649200001, 0.16354012489299999, 0.163209915161, 0.16358494758600001, 0.163627147675, 0.16392707824700001, 0.169529914856, 0.17107892036399999, 0.16498899459800001, 0.19305896758999999, 0.16402792930599999, 0.164088964462, 0.16301798820499999, 0.16370892524700001, 0.16323089599599999, 0.163249015808, 0.16316604614300001, 0.163213014603, 0.16399383544900001, 0.192234039307, 0.16315603256200001, 0.16326689720199999, 0.16337895393400001, 0.16616201400799999, 0.163379907608, 0.16339397430399999, 0.16291189193700001, 0.16279506683299999, 0.19032192230200001, 0.16397500038099999, 0.16263699531600001, 0.162869215012, 0.16262483596800001, 0.162625074387, 0.16299414634699999, 0.16412782669100001, 0.16268706321699999, 0.19027280807499999, 0.16272091865499999, 0.16293215751599999, 0.163349151611, 0.16336798667899999, 0.162735939026, 0.16331696510300001], "base_times": [2.8203029632600001, 2.82395195961, 2.8182699680300001, 2.82384204865, 2.8412010669700001, 2.82694506645, 2.82518291473, 2.8280220031700001, 2.8353188037899999, 2.8247430324599998, 2.8216609954799998, 2.83752584457, 2.82085800171, 2.8384521007500001, 2.8283910751299999, 2.8276689052599999, 2.82473993301, 2.8185532093000001, 2.8221321106000001, 2.8310389518700001, 2.8178269863100001, 2.82401704788, 2.8234288692499998, 2.8225979804999999, 2.8222818374599998, 2.82368803024, 2.82551598549, 
2.8231728076899998, 2.8328769207, 2.8207099437699998, 2.81998109818, 2.8251597881300001, 2.8187401294700001, 2.8217029571499999, 2.8245930671699999, 2.8280639648400001, 2.8301739692700001, 2.8280618190800002, 2.82529497147, 2.82448101044, 2.8260328769699998, 2.8200371265399999, 2.82417798042, 2.8277380466499999, 2.82353782654, 2.8206551074999999, 2.8307061195399998, 2.8227949142500002, 2.8304541110999999, 2.83272123337], "pypy_op_count": 12306}], ["django", "RawResult", {"changed_times": [0.19859194755599999, 0.14788007736200001, 0.14791393280000001, 0.15727496147200001, 0.148479938507, 0.149163007736, 0.14894199371299999, 0.15776586532600001, 0.14808511733999999, 0.147895097733, 0.14722394943200001, 0.15661501884500001, 0.148326158524, 0.14753913879399999, 0.14774703979500001, 0.15663313865699999, 0.14776611328100001, 0.14796304702800001, 0.147610187531, 0.157119035721, 0.14740300178499999, 0.147317886353, 0.14743995666500001, 0.15613794326800001, 0.14753007888799999, 0.14753603935199999, 0.147581100464, 0.15626621246299999, 0.14747285842899999, 0.14736413955700001, 0.14710807800299999, 0.156499862671, 0.14805603027299999, 0.14762496948199999, 0.14736008644099999, 0.15604901313799999, 0.147682905197, 0.14749097824099999, 0.14695692062400001, 0.15633511543299999, 0.147258043289, 0.14688491821300001, 0.14635014533999999, 0.15575003624, 0.146630048752, 0.14733195304899999, 0.14650583267200001, 0.155790805817, 0.14689493179300001, 0.14676094055200001], "base_times": [0.98852610588099998, 0.98855113983200005, 0.98795104026799996, 0.98740696907000003, 0.98748707771300004, 0.98786282539400005, 1.0120139122, 0.98785114288300002, 0.98786592483500002, 0.98862195014999998, 0.987090110779, 0.99770188331599996, 0.98769116401699997, 0.98845887184100001, 0.98714089393600002, 0.98871612548800003, 0.98751497268699995, 0.99157619476299996, 0.98774886131299999, 0.98788404464699997, 0.98708987236000001, 0.99108099937399996, 0.98800587654100003, 0.98886919021599995, 
0.98823690414400001, 0.98885416984600005, 0.987541913986, 0.98829197883599995, 0.98799490928599998, 0.98843193054199996, 0.98783206939699997, 0.98812007904099997, 0.98748683929400005, 0.98876690864600003, 0.98845410346999996, 0.98857808113099999, 0.987577915192, 0.98890900611900001, 0.98816299438499999, 0.98857998847999995, 0.98825502395599996, 0.98812794685399996, 0.98803901672399996, 0.99350500106799999, 0.98744702339199997, 0.98868703842200001, 0.99133110046399997, 0.99131393432600001, 0.98810410499599999, 0.98844885826100004], "pypy_op_count": 8374}], ["fannkuch", "RawResult", {"changed_times": [0.54128599166900004, 0.39794802665700002, 0.39342308044399998, 0.41779112815899999, 0.39200592040999999, 0.39131689071699999, 0.39341497421299998, 0.39025497436500001, 0.388537883759, 0.38954281806899999, 0.38831090927099998, 0.38887500762900001, 0.39032697677599998, 0.39138889312699998, 0.39585185050999999, 0.39125490188599998, 0.38789105415300001, 0.38883614540099998, 0.39338397979700002, 0.39335012435900002, 0.38863301277200002, 0.38742494583100001, 0.38713812828100003, 0.38755297660799998, 0.38791108131399998, 0.39297819137599999, 0.387409925461, 0.38823199272199999, 0.38701319694500003, 0.38588213920600001, 0.387736082077, 0.38797116279600002, 0.38843989372299997, 0.38765501976, 0.38805794715899999, 0.38744592666599997, 0.39486694335900002, 0.38852882385300003, 0.39214205741899999, 0.38689208030700001, 0.38689708709699999, 0.39462089538599998, 0.39907097816499998, 0.38714218139599998, 0.38710999488800002, 0.38751006126400001, 0.387485027313, 0.387592077255, 0.38912010192899998, 0.38744497299199998], "base_times": [1.9541189670600001, 1.9453120231600001, 1.9473388195000001, 1.9439451694500001, 1.9460229873699999, 1.9404349327099999, 1.9453160762799999, 1.94645500183, 1.94157981873, 1.9460811615, 1.9433031082200001, 1.9461688995399999, 1.9498269558000001, 1.9437561035199999, 1.9456009864799999, 1.94660711288, 1.94285321236, 1.9434599876400001, 1.9480879306800001, 
1.9442579746199999, 1.9434909820599999, 1.9507761001599999, 1.94450807571, 1.94631886482, 1.9510869979900001, 1.9463620185899999, 1.9438598156, 1.94956207275, 1.94319200516, 1.9674429893500001, 1.9507141113299999, 1.9443831443799999, 1.94647884369, 1.95129108429, 1.9462871551500001, 1.9858520031, 1.99589490891, 1.94509387016, 1.9437561035199999, 1.9457681178999999, 1.9428639411899999, 1.94377088547, 1.9483149051699999, 1.94313907623, 1.94405412674, 1.9491469860099999, 1.9435648918199999, 1.9471759796100001, 1.9444408416700001, 1.9447300434100001], "pypy_op_count": 4169}], ["go", "RawResult", {"changed_times": [0.25292301177999998, 0.36947083473199999, 0.237436056137, 0.28342795372000001, 0.26745104789700003, 0.28551077842700001, 0.24993515014600001, 0.26728606224099999, 0.21486115455599999, 0.17821907997100001, 0.217959880829, 0.28859519958500002, 0.176207065582, 0.22055888175999999, 0.23385000228899999, 0.17909193038900001, 0.16820192337000001, 0.179723978043, 0.181403875351, 0.178298950195, 0.246846914291, 0.172055006027, 0.173453092575, 0.17971897125200001, 0.18662810325599999, 0.177664041519, 0.18287706375099999, 0.19439196586599999, 0.16774606704700001, 0.19510412216199999, 0.18842697143600001, 0.157199144363, 0.15375590324399999, 0.15416789054899999, 0.17006707191500001, 0.169358968735, 0.170048952103, 0.17110419273399999, 0.156311988831, 0.18694901466399999, 0.157498121262, 0.15089702606200001, 0.152234077454, 0.17312717437700001, 0.146532058716, 0.22242999076799999, 0.17533302307099999, 0.15511894226100001, 0.18908810615499999, 0.15341901779200001], "base_times": [0.95646786689800001, 0.94750404357899998, 0.94687509536699999, 0.94459509849500001, 0.94622015953100003, 0.948693990707, 0.94552087783799998, 0.94918799400300002, 0.94300007820099996, 0.94276285171499996, 0.94992494583099996, 0.95113706588699998, 0.94474196434000002, 0.95061016082799998, 0.94758510589599998, 0.94624209404000004, 0.94500207901, 0.94643902778599998, 0.94784402847299998, 
0.94582509994499997, 0.94497609138500005, 0.95375013351399995, 0.95045304298400002, 0.95250487327599997, 0.95423793792699996, 0.94715285301200003, 0.94224596023600005, 0.95150017738299997, 0.94440102577200002, 0.94541096687299997, 0.94788098335299997, 0.94571495056199995, 0.94216609001200002, 0.94326806068400004, 0.944446086884, 0.95080494880699995, 0.95071697235099994, 0.94257807731599996, 0.94728708267200001, 0.94544100761399996, 0.94524788856499997, 0.94435787200900001, 0.95366191864000005, 0.94883990287800002, 0.94368410110500001, 0.94965386390700002, 0.95067691802999998, 0.94751787185699998, 0.94783377647400002, 0.94460606574999995], "pypy_op_count": 72595}], ["html5lib", "RawResult", {"changed_times": [9.7627551555600007, 6.50681209564, 5.94796800613, 5.8649179935499998, 5.7451310157800002, 5.7997250556899997, 5.7391381263700003, 5.7921259403200001, 5.61466002464, 5.55873417854, 5.5523180961599996, 5.6580610275299996, 5.60954499245, 5.5401849746699998, 5.6231791973099998, 5.6942481994599996, 5.54234910011, 5.5693290233599999, 5.6106441021000002, 5.5843679904899997, 5.6189818382299999, 5.8686611652399998, 5.65614581108, 5.5545940399199996, 5.6200249195099996, 5.5238997936200001, 5.52264904976, 5.5243220329299998, 5.5626420974700004, 5.5471501350399999, 5.52461123466, 5.5227749347700001, 5.5432789325699998, 5.5053629875199999, 5.5241940021499998, 5.53006386757, 5.5213818550099996, 5.5278069972999999, 5.5097069740300002, 5.56081199646, 5.5728008747100004, 5.5464761257199999, 5.5145790576899998, 5.5486950874299996, 5.5787169933299996, 5.52807784081, 5.4337561130500003, 5.5064508914900001, 5.5125980377200001, 5.5182650089300003], "base_times": [14.5398759842, 14.578344106699999, 14.5152900219, 14.510169982900001, 14.4941039085, 14.672421932200001, 14.6164031029, 14.573220968199999, 14.552697897, 14.488421917, 14.507045984299999, 14.6611778736, 14.6007580757, 14.5496821404, 14.596875905999999, 14.520130157500001, 14.4794309139, 14.664165973699999, 14.5936610699, 
14.564710140200001, 14.528239965399999, 14.506567955, 14.494971036900001, 14.621176004400001, 14.598964929599999, 14.5986030102, 14.5909948349, 14.566663026800001, 14.4748489857, 14.602259159100001, 15.0101139545, 14.6093139648, 14.5286970139, 14.527831792800001, 14.501435041400001, 14.6243560314, 14.616846084600001, 14.5617120266, 14.6032350063, 14.5109961033, 14.495292901999999, 14.6212890148, 14.616401910800001, 14.562317848199999, 14.5322878361, 14.599653959299999, 14.483633041399999, 14.4542350769, 14.616916894899999, 14.557915926], "pypy_op_count": 107489}], ["meteor-contest", "RawResult", {"changed_times": [0.40792703628499999, 0.30641794204700001, 0.29745411872900002, 0.29545879363999999, 0.296489953995, 0.29860687255899998, 0.30105400085400003, 0.29215121269200001, 0.29266119003300001, 0.29237389564499999, 0.29263496398900002, 0.29350113868700001, 0.31221795082100001, 0.29205083847000002, 0.29322814941399999, 0.30379986763, 0.29390215873699999, 0.29335880279499998, 0.29086709022500001, 0.29246902465800001, 0.29126787185699998, 0.29171013832100001, 0.29247283935500001, 0.29314398765600003, 0.29283618926999999, 0.29221892356899998, 0.29431104660000001, 0.29019904136699998, 0.29082083702099998, 0.29141306877099998, 0.29123091697699999, 0.291045188904, 0.291300058365, 0.291640996933, 0.30292105674699998, 0.29036998748800003, 0.29000306129499998, 0.29067301750199998, 0.29114389419600001, 0.28989410400400001, 0.29002285003700001, 0.29715704917899999, 0.29141998290999999, 0.29071307182299999, 0.29072785377499999, 0.29105806350699998, 0.290212869644, 0.29042005538900001, 0.29110693931600001, 0.29090595245400003], "base_times": [0.347690105438, 0.347379207611, 0.34742999076800002, 0.34710383415200002, 0.34720492363, 0.35657596588099999, 0.34880614280700001, 0.34798908233600001, 0.34818482399, 0.34763693809500001, 0.35144996643100002, 0.34738397598300003, 0.34841799736000001, 0.34782195091200002, 0.34782481193499998, 0.34755802154499998, 0.347153186798, 
0.34736108779899999, 0.34814310073900001, 0.34753608703599997, 0.35088086128200002, 0.34869384765599998, 0.34764099121100001, 0.34830594062800002, 0.34752416610699999, 0.35147094726599998, 0.34795999526999999, 0.347304105759, 0.34738588333100001, 0.34823513030999997, 0.347759008408, 0.34758305549599999, 0.34703111648599999, 0.34795522689800001, 0.34786295890800001, 0.34761881828300001, 0.34715294837999999, 0.34786987304700001, 0.348106861115, 0.34849286079399999, 0.35142993926999999, 0.34857106208799998, 0.34806513786299997, 0.35013008117700001, 0.34740614891100002, 0.34765410423300003, 0.34815096855200001, 0.34855008125300002, 0.34793090820299999, 0.34848809242200002], "pypy_op_count": 6674}], ["pyflate-fast", "RawResult", {"changed_times": [1.6519870758099999, 1.5784091949500001, 1.6024479866000001, 1.61350607872, 1.6075489520999999, 1.5768828392000001, 1.5757160186800001, 1.57972717285, 1.60931491852, 1.61094999313, 1.6146519184100001, 1.57459115982, 1.6188890934, 1.57903695107, 1.57212114334, 1.59721899033, 1.5916159153, 1.58400392532, 1.57477903366, 1.5713949203499999, 1.57080197334, 1.5835900306699999, 1.6002171039599999, 1.5770637989, 1.6036200523399999, 1.60284805298, 1.5749628543900001, 1.60054397583, 1.5795609951, 1.5777959823600001, 1.5802130699200001, 1.58011412621, 1.5695910453799999, 1.58169198036, 1.5731301307700001, 1.62624001503, 1.5752248764000001, 1.57110095024, 1.6130831241600001, 1.5938441753399999, 1.57682108879, 1.5953419208499999, 1.5820338726000001, 1.59155392647, 1.5979809760999999, 1.59561705589, 1.5827250480699999, 1.5701649189, 1.56580495834, 1.5710790157300001], "base_times": [3.1845300197599999, 3.2045500278499999, 3.19848918915, 3.2003328800199999, 3.2087490558599998, 3.2016968727099999, 3.2089800834700002, 3.2027099132500001, 3.2039811611200002, 3.24145507812, 3.20942282677, 3.20827007294, 3.2240500450099998, 3.2162659168199998, 3.2056119442000002, 3.2037580013300002, 3.2212641239200002, 3.2320189475999999, 3.2128779888199999, 
3.2171449661299998, 3.2016520500199999, 3.20188498497, 3.21955609322, 3.1961240768399999, 3.2113349437699998, 3.2268900871300001, 3.2027280330700001, 3.19735908508, 3.2125689983400001, 3.2135019302400001, 3.2120339870499999, 3.2142639160200002, 3.2021579742399999, 3.2210850715600001, 3.2075219154400001, 3.1982979774500002, 3.20001912117, 3.2098469734199999, 3.2058629989599998, 3.1986320018800001, 3.2141819000199998, 3.2192289829299998, 3.2053680419899999, 3.1992309093500002, 3.2024550437900001, 3.20250988007, 3.2268481254600001, 3.1993539333299998, 3.2168841362, 3.2366769313799999], "pypy_op_count": 23420}], ["raytrace-simple", "RawResult", {"changed_times": [0.13366389274599999, 0.134701013565, 0.13142418861399999, 0.13074302673300001, 0.12982892990100001, 0.171000003815, 0.12694597244299999, 0.17945313453699999, 0.125816106796, 0.12428021431, 0.12626290321399999, 0.124794006348, 0.12623906135599999, 0.12444615364100001, 0.12664604187, 0.12455892562900001, 0.126044988632, 0.124521970749, 0.12586498260500001, 0.12482690811200001, 0.12552618980399999, 0.124809980392, 0.12591314315800001, 0.124958992004, 0.12534403800999999, 0.12455701828, 0.125150918961, 0.12598013877899999, 0.12522602081299999, 0.185124874115, 0.123840808868, 0.13348698615999999, 0.124260187149, 0.123996973038, 0.12371802329999999, 0.125663995743, 0.124707937241, 0.123549938202, 0.12352514267, 0.123347997665, 0.12352180481, 0.12378787994400001, 0.123716831207, 0.1233689785, 0.122676134109, 0.13415789604200001, 0.122314929962, 0.12270283698999999, 0.12206196784999999, 0.122502088547], "base_times": [2.7074520587899999, 2.7396521568300001, 2.7307369708999998, 2.6815469264999998, 2.69208598137, 2.6841440200800002, 2.68341302872, 2.6919870376600001, 2.67920994759, 2.6848788261399998, 2.6872398853299999, 2.6906688213300001, 2.68470907211, 2.6994020938899999, 2.6836559772499999, 2.6999599933599998, 2.7059381008100001, 2.7178490161900002, 2.69162607193, 2.69529604912, 2.69365501404, 2.6848239898699999, 
2.7007908821100002, 2.68273901939, 2.6807341575599999, 2.7054591178899998, 2.6811010837599998, 2.6822350025200001, 2.6875679493, 2.6807270050000001, 2.6843619346600001, 2.6876230239900001, 2.6776049137100002, 2.6855509281200001, 2.6869277954099999, 2.69268298149, 2.6812908649399998, 3.27709794044, 2.67981386185, 2.6810460090600001, 2.6829540729499999, 2.7469010353100001, 2.6949999332400001, 2.7791228294399999, 2.6784279346500002, 2.6833209991500002, 2.6874320507, 2.6835088729900001, 2.6847939491299999, 2.6912178993200002], "pypy_op_count": 22686}], ["richards", "RawResult", {"changed_times": [0.0333859920502, 0.0230309963226, 0.021921157836900001, 0.019187927246099998, 0.019721031189, 0.018500089645400002, 0.018771171569799999, 0.0185508728027, 0.0192182064056, 0.018935918808, 0.0181679725647, 0.018320083618199999, 0.0183529853821, 0.020343065261799999, 0.018018960952799999, 0.0188300609589, 0.0175318717957, 0.0177280902863, 0.018807888030999999, 0.024389982223500001, 0.018516063690200001, 0.017338037490799998, 0.034728050231900003, 0.0174200534821, 0.017560005188000001, 0.017487049102799999, 0.017470121383700001, 0.0173721313477, 0.017081975936900001, 0.017552137374899999, 0.0182728767395, 0.017580032348599998, 0.018982887268100001, 0.0172560214996, 0.017680168151899999, 0.017323017120399999, 0.016937971115099999, 0.0168349742889, 0.0168628692627, 0.017159938812300001, 0.016857147216800001, 0.017000913619999999, 0.017075061798099998, 0.017179965972899999, 0.0171399116516, 0.016782999038700001, 0.0165989398956, 0.016957998275799999, 0.017010927200300001, 0.0166709423065], "base_times": [0.35720610618600002, 0.35715699195900003, 0.35798192024199998, 0.35355997085599999, 0.35392904281600002, 0.35314893722500001, 0.356689929962, 0.35919094085699999, 0.352291107178, 0.35818099975599998, 0.35543990135199999, 0.36030197143600001, 0.36045312881500002, 0.35752296447800003, 0.35652589798000001, 0.364083051682, 0.36325383186299998, 0.35992693901099998, 0.35655713081399998, 
0.37893795967100002, 0.35872888565099997, 0.35945200920100001, 0.358365058899, 0.35249805450400001, 0.36247181892399999, 0.35832405090300001, 0.35580086708100001, 0.35370302200300002, 0.35093998909000002, 0.35720777511599999, 0.36089897155799999, 0.36352515220600001, 0.35624098777800001, 0.358845949173, 0.361835002899, 0.35663890838599999, 0.35212683677700002, 0.35324692726099999, 0.363488912582, 0.359626054764, 0.35974693298299998, 0.35366106033299999, 0.35933279991099998, 0.36318302154499998, 0.35910010337800002, 0.36047887802099998, 0.35829305648799997, 0.35929489135699999, 0.35614109039300001, 0.350592851639], "pypy_op_count": 9814}], ["spambayes", "RawResult", {"changed_times": [0.30926299095199999, 0.36341595649699998, 0.31442809104899999, 0.34155201911900002, 0.27917313575699998, 0.25708198547400002, 0.288141012192, 0.26873421669000003, 0.193185091019, 0.19883394241300001, 0.23768210411099999, 0.219593048096, 0.23014211654700001, 0.204581975937, 0.351621866226, 0.27865505218499997, 0.21531105041500001, 0.170617818832, 0.20888614654500001, 0.16455888748200001, 0.156099081039, 0.21429800987200001, 0.165781021118, 0.17579698562599999, 0.161040067673, 0.18110013008100001, 0.197227954865, 0.16428899764999999, 0.15136885643, 0.17364501953100001, 0.18703007698099999, 0.20029592514, 0.15290808677699999, 0.15180897712700001, 0.14991092681900001, 0.19037008285500001, 0.15805292129500001, 0.150564193726, 0.16839098930400001, 0.14070105552699999, 0.14052796363799999, 0.14226913452100001, 0.17415404319800001, 0.153002023697, 0.138666152954, 0.135468959808, 0.16123890876800001, 0.15285181999200001, 0.15128684043900001, 0.13602519035300001], "base_times": [0.29797101020799999, 0.29795408248900002, 0.29835796356200001, 0.29880809783899998, 0.29825997352599998, 0.29821991920500002, 0.29865097999599999, 0.29807209968600001, 0.298465967178, 0.298011064529, 0.298724889755, 0.29867506027200003, 0.29831504821799998, 0.29797792434699999, 0.29795789718600002, 0.29978203773500001, 
0.30584812164300001, 0.29834198951699997, 0.298421859741, 0.29822778701800001, 0.29804205894500002, 0.29873704910299997, 0.29856514930700001, 0.29852104187, 0.29927778243999997, 0.29851388931299999, 0.29833388328600002, 0.30034995078999999, 0.29816079139700002, 0.29787302017200001, 0.29880809783899998, 0.29832911491399999, 0.29772496223400002, 0.30263495445299998, 0.29818391799900001, 0.298354148865, 0.29819393158000002, 0.29884791374199998, 0.29862689971900003, 0.298354148865, 0.298292160034, 0.29874491691600003, 0.29856395721399998, 0.299323797226, 0.29808211326599998, 0.29864907264700002, 0.29830098152200002, 0.298420906067, 0.29808211326599998, 0.29806208610500001], "pypy_op_count": 52882}], ["spectral-norm", "RawResult", {"changed_times": [0.129083156586, 0.040797948837300001, 0.0403299331665, 0.038610935211200001, 0.039195060730000002, 0.039062976837200003, 0.036705970764199999, 0.039520025253299999, 0.036592960357700002, 0.036882877349899999, 0.037465810775800003, 0.035732030868499999, 0.035948991775499997, 0.035698175430300003, 0.036148786544799999, 0.036082029342700002, 0.036076068878200002, 0.036139011383100002, 0.035675048828099999, 0.036125898361199997, 0.036101102828999997, 0.035979986190799999, 0.036281108856199999, 0.035867929458599998, 0.035962104797400002, 0.035891771316500003, 0.036086797714200002, 0.035885095596300001, 0.035848140716600002, 0.036110877990700001, 0.035861968994099998, 0.035961151123000003, 0.036049842834500002, 0.035854101180999999, 0.035961151123000003, 0.035753965377799998, 0.036172866821299998, 0.035995006561300003, 0.035782098770100003, 0.0361349582672, 0.035686016082799997, 0.035908937454199999, 0.036139011383100002, 0.036530017852800002, 0.036205053329499998, 0.035715818405200002, 0.035990953445400001, 0.035962820053100003, 0.035777091980000002, 0.037657022476200001], "base_times": [0.48637008666999998, 0.48509907722500001, 0.48532485961900002, 0.485705852509, 0.484349012375, 0.48674201965300001, 0.48437905311599999, 
0.485854148865, 0.48505711555499997, 0.48563790321400002, 0.48494076728800001, 0.48802995681799999, 0.485329866409, 0.483878850937, 0.48503589630100002, 0.48369884490999998, 0.48407912254300001, 0.483483076096, 0.48600912094100002, 0.48394894599900001, 0.48591303825400001, 0.48459815978999998, 0.48652315139800001, 0.48473119735699999, 0.49288892746000001, 0.48658990860000001, 0.48562598228499998, 0.48428487777700002, 0.485806941986, 0.48478317260699999, 0.485936164856, 0.48470091819799999, 0.48497200012199998, 0.48583221435500001, 0.48429799079899999, 0.48650097847000001, 0.485965967178, 0.48583102226300001, 0.48610591888400001, 0.48517489433299998, 0.48512792587300002, 0.485574007034, 0.48517203330999997, 0.48551893234299998, 0.48832988738999999, 0.48716092109699999, 0.48931503295899997, 0.48652195930499997, 0.48769593238800002, 0.49248003959699999], "pypy_op_count": 3208}], ["telco", "RawResult", {"changed_times": [0.29602000000000001, 0.22001299999999999, 0.204013, 0.204013, 0.19601199999999999, 0.204012, 0.18401200000000001, 0.18801200000000001, 0.15601000000000001, 0.16800999999999999, 0.15601000000000001, 0.16001000000000001, 0.15601000000000001, 0.16001000000000001, 0.16800999999999999, 0.16800999999999999, 0.15601000000000001, 0.15601100000000001, 0.15600900000000001, 0.15601000000000001, 0.15201000000000001, 0.16001000000000001, 0.16001000000000001, 0.16400999999999999, 0.15601000000000001, 0.15600900000000001, 0.15201000000000001, 0.15200900000000001, 0.15200900000000001, 0.16401099999999999, 0.16401099999999999, 0.148009, 0.15200900000000001, 0.148009, 0.15201000000000001, 0.15200900000000001, 0.148009, 0.15201000000000001, 0.15200900000000001, 0.16001000000000001, 0.15201000000000001, 0.148009, 0.148009, 0.15201000000000001, 0.148009, 0.148009, 0.15201000000000001, 0.148009, 0.16001000000000001, 0.15200900000000001], "base_times": [1.21, 1.21, 1.21, 1.21, 1.22, 1.21, 1.21, 1.21, 1.21, 1.21, 1.22, 1.22, 1.21, 1.21, 1.2, 1.21, 1.21, 1.21, 1.21, 1.22, 
1.21, 1.21, 1.2, 1.22, 1.21, 1.21, 1.2, 1.21, 1.21, 1.21, 1.21, 1.22, 1.21, 1.2, 1.21, 1.21, 1.21, 1.22, 1.2, 1.21, 1.21, 1.22, 1.2, 1.22, 1.2, 1.21, 1.21, 1.21, 1.21, 1.21], "pypy_op_count": 19725}], ["twisted_names", "RawResult", {"changed_times": [0.0057937427578215531, 0.0053361792956243331, 0.0052742616033755272, 0.0053475935828877002, 0.0051282051282051282, 0.0052714812862414339, 0.0052994170641229464, 0.0052770448548812663, 0.0052493438320209973, 0.0050890585241730284, 0.0051177072671443197, 0.0051203277009728623, 0.0052742616033755272, 0.0051098620337250893, 0.0051098620337250893, 0.0052383446830801469, 0.0051098620337250893, 0.0051098620337250893, 0.0050838840874428059, 0.005246589716684155, 0.0051020408163265302, 0.0051072522982635342, 0.0052742616033755272, 0.0050838840874428059, 0.0051046452271567124, 0.0050890585241730284, 0.0052714812862414339, 0.0050890585241730284, 0.0051229508196721308, 0.0053191489361702126, 0.0050684237202230104, 0.0050838840874428059, 0.0051308363263211903, 0.0053276505061267982, 0.0051098620337250893, 0.0051124744376278121, 0.0052328623757195184, 0.0051177072671443197, 0.0051046452271567124, 0.005076142131979695, 0.0052273915316257188, 0.0050787201625190452, 0.0050735667174023336, 0.005243838489774515, 0.0050709939148073022, 0.0051072522982635342, 0.0051177072671443197, 0.0052882072977260709, 0.0051072522982635342, 0.0050942435048395313], "base_times": [0.0095969289827255271, 0.0095785440613026813, 0.0096339113680154135, 0.0095785440613026813, 0.0096153846153846159, 0.0095877277085330784, 0.0095969289827255271, 0.0095693779904306216, 0.0096153846153846159, 0.0095877277085330784, 0.0096246390760346481, 0.0095877277085330784, 0.0095969289827255271, 0.0096153846153846159, 0.0095877277085330784, 0.0095969289827255271, 0.0095969289827255271, 0.0095785440613026813, 0.0095602294455066923, 0.0096061479346781949, 0.0096153846153846159, 0.0095969289827255271, 0.0096618357487922701, 0.0095785440613026813, 0.0096246390760346481, 
0.0096618357487922701, 0.0096153846153846159, 0.0096153846153846159, 0.0095877277085330784, 0.0096061479346781949, 0.0096246390760346481, 0.0096153846153846159, 0.0095785440613026813, 0.0095969289827255271, 0.0095785440613026813, 0.0095877277085330784, 0.0095969289827255271, 0.0096153846153846159, 0.0095785440613026813, 0.0095877277085330784, 0.0095785440613026813, 0.0096153846153846159, 0.0095969289827255271, 0.0097087378640776691, 0.0096899224806201549, 0.0096246390760346481, 0.0096899224806201549, 0.0096246390760346481, 0.009643201542912247, 0.009727626459143969], "pypy_op_count": 39483}]], "branch": "trunk", "revision": 0} \ No newline at end of file From commits-noreply at bitbucket.org Sat Mar 26 23:52:34 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 23:52:34 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: two more citations, fix one of the tiny functions Message-ID: <20110326225234.1945C282B9E@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3420:5fc620b6564e Date: 2011-03-26 21:58 +0100 http://bitbucket.org/pypy/extradoc/changeset/5fc620b6564e/ Log: two more citations, fix one of the tiny functions diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -141,6 +141,9 @@ techniques used for object models of dynamic languages, such as maps and polymorphic inline caches. +XXX replace "turn variable into constant" into "main tool to feedback language +and semantics specific runtime information into the compilation" + The contributions of this paper are: \begin{itemize} \item A hint to turn arbitrary variables into constants in the trace. @@ -394,7 +397,7 @@ There are cases in which it is useful to turn an arbitrary variable into a constant value. This process is called \emph{promotion} and it is an old idea -in partial evaluation (it's called ``the trick'' \cite{XXX} there). 
Promotion is also heavily +in partial evaluation (it's called ``The Trick'' \cite{jones_partial_1993} there). Promotion is also heavily used by Psyco \cite{rigo_representation-based_2004} and by all older versions of PyPy's JIT. Promotion is a technique that only works well in JIT compilers, in static compilers it is significantly less applicable. @@ -408,7 +411,6 @@ Let's make this more concrete. If we trace a call to the following function: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] def f2(x, y): - x = hint(x, promote=True) z = x * 2 + 1 return z + y \end{lstlisting} @@ -647,7 +649,7 @@ Therefore it makes sense to factor the layout information out of the instance implementation into a shared object, called the \emph{map}. Maps are a well-known technique to efficiently implement instances and come from the SELF project -\cite{XXX}. They are also used by many JavaScript implementations such as V8. +\cite{chambers_efficient_1989}. They are also used by many JavaScript implementations such as V8. The rewritten \texttt{Instance} class using maps can be seen in Figure~\ref{fig:maps}. @@ -812,6 +814,8 @@ \section{Related Work} +\cite{mario_wolczko_towards_1999} + \section{Conclusion and Next Steps} In this paper we presented two hints that can be used in the source code of an diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -206,6 +206,16 @@ pages = {53--64} }, + at book{jones_partial_1993, + title = {Partial evaluation and automatic program generation}, + isbn = {0-13-020249-5}, + url = {http://portal.acm.org/citation.cfm?id=153676}, + abstract = {This book is out of print. For copies, Please refer to the following online page}, + publisher = {{Prentice-Hall,} Inc.}, + author = {Neil D. Jones and Carsten K. 
Gomard and Peter Sestoft}, + year = {1993} +}, + @inproceedings{armin_rigo_pypys_2006, address = {Portland, Oregon, {USA}}, title = {{PyPy's} approach to virtual machine construction}, @@ -291,6 +301,32 @@ pages = {144--153} }, + at inproceedings{mario_wolczko_towards_1999, + title = {Towards a Universal Implementation Substrate for {Object-Oriented} Languages}, + abstract = {Self is a minimalist object-oriented language with a sophisticated implementation that utilizes adaptive optimization. We have built implementations of Smalltalk and Java by translation to Self. These implementations were much easier to construct in Self than by conventional means, and perform surprisingly well (competitively with conventional, commercial implementations). This leads us to believe that a Self-like system may form the basis of a universal substrate for implementation of object-oriented languages.}, + booktitle = {{OOPSLA} workshop on Simplicity, Performance, and Portability in Virtual Machine Design}, + author = {Mario Wolczko and Ole Agesen and David Ungar}, + year = {1999}, + keywords = {fixme}, + annote = {{{\textless}p{\textgreater}Describes} implementations of Smalltalk and Java by translation to {SELF.} The performance of each is better than some \"good\" implementations of both at the time. They argue that {SELF} can be used to make good implementations of {OO} languages by translation easily (employs many {PyPy-like} arguments, that {VM} construction is hard, {etc.).{\textless}/p{\textgreater}{\textless}p{\textgreater}\ {\textless}/p{\textgreater}{\textless}p{\textgreater}They} cut corners in some places (e.g. 
about floats in Java) and need to extend the handling of integers in the {SELF} {VM.} In addition, writing a translator to {SELF} {\textendash} while easier than writing a full {VM} in {C/C++} {\textendash} is still more work than actually writing a simple interpreter.{\textless}/p{\textgreater}} +}, + + at inproceedings{hoelzle_optimizing_1994, + address = {Orlando, Florida, United States}, + title = {Optimizing dynamically-dispatched calls with run-time type feedback}, + isbn = {{0-89791-662-X}}, + url = {http://portal.acm.org/citation.cfm?id=178243.178478}, + doi = {10.1145/178243.178478}, + abstract = {Note: {OCR} errors may be found in this Reference List extracted from the full text article. {ACM} has opted to expose the complete List rather than only correct and linked references.}, + booktitle = {Proceedings of the {ACM} {SIGPLAN} 1994 conference on Programming language design and implementation}, + publisher = {{ACM}}, + author = {Urs H\"{o}lzle and David Ungar}, + year = {1994}, + keywords = {{JIT,} polymorphic inline cache, self, type-feedback}, + pages = {326--336}, + annote = {{{\textless}p{\textgreater}Completely} straightforward paper about type-feedback: collect type statistics at runtime and use them later to make better code by special-casing the common cases.{\textless}/p{\textgreater}} +}, + @inproceedings{yermolovich_optimization_2009, address = {Orlando, Florida, {USA}}, title = {Optimization of dynamic languages using hierarchical layering of virtual machines}, @@ -317,6 +353,38 @@ year = {2007} }, + at article{chambers_efficient_1989, + title = {An efficient implementation of {SELF} a dynamically-typed object-oriented language based on prototypes}, + volume = {24}, + url = {http://portal.acm.org/citation.cfm?id=74884}, + doi = {10.1145/74878.74884}, + abstract = {We have developed and implemented techniques that double the performance of dynamically-typed object-oriented languages. 
Our {SELF} implementation runs twice as fast as the fastest Smalltalk implementation, despite {SELF's} lack of classes and explicit variables. To compensate for the absence of classes, our system uses implementation-level maps to transparently group objects cloned from the same prototype, providing data type information and eliminating the apparent space overhead for prototype-based systems. To compensate for dynamic typing, user-defined control structures, and the lack of explicit variables, our system dynamically compiles multiple versions of a source method, each customized according to its receiver's map. Within each version the type of the receiver is fixed, and thus the compiler can statically bind and inline all messages sent to self. Message splitting and type prediction extract and preserve even more static type information, allowing the compiler to inline many other messages. Inlining dramatically improves performance and eliminates the need to hard-wire low-level methods such as +,==, and {ifTrue:.} Despite inlining and other optimizations, our system still supports interactive programming environments. The system traverses internal dependency lists to invalidate all compiled methods affected by a programming change. The debugger reconstructs inlined stack frames from compiler-generated debugging information, making inlining invisible to the {SELF} programmer.}, + number = {10}, + journal = {{SIGPLAN} Not.}, + author = {C. Chambers and D. Ungar and E. Lee}, + year = {1989}, + keywords = {self, specialization}, + pages = {49--70}, + annote = {{\textless}p{\textgreater}describes the first implementation of {SELF.} Since {SELF} is highly dynamic, it is not easy to optimize it well.{\textless}/p{\textgreater} +{\textless}p{\textgreater}~{\textless}/p{\textgreater} +{{\textless}p{\textgreater}The} first problem is one of space, the prototypical nature of self makes its objects much larger. 
This is solved by "maps", which are like sharing dicts in pypy: every object has an associated map (structure object) that describes how the layout of the object. In that respect a map is a bit like a class, but user-invisible.{\textless}/p{\textgreater} +{\textless}p{\textgreater}~{\textless}/p{\textgreater} +{{\textless}p{\textgreater}The} compilation behavior of {SELF} is such that the every method is specialized for the map of the first argument. Then aggressive inlining is performed, which is particularly useful for self-sends (which are syntactically easy to write in {SELF),} since the lookup of those methods can be done at compile-time since the map is static due to specialization.{\textless}/p{\textgreater} +{\textless}p{\textgreater}~{\textless}/p{\textgreater} +{{\textless}p{\textgreater}Further} optimizations are removal of unused closures and method splitting (which essentially prevents merging of paths in the flow graph to keep more information).{\textless}/p{\textgreater}} +}, + + at inproceedings{hoelzle_optimizing_1991, + title = {Optimizing {Dynamically-Typed} {Object-Oriented} Languages With Polymorphic Inline Caches}, + isbn = {3-540-54262-0}, + url = {http://portal.acm.org/citation.cfm?id=679193&dl=ACM&coll=portal}, + booktitle = {Proceedings of the European Conference on {Object-Oriented} Programming}, + publisher = {{Springer-Verlag}}, + author = {Urs H\"{o}lzle and Craig Chambers and David Ungar}, + year = {1991}, + pages = {21--38} +}, + @inproceedings{rigo_representation-based_2004, address = {Verona, Italy}, title = {Representation-based just-in-time specialization and the Psyco prototype for Python}, From commits-noreply at bitbucket.org Sat Mar 26 23:52:34 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sat, 26 Mar 2011 23:52:34 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: some notes about related work out of a discussion with samuele Message-ID: <20110326225234.BB8F1282BA1@codespeak.net> Author: Carl Friedrich Bolz 
Branch: extradoc Changeset: r3421:4f1077a55c4b Date: 2011-03-26 23:52 +0100 http://bitbucket.org/pypy/extradoc/changeset/4f1077a55c4b/ Log: some notes about related work out of a discussion with samuele diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -814,8 +814,31 @@ \section{Related Work} +partial evaluation \cite{futamura_partial_1999} \cite{jones_partial_1993} + +earlier examples of promotion \cite{carl_friedrich_bolz_towards_????}, \cite{armin_rigo_jit_2007} + +meta-tracers: SPUR \cite{bebenita_spur:_2010} +tamarin/lua stuff \cite{yermolovich_optimization_2009} +Dynamo \cite{sullivan_dynamic_2003} + +\cite{chambers_efficient_1989} maps +\cite{hoelzle_optimizing_1994} Type feedback +\cite{hoelzle_optimizing_1991} PICs +find the web page of V8 about maps + +is there anything about versions? smalltalks tend to clear their method caches +when new methods are added. self and java use dependency tracking and +deoptimization. this is better what we have above, because we need runtime +checks. mention out of line guard? 
+ +jruby used versions at some point, jvm-l mailing list discusses them + \cite{mario_wolczko_towards_1999} +invokedynamic tries to give control to the language implementor about PICs and +deoptimization + \section{Conclusion and Next Steps} In this paper we presented two hints that can be used in the source code of an diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -206,6 +206,19 @@ pages = {53--64} }, + at article{futamura_partial_1999, + title = {Partial Evaluation of Computation Process - An Approach to a {Compiler-Compiler}}, + volume = {12}, + url = {http://citeseer.ist.psu.edu/futamura99partial.html}, + number = {4}, + journal = {{Higher-Order} and Symbolic Computation}, + author = {Yoshihiko Futamura}, + year = {1999}, + keywords = {Futamura}, + pages = {381--391}, + annote = {{{\textless}p{\textgreater}Classical} futamura paper, defines partial evaluation as follow:{\textless}/p{\textgreater}{\textless}p{\textgreater}\ {\textless}/p{\textgreater}{\textless}p{\textgreater}a function f(c, r) to some output. partial evaluation is a function a such that{\textless}/p{\textgreater}{\textless}p{\textgreater}f(c`, r`) = a(f, c`)(r`){\textless}/p{\textgreater}{\textless}p{\textgreater}\ {\textless}/p{\textgreater}{\textless}p{\textgreater}hints at futamura-projections. Describes a vague algorithm for actually doing partial evaluation. 
The algorithm contains some elements that are quite similar to merging.{\textless}/p{\textgreater}} +}, + @book{jones_partial_1993, title = {Partial evaluation and automatic program generation}, isbn = {0-13-020249-5}, @@ -400,6 +413,21 @@ pages = {15--26} }, + at inproceedings{sullivan_dynamic_2003, + address = {San Diego, California}, + title = {Dynamic native optimization of interpreters}, + isbn = {1-58113-655-2}, + url = {http://portal.acm.org/citation.cfm?id=858570.858576}, + doi = {10.1145/858570.858576}, + abstract = {For domain specific languages, "scripting languages", dynamic languages, and for virtual machine-based languages, the most straightforward implementation strategy is to write an interpreter. A simple interpreter consists of a loop that fetches the next bytecode, dispatches to the routine handling that bytecode, then loops. There are many ways to improve upon this simple mechanism, but as long as the execution of the program is driven by a representation of the program other than as a stream of native instructions, there will be some "interpretive {overhead".There} is a long history of approaches to removing interpretive overhead from programming language implementations. In practice, what often happens is that, once an interpreted language becomes popular, pressure builds to improve performance until eventually a project is undertaken to implement a native Just In Time {(JIT)} compiler for the language. Implementing a {JIT} is usually a large effort, affects a significant part of the existing language implementation, and adds a significant amount of code and complexity to the overall code {base.In} this paper, we present an innovative approach that dynamically removes much of the interpreted overhead from language implementations, with minimal instrumentation of the original interpreter. 
While it does not give the performance improvements of hand-crafted native compilers, our system provides an appealing point on the language implementation spectrum.}, + booktitle = {Proceedings of the 2003 workshop on Interpreters, virtual machines and emulators}, + publisher = {{ACM}}, + author = {Gregory T. Sullivan and Derek L. Bruening and Iris Baron and Timothy Garnett and Saman Amarasinghe}, + year = {2003}, + pages = {50--57}, + annote = {{{\textless}p{\textgreater}Describes} the application of Dynamo to interpreters. The unchanged dynamo does not fare too well on interpreters, since it traces one iteration of the bytecode loop, and the next iteration is likely to be very different. Matters are improved by adding hints to the interpreter that tell the tracer what the program counter of the interpreter is. Then the tracer only closes loops at the application {level.{\textless}/p{\textgreater}{\textless}p{\textgreater}\ {\textless}/p{\textgreater}{\textless}p{\textgreater}Strong} restrictions due to the fact that things happen on the assembler level.{\textless}/p{\textgreater}} +}, + @incollection{carl_friedrich_bolz_back_2008, title = {Back to the Future in One Week {\textemdash} Implementing a Smalltalk {VM} in {PyPy}}, url = {http://dx.doi.org/10.1007/978-3-540-89275-5_7}, From commits-noreply at bitbucket.org Sun Mar 27 00:45:57 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sun, 27 Mar 2011 00:45:57 +0100 (CET) Subject: [pypy-svn] extradoc extradoc: use index notation for SSA variables everywhere Message-ID: <20110326234557.555E2282B9E@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3422:78b4e79a3817 Date: 2011-03-27 00:45 +0100 http://bitbucket.org/pypy/extradoc/changeset/78b4e79a3817/ Log: use index notation for SSA variables everywhere diff --git a/talk/icooolps2011/code/trace3.tex b/talk/icooolps2011/code/trace3.tex --- a/talk/icooolps2011/code/trace3.tex +++ b/talk/icooolps2011/code/trace3.tex @@ -1,23 +1,23 @@ 
\begin{Verbatim} -# inst.getattr("a") -map1 = inst.map -guard(map1 == 0xb74af4a8) -storage1 = inst.storage -result1 = storage1[0] +# $inst_1$.getattr("a") +$map_1$ = $inst_1$.map +guard($map_1$ == 0xb74af4a8) +$storage_1$ = $inst_1$.storage +$result_1$ = $storage_1$[0] -# inst.getattr("b") -cls1 = inst.cls -methods1 = cls1.methods -result2 = dict.get(methods1, "b") -guard(result2 is not None) -v2 = result1 + result2 +# $inst_1$.getattr("b") +$cls_1$ = $inst_1$.cls +$methods_1$ = $cls_1$.methods +$result_2$ = dict.get($methods_1$, "b") +guard($result_2$ is not None) +$v_2$ = $result_1$ + $result_2$ -# inst.getattr("c") -cls2 = inst.cls -methods2 = cls2.methods -result3 = dict.get(methods2, "c") -guard(result3 is not None) +# $inst_1$.getattr("c") +$cls_2$ = $inst_1$.cls +$methods_2$ = $cls_2$.methods +$result_3$ = dict.get($methods_2$, "c") +guard($result_3$ is not None) -v4 = v2 + result3 -return(v4) +$v_4$ = $v_2$ + $result_3$ +return($v_4$) \end{Verbatim} diff --git a/talk/icooolps2011/code/trace2.tex b/talk/icooolps2011/code/trace2.tex --- a/talk/icooolps2011/code/trace2.tex +++ b/talk/icooolps2011/code/trace2.tex @@ -1,33 +1,33 @@ -\begin{lstlisting}[escapechar=|,basicstyle=\ttfamily]] -# inst.getattr("a") -map1 = inst.map -guard(map1 == 0xb74af4a8) -|{\color{gray}index1 = Map.getindex(map1, "a")}| -|{\color{gray}guard(index1 != -1)}| -storage1 = inst.storage -result1 = storage1[index1] +\begin{lstlisting}[mathescape,escapechar=|,basicstyle=\ttfamily]] +# $inst_1$.getattr("a") +$map_1$ = $inst_1$.map +guard($map_1$ == 0xb74af4a8) +|{\color{gray}$index_1$ = Map.getindex($map_1$, "a")}| +|{\color{gray}guard($index_1$ != -1)}| +$storage_1$ = $inst_1$.storage +$result_1$ = $storage_1$[$index_1$] -# inst.getattr("b") -|{\color{gray}map2 = inst.map}| -|{\color{gray}guard(map2 == 0xb74af4a8)}| -|{\color{gray}index2 = Map.getindex(map2, "b")}| -|{\color{gray}guard(index2 == -1)}| -cls1 = inst.cls -methods1 = cls.methods -result2 = dict.get(methods1, "b") -guard(result2 
is not None) -v2 = result1 + result2 +# $inst_1$.getattr("b") +|{\color{gray}$map_2$ = $inst_1$.map}| +|{\color{gray}guard($map_2$ == 0xb74af4a8)}| +|{\color{gray}$index_2$ = Map.getindex($map_2$, "b")}| +|{\color{gray}guard($index_2$ == -1)}| +$cls_1$ = $inst_1$.cls +$methods_1$ = $cls_1$.methods +$result_2$ = dict.get($methods_1$, "b") +guard($result_2$ is not None) +$v_2$ = $result_1$ + $result_2$ -# inst.getattr("c") -|{\color{gray}map3 = inst.map}| -|{\color{gray}guard(map3 == 0xb74af4a8)}| -|{\color{gray}index3 = Map.getindex(map3, "c")}| -|{\color{gray}guard(index3 == -1)}| -cls1 = inst.cls -methods2 = cls.methods -result3 = dict.get(methods2, "c") -guard(result3 is not None) +# $inst_1$.getattr("c") +|{\color{gray}$map_3$ = $inst_1$.map}| +|{\color{gray}guard($map_3$ == 0xb74af4a8)}| +|{\color{gray}$index_3$ = Map.getindex($map_3$, "c")}| +|{\color{gray}guard($index_3$ == -1)}| +$cls_2$ = $inst_1$.cls +$methods_2$ = $cls_2$.methods +$result_3$ = dict.get($methods_2$, "c") +guard($result_3$ is not None) -v4 = v2 + result3 -return(v4) +$v_4$ = $v_2$ + $result_3$ +return($v_4$) \end{lstlisting} diff --git a/talk/icooolps2011/code/trace5.tex b/talk/icooolps2011/code/trace5.tex --- a/talk/icooolps2011/code/trace5.tex +++ b/talk/icooolps2011/code/trace5.tex @@ -1,18 +1,18 @@ \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -# inst.getattr("a") -map1 = inst.map -guard(map1 == 0xb74af4a8) -storage1 = inst.storage -result1 = storage1[0] +# $inst_1$.getattr("a") +$map_1$ = $inst_1$.map +guard($map_1$ == 0xb74af4a8) +$storage_1$ = $inst_1$.storage +$result_1$ = $storage_1$[0] -# inst.getattr("b") -cls1 = inst.cls -guard(cls1 == 0xb7aaaaf8) -version1 = cls1.version -guard(version1 == 0xb7bbbb18) -v2 = result1 + 41 +# $inst_1$.getattr("b") +$cls_1$ = $inst_1$.cls +guard($cls_1$ == 0xb7aaaaf8) +$version_1$ = $cls_1$.version +guard($version_1$ == 0xb7bbbb18) +$v_2$ = $result_1$ + 41 -# inst.getattr("c") -v4 = v2 + 17 -return(v4) +# $inst_1$.getattr("c") +$v_4$ = $v_2$ 
+ 17 +return($v_4$) \end{lstlisting} diff --git a/talk/icooolps2011/code/trace4.tex b/talk/icooolps2011/code/trace4.tex --- a/talk/icooolps2011/code/trace4.tex +++ b/talk/icooolps2011/code/trace4.tex @@ -1,37 +1,37 @@ \begin{lstlisting}[escapechar=|,mathescape,basicstyle=\ttfamily] -# inst.getattr("a") -map1 = inst.map -guard(map1 == 0xb74af4a8) -|{\color{gray}index1 = Map.getindex(map1, "a")}| -|{\color{gray}guard(index1 != -1)}| -storage1 = inst.storage -result1 = storage1[index1] +# $inst_1$.getattr("a") +$map_1$ = $inst_1$.map +guard($map_1$ == 0xb74af4a8) +|{\color{gray}$index_1$ = Map.getindex($map_1$, "a")}| +|{\color{gray}guard($index_1$ != -1)}| +$storage_1$ = $inst_1$.storage +$result_1$ = $storage_1$[$index_1$] -# inst.getattr("b") -|{\color{gray}map2 = inst.map}| -|{\color{gray}guard(map2 == 0xb74af4a8)}| -|{\color{gray}index2 = Map.getindex(map2, "b")}| -|{\color{gray}guard(index2 == -1)}| -cls1 = inst.cls -guard(cls1 == 0xb7aaaaf8) -version1 = cls1.version -guard(version1 == 0xb7bbbb18) -|{\color{gray}result2 = Class.\_find\_method(cls, "b", version1)}| -|{\color{gray}guard(result2 is not None)}| -v2 = result1 + result2 +# $inst_1$.getattr("b") +|{\color{gray}$map_2$ = $inst_1$.map}| +|{\color{gray}guard($map_2$ == 0xb74af4a8)}| +|{\color{gray}$index_2$ = Map.getindex($map_2$, "b")}| +|{\color{gray}guard($index_2$ == -1)}| +$cls_1$ = $inst_1$.cls +guard($cls_1$ == 0xb7aaaaf8) +$version_1$ = $cls_1$.version +guard($version_1$ == 0xb7bbbb18) +|{\color{gray}$result_2$ = Class.\_find\_method($cls_1$, "b", $version_1$)}| +|{\color{gray}guard($result_2$ is not None)}| +$v_2$ = $result_1$ + $result_2$ -# inst.getattr("c") -|{\color{gray}map3 = inst.map}| -|{\color{gray}guard(map3 == 0xb74af4a8)}| -|{\color{gray}index3 = Map.getindex(map3, "c")}| -|{\color{gray}guard(index3 == -1)}| -|{\color{gray}cls2 = inst.cls}| -|{\color{gray}guard(cls2 == 0xb7aaaaf8)}| -|{\color{gray}version2 = cls2.version}| -|{\color{gray}guard(version2 == 0xb7bbbb18)}| 
-|{\color{gray}result3 = Class.\_find\_method(cls, "c", version2)}| -|{\color{gray}guard(result3 is not None)}| +# $inst_1$.getattr("c") +|{\color{gray}$map_3$ = $inst_1$.map}| +|{\color{gray}guard($map_3$ == 0xb74af4a8)}| +|{\color{gray}$index_3$ = Map.getindex($map_3$, "c")}| +|{\color{gray}guard($index_3$ == -1)}| +|{\color{gray}$cls_2$ = $inst_1$.cls}| +|{\color{gray}guard($cls_2$ == 0xb7aaaaf8)}| +|{\color{gray}$version_2$ = $cls_2$.version}| +|{\color{gray}guard($version_2$ == 0xb7bbbb18)}| +|{\color{gray}$result_3$ = Class.\_find\_method($cls_2$, "c", $version_2$)}| +|{\color{gray}guard($result_3$ is not None)}| -v4 = v2 + result3 -return(v4) +$v_4$ = $v_2$ + $result_3$ +return($v_4$) \end{lstlisting} diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -382,17 +382,17 @@ y = y + x \end{lstlisting} -If the fragment is traced with \texttt{x} being \texttt{4}, the following trace is +If the fragment is traced with $x_1$ being \texttt{4}, the following trace is produced: % \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(x == 4) -y = y + x +guard($x_1$ == 4) +$y_2$ = $y_1$ + $x_1$ \end{lstlisting} -In the trace above, the value of \texttt{x} is statically known thanks to the +In the trace above, the value of $x_1$ is statically known thanks to the guard. Remember that a guard is a runtime check. The above trace will run to -completion when \texttt{x == 4}. If the check fails, execution of the trace is +completion when $x_1$ \texttt{== 4}. If the check fails, execution of the trace is stopped and the interpreter continues to run. There are cases in which it is useful to turn an arbitrary variable @@ -410,7 +410,7 @@ Let's make this more concrete. 
If we trace a call to the following function: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -def f2(x, y): +def f1(x, y): z = x * 2 + 1 return z + y \end{lstlisting} @@ -418,18 +418,18 @@ We get a trace that looks like this: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -v1 = x * 2 -z = v1 + 1 -v2 = z + y -return(v2) +$v_1$ = $x_1$ * 2 +$z_1$ = $v_1$ + 1 +$v_2$ = $z_1$ + $y_1$ +return($v_2$) \end{lstlisting} Observe how the first two operations could be constant-folded if the value of -\texttt{x} were known. Let's assume that the value of \texttt{x} can vary, but does so +$x_1$ were known. Let's assume that the value of \texttt{x} in the Python code can vary, but does so rarely, i.e. only takes a few different values at runtime. If this is the case, we can add a hint to promote \texttt{x}, like this: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -def f2(x, y): +def f1(x, y): x = hint(x, promote=True) z = x * 2 + 1 return z + y @@ -444,44 +444,44 @@ operation at the beginning: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(x == 4) -v1 = x * 2 -z = v1 + 1 -v2 = z + y +guard($x_1$ == 4) +$v_1$ = $x_1$ * 2 +$z_1$ = $v_1$ + 1 +$v_2$ = $z_1$ + $y_1$ return(v2) \end{lstlisting} The promotion is turned into a \texttt{guard} operation in the trace. The guard -captures the value of \texttt{x} as it was at runtime. From the point of view of the +captures the value of $x_1$ as it was at runtime. From the point of view of the optimizer, this guard is not any different than the one produced by the \texttt{if} statement in the example above. 
After the guard, the rest of the trace can -assume that \texttt{x} is equal to \texttt{4}, meaning that the optimizer will turn this +assume that $x_1$ is equal to \texttt{4}, meaning that the optimizer will turn this trace into: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(x == 4) -v2 = 9 + y -return(v2) +guard($x_1$ == 4) +$v_2$ = 9 + $y_1$ +return($v_2$) \end{lstlisting} Notice how the first two arithmetic operations were constant folded. The hope is that the guard is executed quicker than the multiplication and the addition that was now optimized away. -If this trace is executed with values of \texttt{x} other than \texttt{4}, the guard will +If this trace is executed with values of $x_1$ other than \texttt{4}, the guard will fail, and execution will continue in the interpreter. If the guard fails often enough, a new trace will be started from the guard. This other trace will -capture a different value of \texttt{x}. If it is e.g. \texttt{2}, then the optimized +capture a different value of $x_1$. If it is e.g. \texttt{2}, then the optimized trace looks like this: \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(x == 2) -v2 = 5 + y -return(v2) +guard($x_1$ == 2) +$v_2$ = 5 + $y_1$ +return($v_2$) \end{lstlisting} This new trace will be attached to the guard instruction of the first trace. If -\texttt{x} takes on even more values, a new trace will eventually be made for all of them, +$x_1$ takes on even more values, a new trace will eventually be made for all of them, linking them into a chain. This is clearly not desirable, so we should promote only variables that don't vary much. However, adding a promotion hint will never produce wrong results. It might just lead to too much assembler code. 
@@ -527,15 +527,15 @@ trace (note how the call to \texttt{compute} is inlined): % \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -x = a.x -v1 = x * 2 -v2 = v1 + 1 -v3 = v2 + val -a.y = v3 +$x_1$ = $a_1$.x +$v_1$ = $x_1$ * 2 +$v_2$ = $v_1$ + 1 +$v_3$ = $v_2$ + $val_1$ +$a_1$.y = $v_3$ \end{lstlisting} In this case, adding a promote of \texttt{self} in the \texttt{f} method to get rid of the -computation of the first few operations does not help. Even if \texttt{a} is a +computation of the first few operations does not help. Even if $a_1$ is a constant reference to an object, reading the \texttt{x} field does not necessarily always yield the same value. To solve this problem, there is another annotation, which lets the interpreter author communicate invariants to the optimizer. In @@ -562,10 +562,10 @@ Now the trace will look like this: % \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(a == 0xb73984a8) -v1 = compute(a) -v2 = v1 + val -a.y = v2 +guard($a_1$ == 0xb73984a8) +$v_1$ = compute($a_1$) +$v_2$ = $v_1$ + $val_1$ +$a_1$.y = $v_2$ \end{lstlisting} Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used @@ -576,9 +576,9 @@ trace looks like this: % \begin{lstlisting}[mathescape,basicstyle=\ttfamily] -guard(a == 0xb73984a8) -v2 = 9 + val -a.y = v2 +guard($a_1$ == 0xb73984a8) +$v_2$ = 9 + $val_1$ +$a_1$.y = $v_2$ \end{lstlisting} (assuming that the \texttt{x} field's value is \texttt{4}). 
From commits-noreply at bitbucket.org Sun Mar 27 12:36:19 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 12:36:19 +0200 (CEST) Subject: [pypy-svn] pypy jit-str_in_preamble: testing boxed virtual strings Message-ID: <20110327103619.B558C282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42979:dfaced0323f6 Date: 2011-03-27 11:14 +0200 http://bitbucket.org/pypy/pypy/changeset/dfaced0323f6/ Log: testing boxed virtual strings diff --git a/pypy/jit/metainterp/test/test_string.py b/pypy/jit/metainterp/test/test_string.py --- a/pypy/jit/metainterp/test/test_string.py +++ b/pypy/jit/metainterp/test/test_string.py @@ -337,6 +337,58 @@ return sa assert self.meta_interp(f, [0]) == f(0) + def test_virtual_strings_direct(self): + _str = self._str + fillers = _str("abcdefghijklmnopqrstuvwxyz") + data = _str("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + + mydriver = JitDriver(reds = ['line', 'noise', 'res'], greens = []) + def f(): + line = data + noise = fillers + ratio = len(line) // len(noise) + res = data[0:0] + while line and noise: + mydriver.jit_merge_point(line=line, noise=noise, res=res) + if len(line) // len(noise) > ratio: + c, line = line[0], line[1:] + else: + c, noise = noise[0], noise[1:] + res += c + return res + noise + line + s1 = self.meta_interp(f, []) + s2 = f() + for c1, c2 in zip(s1.chars, s2): + assert c1==c2 + + def test_virtual_strings_boxed(self): + _str = self._str + fillers = _str("abcdefghijklmnopqrstuvwxyz") + data = _str("ABCDEFGHIJKLMNOPQRSTUVWXYZ") + class Str(object): + def __init__(self, value): + self.value = value + mydriver = JitDriver(reds = ['ratio', 'line', 'noise', 'res'], + greens = []) + def f(): + line = Str(data) + noise = Str(fillers) + ratio = len(line.value) // len(noise.value) + res = Str(data[0:0]) + while line.value and noise.value: + mydriver.jit_merge_point(line=line, noise=noise, res=res, + ratio=ratio) + if len(line.value) // len(noise.value) > ratio: + c, line = line.value[0], 
Str(line.value[1:]) + else: + c, noise = noise.value[0], Str(noise.value[1:]) + res = Str(res.value + c) + return res.value + noise.value + line.value + s1 = self.meta_interp(f, []) + s2 = f() + for c1, c2 in zip(s1.chars, s2): + assert c1==c2 + #class TestOOtype(StringTests, OOJitMixin): # CALL = "oosend" From commits-noreply at bitbucket.org Sun Mar 27 12:36:20 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 12:36:20 +0200 (CEST) Subject: [pypy-svn] pypy jit-str_in_preamble: proper cloning of values when transferin them to next iteration Message-ID: <20110327103620.DF766282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42980:025d79e40a22 Date: 2011-03-27 12:35 +0200 http://bitbucket.org/pypy/pypy/changeset/025d79e40a22/ Log: proper cloning of values when transferin them to next iteration diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -90,12 +90,12 @@ fieldvalue = optheap.getvalue(op.getarg(1)) self.remember_field_value(structvalue, fieldvalue) - def get_reconstructed(self, optimizer, valuemap): + def get_cloned(self, optimizer, valuemap): assert self._lazy_setfield is None cf = CachedField() for structvalue, fieldvalue in self._cached_fields.iteritems(): - structvalue2 = structvalue.get_reconstructed(optimizer, valuemap) - fieldvalue2 = fieldvalue .get_reconstructed(optimizer, valuemap) + structvalue2 = structvalue.get_cloned(optimizer, valuemap) + fieldvalue2 = fieldvalue .get_cloned(optimizer, valuemap) cf._cached_fields[structvalue2] = fieldvalue2 return cf @@ -130,7 +130,7 @@ assert 0 # was: new.lazy_setfields = self.lazy_setfields for descr, d in self.cached_fields.items(): - new.cached_fields[descr] = d.get_reconstructed(optimizer, valuemap) + new.cached_fields[descr] = d.get_cloned(optimizer, valuemap) new.cached_arrayitems = {} for descr, d in 
self.cached_arrayitems.items(): @@ -138,16 +138,17 @@ new.cached_arrayitems[descr] = newd for value, cache in d.items(): newcache = CachedArrayItems() - newd[value.get_reconstructed(optimizer, valuemap)] = newcache + newd[value.get_cloned(optimizer, valuemap)] = newcache if cache.var_index_item: newcache.var_index_item = \ - cache.var_index_item.get_reconstructed(optimizer, valuemap) + cache.var_index_item.get_cloned(optimizer, valuemap) if cache.var_index_indexvalue: newcache.var_index_indexvalue = \ - cache.var_index_indexvalue.get_reconstructed(optimizer, valuemap) + cache.var_index_indexvalue.get_cloned(optimizer, + valuemap) for index, fieldvalue in cache.fixed_index_items.items(): newcache.fixed_index_items[index] = \ - fieldvalue.get_reconstructed(optimizer, valuemap) + fieldvalue.get_cloned(optimizer, valuemap) return new diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -32,9 +32,16 @@ known_class = None intbound = None - def __init__(self, box): + def __init__(self, box, level=None, known_class=None, intbound=None): self.box = box - self.intbound = IntBound(MININT, MAXINT) #IntUnbounded() + if level is not None: + self.level = level + self.known_class = known_class + if intbound: + self.intbound = intbound + else: + self.intbound = IntBound(MININT, MAXINT) #IntUnbounded() + if isinstance(box, Const): self.make_constant(box) # invariant: box is a Const if and only if level == LEVEL_CONSTANT @@ -51,23 +58,27 @@ boxes.append(self.force_box()) already_seen[self.get_key_box()] = None - def get_reconstructed(self, optimizer, valuemap, force_if_needed=True): + def get_cloned(self, optimizer, valuemap, force_if_needed=True): if self in valuemap: return valuemap[self] - new = self.reconstruct_for_next_iteration(optimizer) + new = self.clone_for_next_iteration(optimizer) if new is None: if force_if_needed: 
new = OptValue(self.force_box()) else: return None + else: + assert new.__class__ is self.__class__ + assert new.is_virtual() == self.is_virtual() valuemap[self] = new - self.reconstruct_childs(new, valuemap) + self.clone_childs(new, valuemap) return new - def reconstruct_for_next_iteration(self, optimizer): - return self + def clone_for_next_iteration(self, optimizer): + return OptValue(self.box, self.level, self.known_class, + self.intbound.clone()) - def reconstruct_childs(self, new, valuemap): + def clone_childs(self, new, valuemap): pass def get_args_for_fail(self, modifier): @@ -166,6 +177,9 @@ def __init__(self, box): self.make_constant(box) + def clone_for_next_iteration(self, optimizer): + return self + CONST_0 = ConstInt(0) CONST_1 = ConstInt(1) CVAL_ZERO = ConstantValue(CONST_0) @@ -305,13 +319,13 @@ new.interned_refs = self.interned_refs new.bool_boxes = {} for value in new.bool_boxes.keys(): - new.bool_boxes[value.get_reconstructed(new, valuemap)] = None + new.bool_boxes[value.get_cloned(new, valuemap)] = None # FIXME: Move to rewrite.py new.loop_invariant_results = {} for key, value in self.loop_invariant_results.items(): new.loop_invariant_results[key] = \ - value.get_reconstructed(new, valuemap) + value.get_cloned(new, valuemap) new.pure_operations = self.pure_operations new.producer = self.producer @@ -320,8 +334,8 @@ for box, value in self.values.items(): box = new.getinterned(box) force = box in surviving_boxes - value = value.get_reconstructed(new, valuemap, - force_if_needed=force) + value = value.get_cloned(new, valuemap, + force_if_needed=force) if value is not None: new.values[box] = value diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py --- a/pypy/jit/metainterp/optimizeopt/virtualize.py +++ b/pypy/jit/metainterp/optimizeopt/virtualize.py @@ -46,7 +46,7 @@ def _really_force(self): raise NotImplementedError("abstract base") - def reconstruct_for_next_iteration(self, _optimizer): + def 
clone_for_next_iteration(self, _optimizer): return None def get_fielddescrlist_cache(cpu): @@ -144,17 +144,16 @@ else: boxes.append(self.box) - def reconstruct_for_next_iteration(self, optimizer): - self.optimizer = optimizer - return self + def clone_for_next_iteration(self, optimizer): + raise NotImplementedError - def reconstruct_childs(self, new, valuemap): + def clone_childs(self, new, valuemap): assert isinstance(new, AbstractVirtualStructValue) if new.box is None: lst = self._get_field_descr_list() for ofs in lst: new._fields[ofs] = \ - self._fields[ofs].get_reconstructed(new.optimizer, valuemap) + self._fields[ofs].get_cloned(new.optimizer, valuemap) class VirtualValue(AbstractVirtualStructValue): level = optimizer.LEVEL_KNOWNCLASS @@ -175,6 +174,12 @@ field_names = [field.name for field in self._fields] return "" % (cls_name, field_names) + def clone_for_next_iteration(self, optimizer): + new = VirtualValue(optimizer, self.known_class, self.keybox, + self.source_op) + new.box = self.box + return new + class VStructValue(AbstractVirtualStructValue): def __init__(self, optimizer, structdescr, keybox, source_op=None): @@ -185,6 +190,12 @@ fielddescrs = self._get_field_descr_list() return modifier.make_vstruct(self.structdescr, fielddescrs) + def clone_for_next_iteration(self, optimizer): + new = VStructValue(optimizer, self.structdescr, self.keybox, + self.source_op) + new.box = self.box + return new + class VArrayValue(AbstractVirtualValue): def __init__(self, optimizer, arraydescr, size, keybox, source_op=None): @@ -247,16 +258,18 @@ else: boxes.append(self.box) - def reconstruct_for_next_iteration(self, optimizer): - self.optimizer = optimizer - return self + def clone_for_next_iteration(self, optimizer): + new = VArrayValue(optimizer, self.arraydescr, len(self._items), + self.keybox, self.source_op) + new.box = self.box + return new - def reconstruct_childs(self, new, valuemap): + def clone_childs(self, new, valuemap): assert isinstance(new, VArrayValue) 
if new.box is None: for i in range(len(self._items)): - new._items[i] = self._items[i].get_reconstructed(new.optimizer, - valuemap) + new._items[i] = self._items[i].get_cloned(new.optimizer, + valuemap) class OptVirtualize(optimizer.Optimization): "Virtualize objects until they escape." From commits-noreply at bitbucket.org Sun Mar 27 13:06:46 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 13:06:46 +0200 (CEST) Subject: [pypy-svn] pypy jit-str_in_preamble: forcing unsupported vritauls changes the VirtualState Message-ID: <20110327110646.43F76282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42981:31a39eb2e87b Date: 2011-03-27 13:06 +0200 http://bitbucket.org/pypy/pypy/changeset/31a39eb2e87b/ Log: forcing unsupported vritauls changes the VirtualState diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -262,13 +262,13 @@ assert jumpop.getdescr() is loop.token jump_args = jumpop.getarglist() jumpop.initarglist([]) - #virtual_state = [self.getvalue(a).is_virtual() for a in jump_args] - modifier = VirtualStateAdder(self.optimizer) - virtual_state = modifier.get_virtual_state(jump_args) loop.preamble.operations = self.optimizer.newoperations preamble_optimizer = self.optimizer self.optimizer = self.optimizer.reconstruct_for_next_iteration(jump_args) + modifier = VirtualStateAdder(self.optimizer) + virtual_state = modifier.get_virtual_state(jump_args) + try: inputargs = self.inline(self.cloned_operations, loop.inputargs, jump_args) diff --git a/pypy/jit/metainterp/test/test_string.py b/pypy/jit/metainterp/test/test_string.py --- a/pypy/jit/metainterp/test/test_string.py +++ b/pypy/jit/metainterp/test/test_string.py @@ -389,6 +389,23 @@ for c1, c2 in zip(s1.chars, s2): assert c1==c2 + def test_string_in_virtual_state(self): + _str = self._str + s1 = _str("a") + s2 = 
_str("AA") + mydriver = JitDriver(reds = ['i', 'n', 'sa'], greens = []) + def f(n): + sa = s1 + i = 0 + while i < n: + mydriver.jit_merge_point(i=i, n=n, sa=sa) + if i&4 == 0: + sa += s1 + else: + sa += s2 + i += 1 + return len(sa) + assert self.meta_interp(f, [16]) == f(16) #class TestOOtype(StringTests, OOJitMixin): # CALL = "oosend" From commits-noreply at bitbucket.org Sun Mar 27 15:41:55 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 15:41:55 +0200 (CEST) Subject: [pypy-svn] pypy jit-usable_retrace: Proper handling of resuming from a GUARD_NO_OVERFLOW inlined from the short preamble. This fix should probably go into trunk aswell. Message-ID: <20110327134155.C61E036C206@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42982:b4c589b6c58c Date: 2011-03-27 15:41 +0200 http://bitbucket.org/pypy/pypy/changeset/b4c589b6c58c/ Log: Proper handling of resuming from a GUARD_NO_OVERFLOW inlined from the short preamble. This fix should probably go into trunk aswell. 
diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -460,9 +460,8 @@ res += ovfcheck(x1 * x1) except OverflowError: res += 1 - #if y>2)&1==0: y -= 1 - if (y>>2)&1==0: + if y&4 == 0: x1, x2 = x2, x1 return res res = self.meta_interp(f, [6, sys.maxint, 32, 48]) diff --git a/pypy/jit/metainterp/compile.py b/pypy/jit/metainterp/compile.py --- a/pypy/jit/metainterp/compile.py +++ b/pypy/jit/metainterp/compile.py @@ -317,7 +317,7 @@ if self.must_compile(metainterp_sd, jitdriver_sd): return self._trace_and_compile_from_bridge(metainterp_sd, jitdriver_sd) - else: + else: from pypy.jit.metainterp.blackhole import resume_in_blackhole resume_in_blackhole(metainterp_sd, jitdriver_sd, self) assert 0, "unreachable" diff --git a/pypy/jit/metainterp/pyjitpl.py b/pypy/jit/metainterp/pyjitpl.py --- a/pypy/jit/metainterp/pyjitpl.py +++ b/pypy/jit/metainterp/pyjitpl.py @@ -1716,11 +1716,12 @@ self.seen_loop_header_for_jdindex = -1 if isinstance(key, compile.ResumeAtPositionDescr): self.seen_loop_header_for_jdindex = self.jitdriver_sd.index - dont_change_position = True + resume_at_position = True else: - dont_change_position = False + resume_at_position = False try: - self.prepare_resume_from_failure(key.guard_opnum, dont_change_position) + self.prepare_resume_from_failure(key.guard_opnum, + resume_at_position) if self.resumekey_original_loop_token is None: # very rare case raise SwitchToBlackhole(ABORT_BRIDGE) self.interpret() @@ -1824,10 +1825,10 @@ history.set_future_values(self.cpu, residual_args) return loop_token - def prepare_resume_from_failure(self, opnum, dont_change_position=False): + def prepare_resume_from_failure(self, opnum, resume_at_position=False): frame = self.framestack[-1] if opnum == rop.GUARD_TRUE: # a goto_if_not that jumps only now - if not dont_change_position: + if not resume_at_position: frame.pc = 
frame.jitcode.follow_jump(frame.pc) elif opnum == rop.GUARD_FALSE: # a goto_if_not that stops jumping pass @@ -1838,6 +1839,8 @@ opnum == rop.GUARD_NONNULL_CLASS): pass # the pc is already set to the *start* of the opcode elif opnum == rop.GUARD_NO_EXCEPTION or opnum == rop.GUARD_EXCEPTION: + if resume_at_position: + assert False, "FIXME: How do we handle exceptions here?" exception = self.cpu.grab_exc_value() if exception: self.execute_ll_raised(lltype.cast_opaque_ptr(rclass.OBJECTPTR, @@ -1849,11 +1852,14 @@ except ChangeFrame: pass elif opnum == rop.GUARD_NO_OVERFLOW: # an overflow now detected - self.execute_raised(OverflowError(), constant=True) - try: - self.finishframe_exception() - except ChangeFrame: - pass + if resume_at_position: + self.clear_exception() + else: + self.execute_raised(OverflowError(), constant=True) + try: + self.finishframe_exception() + except ChangeFrame: + pass elif opnum == rop.GUARD_OVERFLOW: # no longer overflowing self.clear_exception() else: diff --git a/pypy/jit/metainterp/blackhole.py b/pypy/jit/metainterp/blackhole.py --- a/pypy/jit/metainterp/blackhole.py +++ b/pypy/jit/metainterp/blackhole.py @@ -1243,13 +1243,13 @@ assert kind == 'v' return lltype.nullptr(rclass.OBJECTPTR.TO) - def _prepare_resume_from_failure(self, opnum, dont_change_position=False): + def _prepare_resume_from_failure(self, opnum, resume_at_position=False): from pypy.jit.metainterp.resoperation import rop # if opnum == rop.GUARD_TRUE: # Produced directly by some goto_if_not_xxx() opcode that did not # jump, but which must now jump. The pc is just after the opcode. - if not dont_change_position: + if not resume_at_position: self.position = self.jitcode.follow_jump(self.position) # elif opnum == rop.GUARD_FALSE: @@ -1280,7 +1280,8 @@ elif opnum == rop.GUARD_NO_OVERFLOW: # Produced by int_xxx_ovf(). The pc is just after the opcode. # We get here because it did not used to overflow, but now it does. 
- return get_llexception(self.cpu, OverflowError()) + if not resume_at_position: + return get_llexception(self.cpu, OverflowError()) # elif opnum == rop.GUARD_OVERFLOW: # Produced by int_xxx_ovf(). The pc is just after the opcode. @@ -1410,12 +1411,12 @@ resumedescr, all_virtuals) if isinstance(resumedescr, ResumeAtPositionDescr): - dont_change_position = True + resume_at_position = True else: - dont_change_position = False + resume_at_position = False current_exc = blackholeinterp._prepare_resume_from_failure( - resumedescr.guard_opnum, dont_change_position) + resumedescr.guard_opnum, resume_at_position) try: _run_forever(blackholeinterp, current_exc) From commits-noreply at bitbucket.org Sun Mar 27 15:59:02 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sun, 27 Mar 2011 15:59:02 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: vague first draft of some of the related work parts Message-ID: <20110327135902.0179D282BD7@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3423:c264c1dfa8fb Date: 2011-03-27 15:58 +0200 http://bitbucket.org/pypy/extradoc/changeset/c264c1dfa8fb/ Log: vague first draft of some of the related work parts diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -5,6 +5,7 @@ \usepackage{color} \usepackage{ulem} \usepackage{xspace} +\usepackage{relsize} \usepackage{epsfig} \usepackage{amssymb} \usepackage{amsmath} @@ -814,13 +815,52 @@ \section{Related Work} -partial evaluation \cite{futamura_partial_1999} \cite{jones_partial_1993} +The very first meta-tracer is described by Sullivan et. al. +\cite{sullivan_dynamic_2003}. They used Dynamo RIO, the successor of Dynamo +\cite{XXX} to trace through a small synthetic interpreter. As in Dynamo, tracing +happens on the machine code level. 
The tracer is instructed by some hints in the +tiny interpreter where the main interpreter loop is and for how long to trace to +match loops in the user-level functions. These hints are comparable to the one +PyPy uses for the same reasons \cite{bolz_tracing_2009}. Their approach suffers +mostly from the low abstraction level that machine code provides. -earlier examples of promotion \cite{carl_friedrich_bolz_towards_????}, \cite{armin_rigo_jit_2007} +Yermolovich et. al. describe the use of the Tamarin JavaScript tracing JIT as a +meta-tracer for a Lua interpreter. They compile the normal Lua interpreter in C +to ActionScript bytecode. Again, the interpreter is annotated with some hints +that indicate the main interpreter loop to the tracer. No further hints are +described in the paper. There is no comparison of their system to the original +Lua VM in C, which makes it hard to judge the effectiveness of the approach. -meta-tracers: SPUR \cite{bebenita_spur:_2010} -tamarin/lua stuff \cite{yermolovich_optimization_2009} -Dynamo \cite{sullivan_dynamic_2003} +SPUR \cite{bebenita_spur:_2010} is a tracing JIT for CIL bytecode, which is then +used to trace through an JavaScript implementation written in C\#. The +JavaScript implementation compiles JavaScript to CIL bytecode together with an +implementation of the JavaScript object model. The object model uses maps +and inline caches to speed up operations on objects. The tracer tracers through +the compiled JavaScript functions and the object model. SPUR contains two hints +that can be used to influence the tracer, one to prevent tracing of a C\# +function and one to force unrolling of a loop (PyPy has equivalent hints, but +they were not described in this paper). + + +Partial evaluation \cite{jones_partial_1993} tries to automatically transform +interpreters into compilers using the second futamura projection +\cite{futamura_partial_1999}. 
XXX + +An early attempt at building a general environment for implementing languages +efficiently is described by Wolczko et. al. \cite{mario_wolczko_towards_1999}. +They implement Java and Smalltalk on top of the SELF VM by compiling the +languages to SELF. The SELF JIT is good enough to optimize the compiled code +very well. We believe the approach to be restricted to languages that are +similar enough to SELF. XXX + +Somewhat relatedly, the proposed ``invokedynamic'' bytecode +\cite{rose_bytecodes_2009} that will be added to the JVM is supposed to make the +implementation of dynamic languages on top of JVMs easier. The bytecode gives +the language implementor control over how the JIT optimizes the language's +features. XXX + +%We already explored promotion in other context, such as earlier examples of +%promotion \cite{carl_friedrich_bolz_towards_????}, \cite{armin_rigo_jit_2007} \cite{chambers_efficient_1989} maps \cite{hoelzle_optimizing_1994} Type feedback @@ -834,10 +874,6 @@ jruby used versions at some point, jvm-l mailing list discusses them -\cite{mario_wolczko_towards_1999} - -invokedynamic tries to give control to the language implementor about PICs and -deoptimization \section{Conclusion and Next Steps} diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -152,6 +152,21 @@ pages = {465{\textendash}478} }, + at article{rose_bytecodes_2009, + series = {{VMIL} '09}, + title = {Bytecodes meet combinators: invokedynamic on the {JVM}}, + location = {Orlando, Florida}, + shorttitle = {Bytecodes meet combinators}, + doi = {10.1145/1711506.1711508}, + abstract = {The Java Virtual Machine {(JVM)} has been widely adopted in part because of its classfile format, which is portable, compact, modular, verifiable, and reasonably easy to work with. 
However, it was designed for just one {language---Java---and} so when it is used to express programs in other source languages, there are often "pain points" which retard both development and execution. The most salient pain points show up at a familiar place, the method call site. To generalize method calls on the {JVM,} the {JSR} 292 Expert Group has designed a new invokedynamic instruction that provides user-defined call site semantics. In the chosen design, invokedynamic serves as a hinge-point between two coexisting kinds of intermediate language: bytecode containing dynamic call sites, and combinator graphs specifying call targets. A dynamic compiler can traverse both representations simultaneously, producing optimized machine code which is the seamless union of both kinds of input. As a final twist, the user-defined linkage of a call site may change, allowing the code to adapt as the application evolves over time. The result is a system balancing the conciseness of bytecode with the dynamic flexibility of function pointers.}, + journal = {Proceedings of the Third Workshop on Virtual Machines and Intermediate Languages}, + author = {John R Rose}, + year = {2009}, + note = {{ACM} {ID:} 1711508}, + keywords = {bytecode, code generation, combinator}, + pages = {2:1{\textendash}2:11} +}, + @article{bolz_allocation_2011, series = {{PEPM} '11}, title = {Allocation removal by partial evaluation in a tracing {JIT}}, @@ -298,6 +313,17 @@ pages = {1--12} }, + at techreport{andreas_gal_incremental_2006, + title = {Incremental Dynamic Code Generation with Trace Trees}, + abstract = {The unit of compilation for traditional just-in-time compilers is the method. We have explored trace-based compilation, in which the unit of compilation is a loop, potentially spanning multiple methods and even library code. 
Using a new intermediate representation that is discovered and updated lazily on-demand while the program is being executed, our compiler generates code that is competitive with traditional dynamic compilers, but that uses only a fraction of the compile time and memory footprint.}, + number = {{ICS-TR-06-16}}, + institution = {Donald Bren School of Information and Computer Science, University of California, Irvine}, + author = {Andreas Gal and Michael Franz}, + month = nov, + year = {2006}, + pages = {11} +}, + @inproceedings{gal_hotpathvm:_2006, address = {Ottawa, Ontario, Canada}, title = {{HotpathVM:} an effective {JIT} compiler for resource-constrained devices}, @@ -346,7 +372,13 @@ isbn = {978-1-60558-769-1}, url = {http://portal.acm.org/citation.cfm?id=1640134.1640147}, doi = {10.1145/1640134.1640147}, - abstract = {Creating an interpreter is a simple and fast way to implement a dynamic programming language. With this ease also come major drawbacks. Interpreters are significantly slower than compiled machine code because they have a high dispatch overhead and cannot perform optimizations. To overcome these limitations, interpreters are commonly combined with just-in-time compilers to improve the overall performance. However, this means that a just-in-time compiler has to be implemented for each language.}, + abstract = {Creating an interpreter is a simple and fast way to implement a dynamic programming language. With this ease also come major drawbacks. Interpreters are significantly slower than compiled machine code because they have a high dispatch overhead and cannot perform optimizations. To overcome these limitations, interpreters are commonly combined with just-in-time compilers to improve the overall performance. However, this means that a just-in-time compiler has to be implemented for each language. 
+ +We explore the approach of taking an interpreter of a dynamic +language and running it on top of an optimizing trace-based virtual machine, i.e., we run a guest {VM} on top of a host {VM.} The host {VM} uses trace recording to observe the guest {VM} executing the application program. Each recorded trace represents a sequence +of guest {VM} bytecodes corresponding to a given execution path +through the application program. The host {VM} optimizes and compiles these traces to machine code, thus eliminating the need for a custom just-in-time compiler for the guest {VM.} The guest {VM} only needs to provide basic information about its interpreter loop to the +host {VM.}}, booktitle = {Proceedings of the 5th symposium on Dynamic languages}, publisher = {{ACM}}, author = {Alexander Yermolovich and Christian Wimmer and Michael Franz}, From commits-noreply at bitbucket.org Sun Mar 27 17:02:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 17:02:17 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: Write the test by making sure that all opts are enabled. Message-ID: <20110327150217.4D3DC282BD7@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42983:b073f54b693e Date: 2011-03-27 16:15 +0200 http://bitbucket.org/pypy/pypy/changeset/b073f54b693e/ Log: Write the test by making sure that all opts are enabled. The test still passes, due to another issue (next checkin). Additionally, fix the fact that OS_LIBFFI_CALL calls can have random side-effects on random fields through callbacks. 
diff --git a/pypy/jit/codewriter/call.py b/pypy/jit/codewriter/call.py --- a/pypy/jit/codewriter/call.py +++ b/pypy/jit/codewriter/call.py @@ -234,6 +234,8 @@ self.readwrite_analyzer.analyze(op), self.cpu, extraeffect, oopspecindex) # + if oopspecindex != EffectInfo.OS_NONE: + assert effectinfo is not None if pure or loopinvariant: assert effectinfo is not None assert extraeffect != EffectInfo.EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE diff --git a/pypy/jit/codewriter/effectinfo.py b/pypy/jit/codewriter/effectinfo.py --- a/pypy/jit/codewriter/effectinfo.py +++ b/pypy/jit/codewriter/effectinfo.py @@ -101,6 +101,9 @@ def check_forces_virtual_or_virtualizable(self): return self.extraeffect >= self.EF_FORCES_VIRTUAL_OR_VIRTUALIZABLE + def has_random_effects(self): + return self.oopspecindex == self.OS_LIBFFI_CALL + def effectinfo_from_writeanalyze(effects, cpu, extraeffect=EffectInfo.EF_CAN_RAISE, oopspecindex=EffectInfo.OS_NONE): diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -172,7 +172,7 @@ effectinfo = None else: effectinfo = op.getdescr().get_extra_info() - if effectinfo is not None: + if effectinfo is not None and not effectinfo.has_random_effects(): # XXX we can get the wrong complexity here, if the lists # XXX stored on effectinfo are large for fielddescr in effectinfo.readonly_descrs_fields: diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -67,7 +67,7 @@ return entrypoint -def compile(f, gc, **kwds): +def compile(f, gc, enable_opts='', **kwds): from pypy.annotation.listdef import s_list_of_strings from pypy.translator.translator import TranslationContext from pypy.jit.metainterp.warmspot import apply_jit @@ -83,7 +83,7 @@ ann.build_types(f, [s_list_of_strings], 
main_entry_point=True) t.buildrtyper().specialize() if kwds['jit']: - apply_jit(t, enable_opts='') + apply_jit(t, enable_opts=enable_opts) cbuilder = genc.CStandaloneBuilder(t, f, t.config) cbuilder.generate_source() cbuilder.compile() @@ -572,59 +572,66 @@ def test_compile_framework_minimal_size_in_nursery(self): self.run('compile_framework_minimal_size_in_nursery') - def define_compile_framework_close_stack(self): - from pypy.rlib.libffi import CDLL, types, ArgChain, clibffi - from pypy.rpython.lltypesystem.ll2ctypes import libc_name - from pypy.rpython.annlowlevel import llhelper - # - class Glob(object): - pass - glob = Glob() - class X(object): - pass - # - def callback(p1, p2): - for i in range(100): - glob.lst.append(X()) - return rffi.cast(rffi.INT, 1) - CALLBACK = lltype.Ptr(lltype.FuncType([lltype.Signed, - lltype.Signed], rffi.INT)) - # - @dont_look_inside - def alloc1(): - return llmemory.raw_malloc(16) - @dont_look_inside - def free1(p): - llmemory.raw_free(p) - # - def f42(n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s): - length = len(glob.lst) - raw = alloc1() - argchain = ArgChain() - fn = llhelper(CALLBACK, rffi._make_wrapper_for(CALLBACK, callback)) - argchain = argchain.arg(rffi.cast(lltype.Signed, raw)) - argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 2)) - argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 8)) - argchain = argchain.arg(rffi.cast(lltype.Signed, fn)) - glob.c_qsort.call(argchain, lltype.Void) - free1(raw) - check(len(glob.lst) > length) - del glob.lst[:] + +def test_close_stack(): + from pypy.rlib.libffi import CDLL, types, ArgChain, clibffi + from pypy.rpython.lltypesystem.ll2ctypes import libc_name + from pypy.rpython.annlowlevel import llhelper + from pypy.jit.metainterp.optimizeopt import ALL_OPTS_NAMES + # + class Glob(object): + pass + glob = Glob() + class X(object): + pass + # + def callback(p1, p2): + for i in range(100): + glob.lst.append(X()) + return rffi.cast(rffi.INT, 1) + CALLBACK = 
lltype.Ptr(lltype.FuncType([lltype.Signed, + lltype.Signed], rffi.INT)) + # + @dont_look_inside + def alloc1(): + return llmemory.raw_malloc(16) + @dont_look_inside + def free1(p): + llmemory.raw_free(p) + # + def f42(): + length = len(glob.lst) + c_qsort = glob.c_qsort + raw = alloc1() + fn = llhelper(CALLBACK, rffi._make_wrapper_for(CALLBACK, callback)) + argchain = ArgChain() + argchain = argchain.arg(rffi.cast(lltype.Signed, raw)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 2)) + argchain = argchain.arg(rffi.cast(rffi.SIZE_T, 8)) + argchain = argchain.arg(rffi.cast(lltype.Signed, fn)) + c_qsort.call(argchain, lltype.Void) + free1(raw) + check(len(glob.lst) > length) + del glob.lst[:] + # + def before(): + libc = CDLL(libc_name) + types_size_t = clibffi.cast_type_to_ffitype(rffi.SIZE_T) + c_qsort = libc.getpointer('qsort', [types.pointer, types_size_t, + types_size_t, types.pointer], + types.void) + glob.c_qsort = c_qsort + glob.lst = [] + # + myjitdriver = JitDriver(greens=[], reds=['n']) + def main(n, x): + before() + while n > 0: + myjitdriver.jit_merge_point(n=n) + f42() n -= 1 - return n, x, x0, x1, x2, x3, x4, x5, x6, x7, l, s - # - def before(n, x): - libc = CDLL(libc_name) - types_size_t = clibffi.cast_type_to_ffitype(rffi.SIZE_T) - c_qsort = libc.getpointer('qsort', [types.pointer, types_size_t, - types_size_t, types.pointer], - types.void) - glob.c_qsort = c_qsort - glob.lst = [] - return (n, None, None, None, None, None, None, - None, None, None, None, None) - # - return before, f42, None - - def test_compile_framework_close_stack(self): - self.run('compile_framework_close_stack') + # + res = compile_and_run(get_entry(get_g(main)), DEFL_GC, + gcrootfinder="asmgcc", jit=True, + enable_opts=ALL_OPTS_NAMES) + assert int(res) == 20 From commits-noreply at bitbucket.org Sun Mar 27 17:02:18 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 17:02:18 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: Phew. 
Finally got a failing test. The libffi.Func needs to be promoted, which Message-ID: <20110327150218.471AD282BD7@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42984:52d990937d2e Date: 2011-03-27 17:02 +0200 http://bitbucket.org/pypy/pypy/changeset/52d990937d2e/ Log: Phew. Finally got a failing test. The libffi.Func needs to be promoted, which is done from the code in pypy/module/_ffi, but was not done automatically for direct usage. diff --git a/pypy/rlib/libffi.py b/pypy/rlib/libffi.py --- a/pypy/rlib/libffi.py +++ b/pypy/rlib/libffi.py @@ -294,6 +294,7 @@ # the optimizer will fail to recognize the pattern and won't turn it # into a fast CALL. Note that "arg = arg.next" is optimized away, # assuming that archain is completely virtual. + self = jit.hint(self, promote=True) if argchain.numargs != len(self.argtypes): raise TypeError, 'Wrong number of arguments: %d expected, got %d' %\ (argchain.numargs, len(self.argtypes)) diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -85,7 +85,7 @@ if kwds['jit']: apply_jit(t, enable_opts=enable_opts) cbuilder = genc.CStandaloneBuilder(t, f, t.config) - cbuilder.generate_source() + cbuilder.generate_source(defines=cbuilder.DEBUG_DEFINES) cbuilder.compile() return cbuilder From commits-noreply at bitbucket.org Sun Mar 27 17:19:24 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sun, 27 Mar 2011 17:19:24 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: gray out some more ops (thanks armin) Message-ID: <20110327151924.9533D282BDD@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3424:69431dd1b484 Date: 2011-03-27 17:19 +0200 http://bitbucket.org/pypy/extradoc/changeset/69431dd1b484/ Log: gray out some more ops (thanks armin) diff --git a/talk/icooolps2011/code/trace2.tex b/talk/icooolps2011/code/trace2.tex --- 
a/talk/icooolps2011/code/trace2.tex +++ b/talk/icooolps2011/code/trace2.tex @@ -23,8 +23,8 @@ |{\color{gray}guard($map_3$ == 0xb74af4a8)}| |{\color{gray}$index_3$ = Map.getindex($map_3$, "c")}| |{\color{gray}guard($index_3$ == -1)}| -$cls_2$ = $inst_1$.cls -$methods_2$ = $cls_2$.methods +|{\color{gray}$cls_2$ = $inst_1$.cls}| +|{\color{gray}$methods_2$ = $cls_2$.methods}| $result_3$ = dict.get($methods_2$, "c") guard($result_3$ is not None) From commits-noreply at bitbucket.org Sun Mar 27 18:53:26 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Sun, 27 Mar 2011 18:53:26 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: some changes, some comments Message-ID: <20110327165326.DD222282BA1@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3425:09db589db4af Date: 2011-03-27 18:53 +0200 http://bitbucket.org/pypy/extradoc/changeset/09db589db4af/ Log: some changes, some comments diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -52,6 +52,7 @@ \newcommand\anto[1]{\nb{ANTO}{#1}} \newcommand\arigo[1]{\nb{AR}{#1}} \newcommand\fijal[1]{\nb{FIJAL}{#1}} +\newcommand\pedronis[1]{\nb{PEDRONIS}{#1}} \newcommand{\commentout}[1]{} \newcommand\ie{i.e.,\xspace} @@ -104,12 +105,12 @@ optimize its object model. This is made harder by the fact that many recent languages such as Python, JavaScript or Ruby have rather complex core object semantics. For them, implementing just an interpreter is already an arduous -task. Implementing them efficiently with a just-in-time compiler is -nigh-impossible, because or their many corner-cases. +\pedronis{XXX ardous seems a bit too strong} task. Implementing them efficiently with a just-in-time compiler (JIT) is +extremely challenging, because or their many corner-cases. It has long been an objective of the partial evaluation community to automatically produce compilers from interpreters. 
There has been a recent -renaissance of this idea using the different technique of tracing just-in-time +renaissance of this idea around the approach of tracing just-in-time compilers. A number of projects have attempted this approach. SPUR \cite{bebenita_spur:_2010} is a tracing JIT for .NET together with a JavaScript implementation in C\#. PyPy \cite{armin_rigo_pypys_2006} contains a tracing JIT for RPython (a restricted @@ -119,9 +120,9 @@ on and optimized with a tracing JIT for JavaScript \cite{yermolovich_optimization_2009}. -These projects have in common that they implement a dynamic language in some -implementation language. In addition they build a tracing JIT for that implementation -language. The tracing JIT then traces through the object model of the dynamic +These projects have in common that they work one meta-level down, providing a tracing JIT for the implementation +language used to implement the dynamic language, and not for the dynamic language itself. +The tracing JIT then will trace through the object model of the dynamic language implementation. This makes the object model transparent to the tracer and its optimizations. Therefore the semantics of the dynamic language does not have to be replicated in a JIT. We call this approach \emph{meta-tracing}. @@ -195,7 +196,7 @@ \label{sub:tracing} A recently popular approach to JIT compilers is that of tracing JITs. Tracing -JITs have their origin in the Dynamo project which used the for dynamic +JITs have their origin in the Dynamo project which used them for dynamic assembler optimization \cite{bala_dynamo:_2000}. Later they were used for to implement a lightweight JIT for Java \cite{gal_hotpathvm:_2006} and for dynamic languages such as JavaScript \cite{gal_trace-based_2009}. @@ -217,8 +218,7 @@ Because the traces always correspond to a concrete execution they cannot contain any control flow splits. 
Therefore they encode the control flow decisions needed to stay on the trace with the help of \emph{guards}. Those are -operations that check that the assumptions are still true when the trace is -later executed with different values. +operations that check that the assumptions are still true when the compiled trace is later executed with different values. One disadvantage of tracing JITs which makes them not directly applicable to PyPy is that they need to encode the language semantics of the language they are @@ -249,7 +249,7 @@ \end{figure} Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left you -see the levels of execution. The CPU executes the binary of +\pedronis{XXX style: do we want to use you, or we and one} see the levels of execution. The CPU executes the binary of PyPy's Python interpreter, which consists of RPython functions that have been compiled first to C, then to machine code. The interpreter runs a Python program written by a programmer (the user). If the tracer is used, it traces operations on the level @@ -706,7 +706,7 @@ Every time the class changes, \texttt{find\_method} can potentially return a new value. -Therefore, we give every class a version number, which is changed every time a +Therefore, we give every class a version object, which is changed every time a class gets changed (i.e., the content of the \texttt{methods} dictionary changes). This means that the result of \texttt{methods.get()} for a given \texttt{(name, version)} pair will always be the same, i.e. it is a pure operation. 
To help diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index d36331dd7582c79fa5ddda451e9e9eab656ca51c..91b5aa6d7e372b1f8874c7a9754f583856469075 GIT binary patch [cut] From commits-noreply at bitbucket.org Sun Mar 27 18:58:21 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Sun, 27 Mar 2011 18:58:21 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: reworded something and fixed a typo Message-ID: <20110327165821.02CE7282BD7@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3426:9f9b83762c4e Date: 2011-03-27 12:58 -0400 http://bitbucket.org/pypy/extradoc/changeset/9f9b83762c4e/ Log: reworded something and fixed a typo diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -104,9 +104,9 @@ One of the hardest parts of implementing a dynamic language efficiently is to optimize its object model. This is made harder by the fact that many recent languages such as Python, JavaScript or Ruby have rather complex core object -semantics. For them, implementing just an interpreter is already an arduous -\pedronis{XXX ardous seems a bit too strong} task. Implementing them efficiently with a just-in-time compiler (JIT) is -extremely challenging, because or their many corner-cases. +semantics. For them, implementing just an interpreter is already a complex +task. Implementing them efficiently with a just-in-time compiler (JIT) is +extremely challenging, because of their many corner-cases. It has long been an objective of the partial evaluation community to automatically produce compilers from interpreters. 
There has been a recent From commits-noreply at bitbucket.org Sun Mar 27 19:44:19 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Sun, 27 Mar 2011 19:44:19 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: give a conceptual level view on the hints Message-ID: <20110327174419.109AF282BDD@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3427:6ace2abd3935 Date: 2011-03-27 19:44 +0200 http://bitbucket.org/pypy/extradoc/changeset/6ace2abd3935/ Log: give a conceptual level view on the hints diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -132,23 +132,28 @@ bare meta-tracing. In this paper we present two of these hints that are extensively used in the -PyPy project to improve the performance of its Python interpreter. These -hints are used to control how the optimizer of the tracing JIT can improve the -traces of the object model. More specifically, these hints influence the -constant folding optimization. The first hint make it possible to turn arbitrary -variables in the trace into constants. The second hint allows the definition of -additional foldable operations. +PyPy project to improve the performance of its Python interpreter. + +Conceptually the significant speed-ups that can be achieved with +dynamic compilation depends on feeding into compilation and exploiting +values observed at runtime that are in practice mostly constant (XXX or should we say slow varying), and +in structuring implementation code and data structures such that more +of such values are at hand. The hints that we present allow exactly to implement such feedback and exploitation in a meta-tracing context. + +Concretely these hints are used to control how the optimizer of the +tracing JIT can improve the traces of the object model. More +specifically, these hints influence the constant folding +optimization. 
The first hint make it possible to turn arbitrary +variables in the trace into constant. The +second hint allows the definition of additional foldable operations. Together these two hints can be used to express many classic implementation techniques used for object models of dynamic languages, such as maps and polymorphic inline caches. -XXX replace "turn variable into constant" into "main tool to feedback language -and semantics specific runtime information into the compilation" - The contributions of this paper are: \begin{itemize} - \item A hint to turn arbitrary variables into constants in the trace. + \item A hint to turn arbitrary variables into constants in the trace, that means the feedback of runtime information into compilation. \item A way to define new pure operations which the constant folding optimization then recognizes. \item A worked-out example of a simple object model of a dynamic language and diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 91b5aa6d7e372b1f8874c7a9754f583856469075..5868e0dd83072ad3e571f7f53a5c01404089b53a GIT binary patch [cut] From commits-noreply at bitbucket.org Sun Mar 27 20:06:44 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sun, 27 Mar 2011 20:06:44 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: - a title Message-ID: <20110327180644.30F2F282BA1@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3428:1aac706863bf Date: 2011-03-27 20:06 +0200 http://bitbucket.org/pypy/extradoc/changeset/1aac706863bf/ Log: - a title - shorten a sentence diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -76,7 +76,7 @@ \begin{document} -\title{XXX in a Tracing JIT Compiler for Efficient Dynamic Languages} +\title{Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages} \authorinfo{Carl Friedrich Bolz \and XXX} {Heinrich-Heine-Universität Düsseldorf, STUPS Group, 
Germany @@ -135,16 +135,17 @@ PyPy project to improve the performance of its Python interpreter. Conceptually the significant speed-ups that can be achieved with -dynamic compilation depends on feeding into compilation and exploiting -values observed at runtime that are in practice mostly constant (XXX or should we say slow varying), and -in structuring implementation code and data structures such that more -of such values are at hand. The hints that we present allow exactly to implement such feedback and exploitation in a meta-tracing context. +dynamic compilation depend on feeding into compilation and exploiting +values observed at runtime that are slow-varying in practice. To exploit the +runtime feedback, the implementation code and data structures need to be +structured so that many such values are at hand. The hints that we present allow +exactly to implement such feedback and exploitation in a meta-tracing context. Concretely these hints are used to control how the optimizer of the tracing JIT can improve the traces of the object model. More specifically, these hints influence the constant folding -optimization. The first hint make it possible to turn arbitrary -variables in the trace into constant. The +optimization. The first hint makes it possible to turn arbitrary +variables in the trace into constant by feeding back runtime values. The second hint allows the definition of additional foldable operations. Together these two hints can be used to express many classic implementation @@ -153,7 +154,8 @@ The contributions of this paper are: \begin{itemize} - \item A hint to turn arbitrary variables into constants in the trace, that means the feedback of runtime information into compilation. + \item A hint to turn arbitrary variables into constants in the trace, that + means the feedback of runtime information into compilation. \item A way to define new pure operations which the constant folding optimization then recognizes. 
\item A worked-out example of a simple object model of a dynamic language and From commits-noreply at bitbucket.org Sun Mar 27 20:19:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 20:19:17 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: Typos and typographic details. Message-ID: <20110327181917.5EC96282BDD@codespeak.net> Author: Armin Rigo Branch: extradoc Changeset: r3429:d239d28e11cf Date: 2011-03-27 17:39 +0200 http://bitbucket.org/pypy/extradoc/changeset/d239d28e11cf/ Log: Typos and typographic details. diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -134,7 +134,7 @@ PyPy project to improve the performance of its Python interpreter. These hints are used to control how the optimizer of the tracing JIT can improve the traces of the object model. More specifically, these hints influence the -constant folding optimization. The first hint make it possible to turn arbitrary +constant folding optimization. The first hint makes it possible to turn arbitrary variables in the trace into constants. The second hint allows the definition of additional foldable operations. @@ -195,21 +195,21 @@ \label{sub:tracing} A recently popular approach to JIT compilers is that of tracing JITs. Tracing -JITs have their origin in the Dynamo project which used the for dynamic -assembler optimization \cite{bala_dynamo:_2000}. Later they were used for to implement +JITs have their origin in the Dynamo project, which used one of them for dynamic +assembler optimization \cite{bala_dynamo:_2000}. Later they were used to implement a lightweight JIT for Java \cite{gal_hotpathvm:_2006} and for dynamic languages such as JavaScript \cite{gal_trace-based_2009}. A tracing JIT works by recording traces of concrete execution paths through the program. Those traces are therefore linear list of operations, which are optimized and then -get turned into machine code. 
This recording automatically inlines functions, +get turned into machine code. This recording automatically inlines functions: when a function call is encountered the operations of the called functions are simply put into the trace too. To be able to do this recording, VMs with a tracing JIT typically contain an interpreter. After a user program is -started the interpreter is used until the most important paths through the user +started the interpreter is used; only the most frequently executed paths through the user program are turned into machine code. The tracing JIT tries to produce traces that correspond to loops in the traced program, but most tracing JITs now also have support for tracing non-loops \cite{XXX}. @@ -220,7 +220,7 @@ operations that check that the assumptions are still true when the trace is later executed with different values. -One disadvantage of tracing JITs which makes them not directly applicable to +One disadvantage of (tracing) JITs which makes them not directly applicable to PyPy is that they need to encode the language semantics of the language they are tracing. Since PyPy wants to be a general framework, we want to reuse our tracer for different languages. @@ -322,7 +322,7 @@ The trace would look like in Figure~\ref{fig:trace1}. In this example, the attribute \texttt{a} is found on the instance, but the -attributes \texttt{b} and \texttt{c} are found on the class. The numbers line +attributes \texttt{b} and \texttt{c} are found on the class. The line numbers in the trace correspond to the line numbers in Figure~\ref{fig:interpreter-slow} where the traced operations come from. The trace indeed contains @@ -400,7 +400,7 @@ into a constant value. This process is called \emph{promotion} and it is an old idea in partial evaluation (it's called ``The Trick'' \cite{jones_partial_1993} there). Promotion is also heavily used by Psyco \cite{rigo_representation-based_2004} and by all older versions -of PyPy's JIT. 
Promotion is a technique that only works well in JIT compilers, +of PyPy's JIT. Promotion is a technique that only works well in JIT compilers; in static compilers it is significantly less applicable. Promotion is essentially a tool for trace specialization. In some places in the @@ -492,14 +492,14 @@ have values that are variable but vary little in the context of parts of a user program. An example would be the types of variables in a user function. Even though in principle the argument to a Python function could be any Python type, -in practice the argument types tend to not vary often. Therefore it is possible to +in practice the argument types tend not to vary often. Therefore it is possible to promote the types. The next section will present a complete example of how this works. \subsection{Declaring New Pure Operations} -In the last section we saw a way to turn arbitrary variables into constants. All +In the previous section we saw a way to turn arbitrary variables into constants. All pure operations on these constants can be constant-folded. This works great for constant folding of simple types, e.g. integers. Unfortunately, in the context of an interpreter for a dynamic @@ -571,7 +571,7 @@ Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used during tracing. The call to \texttt{compute} is not inlined, so that the optimizer -has a chance to see it. Since \texttt{compute} function is marked as pure, and its +has a chance to see it. Since the \texttt{compute} function is marked as pure, and its argument is a constant reference, the call will be removed by the optimizer. The final trace looks like this: @@ -706,6 +706,8 @@ Every time the class changes, \texttt{find\_method} can potentially return a new value. +XXX should we say ``version number'' all around when it is really an object? 
+ Therefore, we give every class a version number, which is changed every time a class gets changed (i.e., the content of the \texttt{methods} dictionary changes). This means that the result of \texttt{methods.get()} for a given \texttt{(name, @@ -746,7 +748,7 @@ The index \texttt{0} that is used to read out of the \texttt{storage} array is the result of the constant-folded \texttt{getindex} call. The constants \texttt{41} and \texttt{17} are the results of the folding of the -\texttt{\_find\_method`} calls. This final trace is now very good. It no longer performs any +\texttt{\_find\_method} calls. This final trace is now very good. It no longer performs any dictionary lookups. Instead it contains several guards. The first guard checks that the map is still the same. This guard will fail if the same code is executed with an instance that has another layout. The second guard @@ -835,9 +837,9 @@ used to trace through an JavaScript implementation written in C\#. The JavaScript implementation compiles JavaScript to CIL bytecode together with an implementation of the JavaScript object model. The object model uses maps -and inline caches to speed up operations on objects. The tracer tracers through +and inline caches to speed up operations on objects. The tracer traces through the compiled JavaScript functions and the object model. SPUR contains two hints -that can be used to influence the tracer, one to prevent tracing of a C\# +that can be used to influence the tracer: one to prevent tracing of a C\# function and one to force unrolling of a loop (PyPy has equivalent hints, but they were not described in this paper). 
From commits-noreply at bitbucket.org Sun Mar 27 20:19:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 20:19:17 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: merge heads Message-ID: <20110327181917.D04D3282BDD@codespeak.net> Author: Armin Rigo Branch: extradoc Changeset: r3430:a44895720f96 Date: 2011-03-27 20:16 +0200 http://bitbucket.org/pypy/extradoc/changeset/a44895720f96/ Log: merge heads diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -201,21 +201,21 @@ \label{sub:tracing} A recently popular approach to JIT compilers is that of tracing JITs. Tracing -JITs have their origin in the Dynamo project which used them for dynamic -assembler optimization \cite{bala_dynamo:_2000}. Later they were used for to implement +JITs have their origin in the Dynamo project, which used one of them for dynamic +assembler optimization \cite{bala_dynamo:_2000}. Later they were used to implement a lightweight JIT for Java \cite{gal_hotpathvm:_2006} and for dynamic languages such as JavaScript \cite{gal_trace-based_2009}. A tracing JIT works by recording traces of concrete execution paths through the program. Those traces are therefore linear list of operations, which are optimized and then -get turned into machine code. This recording automatically inlines functions, +get turned into machine code. This recording automatically inlines functions: when a function call is encountered the operations of the called functions are simply put into the trace too. To be able to do this recording, VMs with a tracing JIT typically contain an interpreter. After a user program is -started the interpreter is used until the most important paths through the user +started the interpreter is used; only the most frequently executed paths through the user program are turned into machine code. 
The tracing JIT tries to produce traces that correspond to loops in the traced program, but most tracing JITs now also have support for tracing non-loops \cite{XXX}. @@ -225,7 +225,7 @@ decisions needed to stay on the trace with the help of \emph{guards}. Those are operations that check that the assumptions are still true when the compiled trace is later executed with different values. -One disadvantage of tracing JITs which makes them not directly applicable to +One disadvantage of (tracing) JITs which makes them not directly applicable to PyPy is that they need to encode the language semantics of the language they are tracing. Since PyPy wants to be a general framework, we want to reuse our tracer for different languages. @@ -327,7 +327,7 @@ The trace would look like in Figure~\ref{fig:trace1}. In this example, the attribute \texttt{a} is found on the instance, but the -attributes \texttt{b} and \texttt{c} are found on the class. The numbers line +attributes \texttt{b} and \texttt{c} are found on the class. The line numbers in the trace correspond to the line numbers in Figure~\ref{fig:interpreter-slow} where the traced operations come from. The trace indeed contains @@ -405,7 +405,7 @@ into a constant value. This process is called \emph{promotion} and it is an old idea in partial evaluation (it's called ``The Trick'' \cite{jones_partial_1993} there). Promotion is also heavily used by Psyco \cite{rigo_representation-based_2004} and by all older versions -of PyPy's JIT. Promotion is a technique that only works well in JIT compilers, +of PyPy's JIT. Promotion is a technique that only works well in JIT compilers; in static compilers it is significantly less applicable. Promotion is essentially a tool for trace specialization. In some places in the @@ -497,14 +497,14 @@ have values that are variable but vary little in the context of parts of a user program. An example would be the types of variables in a user function. 
Even though in principle the argument to a Python function could be any Python type, -in practice the argument types tend to not vary often. Therefore it is possible to +in practice the argument types tend not to vary often. Therefore it is possible to promote the types. The next section will present a complete example of how this works. \subsection{Declaring New Pure Operations} -In the last section we saw a way to turn arbitrary variables into constants. All +In the previous section we saw a way to turn arbitrary variables into constants. All pure operations on these constants can be constant-folded. This works great for constant folding of simple types, e.g. integers. Unfortunately, in the context of an interpreter for a dynamic @@ -576,7 +576,7 @@ Here, \texttt{0xb73984a8} is the address of the instance of \texttt{A} that was used during tracing. The call to \texttt{compute} is not inlined, so that the optimizer -has a chance to see it. Since \texttt{compute} function is marked as pure, and its +has a chance to see it. Since the \texttt{compute} function is marked as pure, and its argument is a constant reference, the call will be removed by the optimizer. The final trace looks like this: @@ -711,6 +711,8 @@ Every time the class changes, \texttt{find\_method} can potentially return a new value. +XXX should we say ``version number'' all around when it is really an object? + Therefore, we give every class a version object, which is changed every time a class gets changed (i.e., the content of the \texttt{methods} dictionary changes). This means that the result of \texttt{methods.get()} for a given \texttt{(name, @@ -751,7 +753,7 @@ The index \texttt{0} that is used to read out of the \texttt{storage} array is the result of the constant-folded \texttt{getindex} call. The constants \texttt{41} and \texttt{17} are the results of the folding of the -\texttt{\_find\_method`} calls. This final trace is now very good. 
It no longer performs any +\texttt{\_find\_method} calls. This final trace is now very good. It no longer performs any dictionary lookups. Instead it contains several guards. The first guard checks that the map is still the same. This guard will fail if the same code is executed with an instance that has another layout. The second guard @@ -840,9 +842,9 @@ used to trace through an JavaScript implementation written in C\#. The JavaScript implementation compiles JavaScript to CIL bytecode together with an implementation of the JavaScript object model. The object model uses maps -and inline caches to speed up operations on objects. The tracer tracers through +and inline caches to speed up operations on objects. The tracer traces through the compiled JavaScript functions and the object model. SPUR contains two hints -that can be used to influence the tracer, one to prevent tracing of a C\# +that can be used to influence the tracer: one to prevent tracing of a C\# function and one to force unrolling of a loop (PyPy has equivalent hints, but they were not described in this paper). From commits-noreply at bitbucket.org Sun Mar 27 20:19:18 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 20:19:18 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: Finish renaming 'version number' into 'version object'. Message-ID: <20110327181918.3CED3282BDD@codespeak.net> Author: Armin Rigo Branch: extradoc Changeset: r3431:736c5eb6bfb4 Date: 2011-03-27 20:16 +0200 http://bitbucket.org/pypy/extradoc/changeset/736c5eb6bfb4/ Log: Finish renaming 'version number' into 'version object'. diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -711,8 +711,6 @@ Every time the class changes, \texttt{find\_method} can potentially return a new value. -XXX should we say ``version number'' all around when it is really an object? 
- Therefore, we give every class a version object, which is changed every time a class gets changed (i.e., the content of the \texttt{methods} dictionary changes). This means that the result of \texttt{methods.get()} for a given \texttt{(name, @@ -729,7 +727,7 @@ What is interesting here is that \texttt{\_find\_method} takes the \texttt{version} argument but it does not use it at all. Its only purpose is to make the call -pure, because when the version number changes, the result of the call might be +pure, because when the version object changes, the result of the call might be different than the previous one. \begin{figure} From commits-noreply at bitbucket.org Sun Mar 27 20:19:18 2011 From: commits-noreply at bitbucket.org (arigo) Date: Sun, 27 Mar 2011 20:19:18 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: merge heads Message-ID: <20110327181918.9E940282BDD@codespeak.net> Author: Armin Rigo Branch: extradoc Changeset: r3432:f5e51af76a1f Date: 2011-03-27 20:18 +0200 http://bitbucket.org/pypy/extradoc/changeset/f5e51af76a1f/ Log: merge heads diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -76,7 +76,7 @@ \begin{document} -\title{XXX in a Tracing JIT Compiler for Efficient Dynamic Languages} +\title{Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages} \authorinfo{Carl Friedrich Bolz \and XXX} {Heinrich-Heine-Universität Düsseldorf, STUPS Group, Germany @@ -135,16 +135,17 @@ PyPy project to improve the performance of its Python interpreter. Conceptually the significant speed-ups that can be achieved with -dynamic compilation depends on feeding into compilation and exploiting -values observed at runtime that are in practice mostly constant (XXX or should we say slow varying), and -in structuring implementation code and data structures such that more -of such values are at hand. 
The hints that we present allow exactly to implement such feedback and exploitation in a meta-tracing context. +dynamic compilation depend on feeding into compilation and exploiting +values observed at runtime that are slow-varying in practice. To exploit the +runtime feedback, the implementation code and data structures need to be +structured so that many such values are at hand. The hints that we present allow +exactly to implement such feedback and exploitation in a meta-tracing context. Concretely these hints are used to control how the optimizer of the tracing JIT can improve the traces of the object model. More specifically, these hints influence the constant folding -optimization. The first hint make it possible to turn arbitrary -variables in the trace into constant. The +optimization. The first hint makes it possible to turn arbitrary +variables in the trace into constant by feeding back runtime values. The second hint allows the definition of additional foldable operations. Together these two hints can be used to express many classic implementation @@ -153,7 +154,8 @@ The contributions of this paper are: \begin{itemize} - \item A hint to turn arbitrary variables into constants in the trace, that means the feedback of runtime information into compilation. + \item A hint to turn arbitrary variables into constants in the trace, that + means the feedback of runtime information into compilation. \item A way to define new pure operations which the constant folding optimization then recognizes. \item A worked-out example of a simple object model of a dynamic language and From commits-noreply at bitbucket.org Sun Mar 27 20:30:30 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 20:30:30 +0200 (CEST) Subject: [pypy-svn] pypy jit-usable_retrace: Test GUARD_OVERFLOW aswell. 
Currently it will not be moved out of the loop and end up in the short preamble Message-ID: <20110327183030.833F9282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42985:7ce37a96d335 Date: 2011-03-27 15:47 +0200 http://bitbucket.org/pypy/pypy/changeset/7ce37a96d335/ Log: Test GUARD_OVERFLOW aswell. Currently it will not be moved out of the loop and end up in the short preamble diff --git a/pypy/jit/metainterp/test/test_basic.py b/pypy/jit/metainterp/test/test_basic.py --- a/pypy/jit/metainterp/test/test_basic.py +++ b/pypy/jit/metainterp/test/test_basic.py @@ -464,6 +464,8 @@ if y&4 == 0: x1, x2 = x2, x1 return res + res = self.meta_interp(f, [sys.maxint, 6, 32, 48]) + assert res == f(sys.maxint, 6, 32, 48) res = self.meta_interp(f, [6, sys.maxint, 32, 48]) assert res == f(6, sys.maxint, 32, 48) From commits-noreply at bitbucket.org Sun Mar 27 20:30:32 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 20:30:32 +0200 (CEST) Subject: [pypy-svn] pypy jit-usable_retrace: fixed merge error, only insert GUARD_NO_OVERFLOW when needed Message-ID: <20110327183032.0E989282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42986:dacd6928545b Date: 2011-03-27 16:10 +0200 http://bitbucket.org/pypy/pypy/changeset/dacd6928545b/ Log: fixed merge error, only insert GUARD_NO_OVERFLOW when needed diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -492,11 +492,12 @@ self.boxes_seen_in_short[box] = True op = self.optimizer.producer[box] - ok = False + ok = need_ovf_guard = False if op.is_always_pure(): ok = True elif op.is_ovf() and op in self.optimizer.overflow_guarded: ok = True + need_ovf_guard = True elif op.has_no_side_effect(): # FIXME: When are these safe to include? Allow getitems only # if they are still in the heap cache? 
@@ -512,8 +513,9 @@ self.produce_box_in_short_preamble(arg) if self.short_operations is not None: self.short_operations.append(op) - guard = ResOperation(rop.GUARD_NO_OVERFLOW, [], None) - self.short_operations.append(guard) + if need_ovf_guard: + guard = ResOperation(rop.GUARD_NO_OVERFLOW, [], None) + self.short_operations.append(guard) else: import pdb; pdb.set_trace() diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -90,12 +90,12 @@ fieldvalue = optheap.getvalue(op.getarg(1)) self.remember_field_value(structvalue, fieldvalue) - def get_reconstructed(self, optimizer, valuemap): + def get_cloned(self, optimizer, valuemap): assert self._lazy_setfield is None cf = CachedField() for structvalue, fieldvalue in self._cached_fields.iteritems(): - structvalue2 = structvalue.get_reconstructed(optimizer, valuemap) - fieldvalue2 = fieldvalue .get_reconstructed(optimizer, valuemap) + structvalue2 = structvalue.get_cloned(optimizer, valuemap) + fieldvalue2 = fieldvalue .get_cloned(optimizer, valuemap) cf._cached_fields[structvalue2] = fieldvalue2 return cf @@ -129,7 +129,7 @@ assert 0 # was: new.lazy_setfields = self.lazy_setfields for descr, d in self.cached_fields.items(): - new.cached_fields[descr] = d.get_cloneded(optimizer, valuemap) + new.cached_fields[descr] = d.get_cloned(optimizer, valuemap) new.cached_arrayitems = {} for descr, d in self.cached_arrayitems.items(): From commits-noreply at bitbucket.org Sun Mar 27 20:30:33 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 20:30:33 +0200 (CEST) Subject: [pypy-svn] pypy jit-usable_retrace: leftover debuging Message-ID: <20110327183033.8E5F8282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-usable_retrace Changeset: r42987:af6befdd8f82 Date: 2011-03-27 18:01 +0200 http://bitbucket.org/pypy/pypy/changeset/af6befdd8f82/ Log: leftover debuging diff --git 
a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -517,8 +517,7 @@ guard = ResOperation(rop.GUARD_NO_OVERFLOW, [], None) self.short_operations.append(guard) else: - import pdb; pdb.set_trace() - + #import pdb; pdb.set_trace() self.short_operations = None def create_short_preamble(self, preamble, loop): From commits-noreply at bitbucket.org Sun Mar 27 20:30:35 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 20:30:35 +0200 (CEST) Subject: [pypy-svn] pypy jit-str_in_preamble: hg merge default Message-ID: <20110327183035.463C2282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r42988:3cb0372b725f Date: 2011-03-27 18:04 +0200 http://bitbucket.org/pypy/pypy/changeset/3cb0372b725f/ Log: hg merge default diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -819,6 +819,52 @@ """ self.optimize_loop(ops, expected, preamble) + def test_compare_with_itself(self): + ops = """ + [] + i0 = escape() + i1 = int_lt(i0, i0) + guard_false(i1) [] + i2 = int_le(i0, i0) + guard_true(i2) [] + i3 = int_eq(i0, i0) + guard_true(i3) [] + i4 = int_ne(i0, i0) + guard_false(i4) [] + i5 = int_gt(i0, i0) + guard_false(i5) [] + i6 = int_ge(i0, i0) + guard_true(i6) [] + jump() + """ + expected = """ + [] + i0 = escape() + jump() + """ + self.optimize_loop(ops, expected) + + def test_compare_with_itself_uint(self): + py.test.skip("implement me") + ops = """ + [] + i0 = escape() + i7 = uint_lt(i0, i0) + guard_false(i7) [] + i8 = uint_le(i0, i0) + guard_true(i8) [] + i9 = uint_gt(i0, i0) + guard_false(i9) [] + i10 = uint_ge(i0, i0) + guard_true(i10) [] + jump() + """ + expected = """ + [] + i0 = escape() + jump() + """ + self.optimize_loop(ops, expected) 
diff --git a/pypy/jit/metainterp/optimizeopt/intbounds.py b/pypy/jit/metainterp/optimizeopt/intbounds.py --- a/pypy/jit/metainterp/optimizeopt/intbounds.py +++ b/pypy/jit/metainterp/optimizeopt/intbounds.py @@ -206,7 +206,7 @@ v2 = self.getvalue(op.getarg(1)) if v1.intbound.known_lt(v2.intbound): self.make_constant_int(op.result, 1) - elif v1.intbound.known_ge(v2.intbound): + elif v1.intbound.known_ge(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 0) else: self.emit_operation(op) @@ -216,7 +216,7 @@ v2 = self.getvalue(op.getarg(1)) if v1.intbound.known_gt(v2.intbound): self.make_constant_int(op.result, 1) - elif v1.intbound.known_le(v2.intbound): + elif v1.intbound.known_le(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 0) else: self.emit_operation(op) @@ -224,7 +224,7 @@ def optimize_INT_LE(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) - if v1.intbound.known_le(v2.intbound): + if v1.intbound.known_le(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 1) elif v1.intbound.known_gt(v2.intbound): self.make_constant_int(op.result, 0) @@ -234,7 +234,7 @@ def optimize_INT_GE(self, op): v1 = self.getvalue(op.getarg(0)) v2 = self.getvalue(op.getarg(1)) - if v1.intbound.known_ge(v2.intbound): + if v1.intbound.known_ge(v2.intbound) or v1 is v2: self.make_constant_int(op.result, 1) elif v1.intbound.known_lt(v2.intbound): self.make_constant_int(op.result, 0) From commits-noreply at bitbucket.org Sun Mar 27 20:30:36 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Sun, 27 Mar 2011 20:30:36 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: move inputargs generation to VirtualState to make it reusable to create the jumpargs Message-ID: <20110327183036.A5149282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r42989:96e7b4920fa5 Date: 2011-03-27 20:01 +0200 http://bitbucket.org/pypy/pypy/changeset/96e7b4920fa5/ Log: move inputargs generation to VirtualState to make 
it reusable to create the jumpargs diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -145,6 +145,16 @@ self.state[i].generate_guards(other.state[i], args[i], cpu, extra_guards) + def make_inputargs(self, values): + assert len(values) == len(self.state) + inputargs = [] + seen_inputargs = {} + for i in range(len(values)): + self.state[i].enum_forced_boxes(inputargs, seen_inputargs, + values[i]) + return [a for a in inputargs if not isinstance(a, Const)] + + class VirtualStateAdder(resume.ResumeDataVirtualAdder): def __init__(self, optimizer): self.fieldboxes = {} @@ -233,6 +243,12 @@ if self.level == LEVEL_CONSTANT: import pdb; pdb.set_trace() raise NotImplementedError + + def enum_forced_boxes(self, boxes, already_seen, value): + key = value.get_key_box() + if key not in already_seen: + boxes.append(value.force_box()) + already_seen[value.get_key_box()] = None class UnrollOptimizer(Optimization): @@ -271,7 +287,8 @@ try: inputargs = self.inline(self.cloned_operations, - loop.inputargs, jump_args) + loop.inputargs, jump_args, + virtual_state) except KeyError: debug_print("Unrolling failed.") loop.preamble.operations = None @@ -342,20 +359,15 @@ if op.result: op.result.forget_value() - def inline(self, loop_operations, loop_args, jump_args): + def inline(self, loop_operations, loop_args, jump_args, virtual_state): self.inliner = inliner = Inliner(loop_args, jump_args) - + + # FIXME: Move this to reconstruct for v in self.optimizer.values.values(): v.last_guard_index = -1 # FIXME: Are there any more indexes stored? 
- inputargs = [] - seen_inputargs = {} - for arg in jump_args: - boxes = [] - self.getvalue(arg).enum_forced_boxes(boxes, seen_inputargs) - for a in boxes: - if not isinstance(a, Const): - inputargs.append(a) + values = [self.getvalue(arg) for arg in jump_args] + inputargs = virtual_state.make_inputargs(values) # This loop is equivalent to the main optimization loop in # Optimizer.propagate_all_forward @@ -364,7 +376,8 @@ newop.initarglist(inputargs) newop = inliner.inline_op(newop, clone=False) - self.optimizer.first_optimization.propagate_forward(newop) + #self.optimizer.first_optimization.propagate_forward(newop) + self.optimizer.send_extra_operation(newop) # Remove jump to make sure forced code are placed before it newoperations = self.optimizer.newoperations diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -445,6 +445,9 @@ def _generate_guards(self, other, box, cpu, extra_guards): raise InvalidLoop + + def enum_forced_boxes(self, boxes, already_seen, value): + raise NotImplementedError class AbstractVirtualStructInfo(AbstractVirtualInfo): def __init__(self, fielddescrs): @@ -484,6 +487,19 @@ def _generalization_of(self, other): raise NotImplementedError + def enum_forced_boxes(self, boxes, already_seen, value): + #FIXME: assert isinstance(value, AbstractVirtualStructValue) + key = value.get_key_box() + if key in already_seen: + return + already_seen[key] = None + if value.box is None: + for i in range(len(self.fielddescrs)): + v = value._fields[self.fielddescrs[i]] + self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) + else: + boxes.append(value.box) + class VirtualInfo(AbstractVirtualStructInfo): def __init__(self, known_class, fielddescrs): @@ -572,6 +588,21 @@ return False return True + def enum_forced_boxes(self, boxes, already_seen, value): + # FIXME: assert isinstance(value, VArrayValue) + key = value.get_key_box() + if key in already_seen: + 
return + already_seen[key] = None + if value.box is None: + for i in range(len(self.fieldstate)): + v = value._items[i] + self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) + else: + boxes.append(value.box) + + + class VStrPlainInfo(AbstractVirtualInfo): """Stands for the string made out of the characters of all fieldnums.""" diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -52,12 +52,6 @@ def get_key_box(self): return self.box - def enum_forced_boxes(self, boxes, already_seen): - key = self.get_key_box() - if key not in already_seen: - boxes.append(self.force_box()) - already_seen[self.get_key_box()] = None - def get_cloned(self, optimizer, valuemap, force_if_needed=True): if self in valuemap: return valuemap[self] diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py --- a/pypy/jit/metainterp/optimizeopt/virtualize.py +++ b/pypy/jit/metainterp/optimizeopt/virtualize.py @@ -132,18 +132,6 @@ fieldvalue = self._fields[ofs] fieldvalue.get_args_for_fail(modifier) - def enum_forced_boxes(self, boxes, already_seen): - key = self.get_key_box() - if key in already_seen: - return - already_seen[key] = None - if self.box is None: - lst = self._get_field_descr_list() - for ofs in lst: - self._fields[ofs].enum_forced_boxes(boxes, already_seen) - else: - boxes.append(self.box) - def clone_for_next_iteration(self, optimizer): raise NotImplementedError @@ -247,17 +235,6 @@ def _make_virtual(self, modifier): return modifier.make_varray(self.arraydescr) - def enum_forced_boxes(self, boxes, already_seen): - key = self.get_key_box() - if key in already_seen: - return - already_seen[key] = None - if self.box is None: - for itemvalue in self._items: - itemvalue.enum_forced_boxes(boxes, already_seen) - else: - boxes.append(self.box) - def 
clone_for_next_iteration(self, optimizer): new = VArrayValue(optimizer, self.arraydescr, len(self._items), self.keybox, self.source_op) From commits-noreply at bitbucket.org Sun Mar 27 20:44:11 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Sun, 27 Mar 2011 20:44:11 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: second dynamo ref Message-ID: <20110327184411.8D95B282BA1@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3433:c9e82ee18d61 Date: 2011-03-27 20:43 +0200 http://bitbucket.org/pypy/extradoc/changeset/c9e82ee18d61/ Log: second dynamo ref diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -824,7 +824,7 @@ The very first meta-tracer is described by Sullivan et. al. \cite{sullivan_dynamic_2003}. They used Dynamo RIO, the successor of Dynamo -\cite{XXX} to trace through a small synthetic interpreter. As in Dynamo, tracing +\cite{bala_dynamo:_2000} to trace through a small synthetic interpreter. As in Dynamo, tracing happens on the machine code level. The tracer is instructed by some hints in the tiny interpreter where the main interpreter loop is and for how long to trace to match loops in the user-level functions. 
These hints are comparable to the one diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 5868e0dd83072ad3e571f7f53a5c01404089b53a..4685bf0da4320b5f3d58b2a19b666d02ae5a5fd4 GIT binary patch [cut] From commits-noreply at bitbucket.org Sun Mar 27 21:43:08 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Sun, 27 Mar 2011 21:43:08 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: few fixes, comment out a section to save space Message-ID: <20110327194308.AE62A282BA1@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3434:c22e0c884dd6 Date: 2011-03-27 21:42 +0200 http://bitbucket.org/pypy/extradoc/changeset/c22e0c884dd6/ Log: few fixes, comment out a section to save space diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -255,8 +255,8 @@ \label{fig:trace-levels} \end{figure} -Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left you -\pedronis{XXX style: do we want to use you, or we and one} see the levels of execution. The CPU executes the binary of +Figure~\ref{fig:trace-levels} shows a diagram of the process. On the left are +the levels of execution. The CPU executes the binary of PyPy's Python interpreter, which consists of RPython functions that have been compiled first to C, then to machine code. The interpreter runs a Python program written by a programmer (the user). If the tracer is used, it traces operations on the level @@ -593,7 +593,7 @@ On the one hand, the \texttt{purefunction} annotation is very powerful. It can be used to constant-fold arbitrary parts of the computation in the interpreter. -However, the annotation also gives you ample opportunity to mess things up. If a +However, the annotation also gives the interpreter author ample opportunity to mess things up. If a function is annotated to be pure, but is not really, the optimizer can produce subtly wrong code. 
Therefore, a lot of care has to be taken when using this annotation. @@ -607,7 +607,7 @@ be possible in this particular case, but in practice the functions that are annotated with the \texttt{purefunction} decorator are usually more complex. The easiest example for this is that of a function that uses memoization to -cache its results. If you analyze this function, it looks like the function has +cache its results. If this function is analyzed, it looks like the function has side effects, because it changes the memoizing dictionary. However, because this side effect is not externally visible, the function from the outside is pure. This is a property that is not easily detectable by analysis. Therefore, the purity @@ -797,21 +797,21 @@ %___________________________________________________________________________ -\subsection{More General Patterns} - -The techniques we used above to make instance and class lookups faster are -applicable in more general cases than the one we developed them for. A more -abstract view of maps is that of splitting a data-structure into a part that -changes slowly, and a part that changes quickly. In the concrete example of maps -we split the original dictionary into the map (the slow-changing part) and the -storage array (the quick-changing part). All the computation on the -slow-changing part can be constant-folded during tracing so that only the -manipulation of the quick-changing part remains. - -Similarly, versions can be used to constant-fold arbitrary functions of large data -structures. The version needs to be updated carefully every time the result of -this function can change. Therefore this is useful only if the data structure is -expected to change slowly. +%\subsection{More General Patterns} +% +%The techniques we used above to make instance and class lookups faster are +%applicable in more general cases than the one we developed them for. 
A more +%abstract view of maps is that of splitting a data-structure into a part that +%changes slowly, and a part that changes quickly. In the concrete example of maps +%we split the original dictionary into the map (the slow-changing part) and the +%storage array (the quick-changing part). All the computation on the +%slow-changing part can be constant-folded during tracing so that only the +%manipulation of the quick-changing part remains. +% +%Similarly, versions can be used to constant-fold arbitrary functions of large data +%structures. The version needs to be updated carefully every time the result of +%this function can change. Therefore this is useful only if the data structure is +%expected to change slowly. %___________________________________________________________________________ @@ -858,7 +858,8 @@ They implement Java and Smalltalk on top of the SELF VM by compiling the languages to SELF. The SELF JIT is good enough to optimize the compiled code very well. We believe the approach to be restricted to languages that are -similar enough to SELF. XXX +similar enough to SELF as there were no mechanisms to control the underlying +compiler. Somewhat relatedly, the proposed ``invokedynamic'' bytecode \cite{rose_bytecodes_2009} that will be added to the JVM is supposed to make the @@ -874,10 +875,10 @@ \cite{hoelzle_optimizing_1991} PICs find the web page of V8 about maps -is there anything about versions? smalltalks tend to clear their method caches -when new methods are added. self and java use dependency tracking and -deoptimization. this is better what we have above, because we need runtime -checks. mention out of line guard? +%is there anything about versions? smalltalks tend to clear their method caches +%when new methods are added. self and java use dependency tracking and +%deoptimization. this is better what we have above, because we need runtime +%checks. mention out of line guard? 
jruby used versions at some point, jvm-l mailing list discusses them From commits-noreply at bitbucket.org Mon Mar 28 01:22:01 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 28 Mar 2011 01:22:01 +0200 (CEST) Subject: [pypy-svn] pypy default: Don't generate nonsense for UNPACK_ITERABLE with a non-{list, tuple} item (specifically array) Message-ID: <20110327232201.BEFA9282BA1@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42990:6e681c7a274f Date: 2011-03-27 19:21 -0400 http://bitbucket.org/pypy/pypy/changeset/6e681c7a274f/ Log: Don't generate nonsense for UNPACK_ITERABLE with a non-{list,tuple} item (specifically array) diff --git a/pypy/module/pypyjit/test_pypy_c/model.py b/pypy/module/pypyjit/test_pypy_c/model.py --- a/pypy/module/pypyjit/test_pypy_c/model.py +++ b/pypy/module/pypyjit/test_pypy_c/model.py @@ -260,7 +260,7 @@ @classmethod def is_const(cls, v1): return isinstance(v1, str) and v1.startswith('ConstClass(') - + def match_var(self, v1, exp_v2): assert v1 != '_' if exp_v2 == '_': @@ -287,7 +287,7 @@ for arg, exp_arg in zip(op.args, exp_args): self._assert(self.match_var(arg, exp_arg), "variable mismatch: %r instead of %r" % (arg, exp_arg)) self.match_descr(op.descr, exp_descr) - + def _next_op(self, iter_ops, assert_raises=False): try: diff --git a/pypy/interpreter/baseobjspace.py b/pypy/interpreter/baseobjspace.py --- a/pypy/interpreter/baseobjspace.py +++ b/pypy/interpreter/baseobjspace.py @@ -754,7 +754,12 @@ """Unpack an iterable object into a real (interpreter-level) list. Raise an OperationError(w_ValueError) if the length is wrong.""" w_iterator = self.iter(w_iterable) - items = [] + # If we know the expected length we can preallocate. 
+ if expected_length == -1: + items = [] + else: + items = [None] * expected_length + idx = 0 while True: try: w_item = self.next(w_iterator) @@ -762,19 +767,22 @@ if not e.match(self, self.w_StopIteration): raise break # done - if expected_length != -1 and len(items) == expected_length: + if expected_length != -1 and idx == expected_length: raise OperationError(self.w_ValueError, self.wrap("too many values to unpack")) - items.append(w_item) - if expected_length != -1 and len(items) < expected_length: - i = len(items) - if i == 1: + if expected_length == -1: + items.append(w_item) + else: + items[idx] = w_item + idx += 1 + if expected_length != -1 and idx < expected_length: + if idx == 1: plural = "" else: plural = "s" raise OperationError(self.w_ValueError, self.wrap("need more than %d value%s to unpack" % - (i, plural))) + (idx, plural))) return items unpackiterable_unroll = jit.unroll_safe(func_with_new_name(unpackiterable, diff --git a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py --- a/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py +++ b/pypy/module/pypyjit/test_pypy_c/test_pypy_c_new.py @@ -1036,3 +1036,36 @@ --TICK-- jump(p0, p1, p2, p3, p4, i21, i6, i7, p8, p9, descr=) """) + + def test_unpack_iterable_non_list_tuple(self): + def main(n): + import array + + items = [array.array("i", [1])] * n + total = 0 + for a, in items: + total += a + return total + + log = self.run(main, [1000000]) + assert log.result == 1000000 + loop, = log.loops_by_filename(self.filepath) + assert loop.match(""" + i16 = int_ge(i12, i13) + guard_false(i16, descr=) + p17 = getarrayitem_gc(p15, i12, descr=) + i19 = int_add(i12, 1) + setfield_gc(p4, i19, descr=) + guard_nonnull_class(p17, 146982464, descr=) + i21 = getfield_gc(p17, descr=) + i23 = int_lt(0, i21) + guard_true(i23, descr=) + i24 = getfield_gc(p17, descr=) + i25 = getarrayitem_raw(i24, 0, descr=) + i27 = int_lt(1, i21) + guard_false(i27, descr=) + i28 = 
int_add_ovf(i10, i25) + guard_no_overflow(descr=) + --TICK-- + jump(p0, p1, p2, p3, p4, p5, p6, p7, p8, p9, i28, i25, i19, i13, p14, p15, descr=) + """) \ No newline at end of file From commits-noreply at bitbucket.org Mon Mar 28 01:45:49 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Mon, 28 Mar 2011 01:45:49 +0200 (CEST) Subject: [pypy-svn] pypy default: math.is{inf,nan} are purefunctions Message-ID: <20110327234549.AB597282BA1@codespeak.net> Author: Alex Gaynor Branch: Changeset: r42991:498a6a982a94 Date: 2011-03-27 19:45 -0400 http://bitbucket.org/pypy/pypy/changeset/498a6a982a94/ Log: math.is{inf,nan} are purefunctions diff --git a/pypy/rpython/lltypesystem/module/ll_math.py b/pypy/rpython/lltypesystem/module/ll_math.py --- a/pypy/rpython/lltypesystem/module/ll_math.py +++ b/pypy/rpython/lltypesystem/module/ll_math.py @@ -6,7 +6,7 @@ from pypy.rpython.lltypesystem import lltype, rffi from pypy.tool.sourcetools import func_with_new_name from pypy.tool.autopath import pypydir -from pypy.rlib import rposix +from pypy.rlib import jit, rposix from pypy.translator.tool.cbuild import ExternalCompilationInfo from pypy.rlib.rfloat import isinf, isnan, INFINITY, NAN @@ -91,11 +91,11 @@ # # Custom implementations - + at jit.purefunction def ll_math_isnan(y): return bool(math_isnan(y)) - + at jit.purefunction def ll_math_isinf(y): return bool(math_isinf(y)) From commits-noreply at bitbucket.org Mon Mar 28 11:53:20 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Mon, 28 Mar 2011 11:53:20 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: fix the test now that we generate a call_release_gil instead of call_may_force Message-ID: <20110328095320.EAB8D282B9D@codespeak.net> Author: Antonio Cuni Branch: jitypes2 Changeset: r42992:c726890f903b Date: 2011-03-28 11:51 +0200 http://bitbucket.org/pypy/pypy/changeset/c726890f903b/ Log: fix the test now that we generate a call_release_gil instead of call_may_force diff --git 
a/pypy/module/pypyjit/test/test_pypy_c.py b/pypy/module/pypyjit/test/test_pypy_c.py --- a/pypy/module/pypyjit/test/test_pypy_c.py +++ b/pypy/module/pypyjit/test/test_pypy_c.py @@ -1346,7 +1346,7 @@ last_ops = [op.getopname() for op in call_function[-5:]] assert last_ops == ['force_token', 'setfield_gc', - 'call_may_force', + 'call_release_gil', 'guard_not_forced', 'guard_no_exception'] call = call_function[-3] From commits-noreply at bitbucket.org Mon Mar 28 13:13:31 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 13:13:31 +0200 (CEST) Subject: [pypy-svn] pypy default: Fix for failing tests. Message-ID: <20110328111331.1C44B36C20C@codespeak.net> Author: Armin Rigo Branch: Changeset: r42993:5281cb31026c Date: 2011-03-28 13:11 +0200 http://bitbucket.org/pypy/pypy/changeset/5281cb31026c/ Log: Fix for failing tests. diff --git a/pypy/rpython/lltypesystem/rffi.py b/pypy/rpython/lltypesystem/rffi.py --- a/pypy/rpython/lltypesystem/rffi.py +++ b/pypy/rpython/lltypesystem/rffi.py @@ -311,7 +311,8 @@ eci = ExternalCompilationInfo(post_include_bits=['#define PYPY_NO_OP()']) eci = eci.merge(compilation_info) return llexternal('PYPY_NO_OP', [], lltype.Void, - compilation_info=eci, sandboxsafe=True, _nowrapper=True) + compilation_info=eci, sandboxsafe=True, _nowrapper=True, + _callable=lambda: None) # ____________________________________________________________ # Few helpers for keeping callback arguments alive From commits-noreply at bitbucket.org Mon Mar 28 13:13:32 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 13:13:32 +0200 (CEST) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110328111332.72464282B9D@codespeak.net> Author: Armin Rigo Branch: Changeset: r42994:601862ed288e Date: 2011-03-28 13:12 +0200 http://bitbucket.org/pypy/pypy/changeset/601862ed288e/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 28 15:58:44 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 
15:58:44 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: Kill the two unnecessary copies of rsi and rdi at the start of the function. Message-ID: <20110328135844.C12C636C20C@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42995:42f4c037a92b Date: 2011-03-27 13:32 +0200 http://bitbucket.org/pypy/pypy/changeset/42f4c037a92b/ Log: Kill the two unnecessary copies of rsi and rdi at the start of the function. diff --git a/pypy/translator/c/gcc/trackgcroot.py b/pypy/translator/c/gcc/trackgcroot.py --- a/pypy/translator/c/gcc/trackgcroot.py +++ b/pypy/translator/c/gcc/trackgcroot.py @@ -1648,8 +1648,8 @@ print >> output, """\ /* See description in asmgcroot.py */ .cfi_startproc - movq\t%rdi, %rdx\t/* 1st argument, which is the callback */ - movq\t%rsi, %rcx\t/* 2nd argument, which is gcrootanchor */ + /* %rdi is the 1st argument, which is the callback */ + /* %rsi is the 2nd argument, which is gcrootanchor */ movq\t%rsp, %rax\t/* my frame top address */ pushq\t%rax\t\t/* ASM_FRAMEDATA[8] */ pushq\t%rbp\t\t/* ASM_FRAMEDATA[7] */ @@ -1662,15 +1662,15 @@ /* Add this ASM_FRAMEDATA to the front of the circular linked */ /* list. Let's call it 'self'. */ - movq\t8(%rcx), %rax\t/* next = gcrootanchor->next */ + movq\t8(%rsi), %rax\t/* next = gcrootanchor->next */ pushq\t%rax\t\t\t\t/* self->next = next */ - pushq\t%rcx\t\t\t/* self->prev = gcrootanchor */ - movq\t%rsp, 8(%rcx)\t/* gcrootanchor->next = self */ + pushq\t%rsi\t\t\t/* self->prev = gcrootanchor */ + movq\t%rsp, 8(%rsi)\t/* gcrootanchor->next = self */ movq\t%rsp, 0(%rax)\t\t\t/* next->prev = self */ .cfi_def_cfa_offset 80\t/* 9 pushes + the retaddr = 80 bytes */ /* note: the Mac OS X 16 bytes aligment must be respected. 
*/ - call\t*%rdx\t\t/* invoke the callback */ + call\t*%rdi\t\t/* invoke the callback */ /* Detach this ASM_FRAMEDATA from the circular linked list */ popq\t%rsi\t\t/* prev = self->prev */ @@ -1687,7 +1687,7 @@ popq\t%rcx\t\t/* ignored ASM_FRAMEDATA[8] */ /* the return value is the one of the 'call' above, */ - /* because %rax (and possibly %rdx) are unmodified */ + /* because %rax is unmodified */ ret .cfi_endproc """ From commits-noreply at bitbucket.org Mon Mar 28 15:58:48 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 15:58:48 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: Implement closing and reopening the stack. Message-ID: <20110328135848.0CE4F282BAD@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42996:58a292fcefa5 Date: 2011-03-28 15:57 +0200 http://bitbucket.org/pypy/pypy/changeset/58a292fcefa5/ Log: Implement closing and reopening the stack. diff --git a/pypy/jit/backend/llsupport/regalloc.py b/pypy/jit/backend/llsupport/regalloc.py --- a/pypy/jit/backend/llsupport/regalloc.py +++ b/pypy/jit/backend/llsupport/regalloc.py @@ -37,6 +37,11 @@ self.frame_depth += size return newloc + def reserve_location_in_frame(self, size): + frame_depth = self.frame_depth + self.frame_depth += size + return frame_depth + # abstract methods that need to be overwritten for specific assemblers @staticmethod def frame_pos(loc, type): diff --git a/pypy/jit/backend/llsupport/gc.py b/pypy/jit/backend/llsupport/gc.py --- a/pypy/jit/backend/llsupport/gc.py +++ b/pypy/jit/backend/llsupport/gc.py @@ -15,7 +15,6 @@ from pypy.jit.backend.llsupport.descr import GcCache, get_field_descr from pypy.jit.backend.llsupport.descr import GcPtrFieldDescr from pypy.jit.backend.llsupport.descr import get_call_descr -from pypy.rpython.memory.gctransform import asmgcroot # ____________________________________________________________ @@ -309,6 +308,7 @@ @rgc.no_collect def freeing_block(self, start, stop): + from pypy.rpython.memory.gctransform import 
asmgcroot # if [start:stop] is a raw block of assembler, then look up the # corresponding gcroot markers, and mark them as freed now in # self._gcmap by setting the 2nd address of every entry to NULL. diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -126,6 +126,7 @@ self.translate_support_code = translate_support_code # to be read/used by the assembler too self.jump_target_descr = None + self.close_stack_struct = 0 def _prepare(self, inputargs, operations): self.fm = X86FrameManager() @@ -801,6 +802,11 @@ self._consider_call(op, guard_op) def consider_call_release_gil(self, op, guard_op): + # first force the registers like eax into the stack, because of + # the initial call to _close_stack() + self.rm.before_call() + self.xrm.before_call() + # assert guard_op is not None self._consider_call(op, guard_op) diff --git a/pypy/jit/backend/x86/runner.py b/pypy/jit/backend/x86/runner.py --- a/pypy/jit/backend/x86/runner.py +++ b/pypy/jit/backend/x86/runner.py @@ -113,10 +113,11 @@ LLInterpreter.current_interpreter = prev_interpreter return res - @staticmethod def cast_ptr_to_int(x): adr = llmemory.cast_ptr_to_adr(x) return CPU386.cast_adr_to_int(adr) + cast_ptr_to_int._annspecialcase_ = 'specialize:arglltype(0)' + cast_ptr_to_int = staticmethod(cast_ptr_to_int) all_null_registers = lltype.malloc(rffi.LONGP.TO, 24, flavor='raw', zero=True, diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -505,6 +505,7 @@ POP_b = insn(rex_nw, '\x8F', orbyte(0<<3), stack_bp(1)) LEA_rb = insn(rex_w, '\x8D', register(1,8), stack_bp(2)) + LEA_rs = insn(rex_w, '\x8D', register(1,8), stack_sp(2)) LEA32_rb = insn(rex_w, '\x8D', register(1,8),stack_bp(2,force_32bits=True)) LEA_ra = insn(rex_w, '\x8D', register(1, 8), mem_reg_plus_scaled_reg_plus_const(2)) LEA_rm = insn(rex_w, '\x8D', 
register(1, 8), mem_reg_plus_const(2)) diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -127,6 +127,8 @@ if hasattr(gc_ll_descr, 'get_malloc_fixedsize_slowpath_addr'): self._build_malloc_fixedsize_slowpath() self._build_stack_check_slowpath() + if gc_ll_descr.gcrootmap: + self._build_close_stack() debug_start('jit-backend-counts') self.set_debug(have_debug_prints()) debug_stop('jit-backend-counts') @@ -274,6 +276,40 @@ rawstart = mc.materialize(self.cpu.asmmemmgr, []) self.stack_check_slowpath = rawstart + @staticmethod + def _close_stack(css): + # similar to trackgcroot.py:pypy_asm_stackwalk, first part + from pypy.rpython.memory.gctransform import asmgcroot + new = rffi.cast(asmgcroot.ASM_FRAMEDATA_HEAD_PTR, css) + next = asmgcroot.gcrootanchor.next + new.next = next + new.prev = asmgcroot.gcrootanchor + asmgcroot.gcrootanchor.next = new + next.prev = new + # XXX and now release the GIL + + @staticmethod + def _reopen_stack(css): + # similar to trackgcroot.py:pypy_asm_stackwalk, second part + from pypy.rpython.memory.gctransform import asmgcroot + # XXX first reacquire the GIL + old = rffi.cast(asmgcroot.ASM_FRAMEDATA_HEAD_PTR, css) + prev = old.prev + next = old.next + prev.next = next + next.prev = prev + + _CLOSESTACK_FUNC = lltype.Ptr(lltype.FuncType([rffi.LONGP], + lltype.Void)) + + def _build_close_stack(self): + closestack_func = llhelper(self._CLOSESTACK_FUNC, + self._close_stack) + reopenstack_func = llhelper(self._CLOSESTACK_FUNC, + self._reopen_stack) + self.closestack_addr = self.cpu.cast_ptr_to_int(closestack_func) + self.reopenstack_addr = self.cpu.cast_ptr_to_int(reopenstack_func) + def assemble_loop(self, inputargs, operations, looptoken, log): '''adds the following attributes to looptoken: _x86_loop_code (an integer giving an address) @@ -1832,7 +1868,75 @@ self.mc.CMP_bi(FORCE_INDEX_OFS, 0) self.implement_guard(guard_token, 'L') - 
genop_guard_call_release_gil = genop_guard_call_may_force + def genop_guard_call_release_gil(self, op, guard_op, guard_token, + arglocs, result_loc): + # first, close the stack in the sense of the asmgcc GC root tracker + gcrootmap = self.cpu.gc_ll_descr.gcrootmap + if gcrootmap: + # note that regalloc.py used save_all_regs=True to save all + # registers, so we don't have to care about saving them (other + # than ebp) in the close_stack_struct + self.call_close_stack() + # do the call + faildescr = guard_op.getdescr() + fail_index = self.cpu.get_fail_descr_number(faildescr) + self.mc.MOV_bi(FORCE_INDEX_OFS, fail_index) + self.genop_call(op, arglocs, result_loc) + # then reopen the stack + if gcrootmap: + self.call_reopen_stack(result_loc) + # finally, the guard_not_forced + self.mc.CMP_bi(FORCE_INDEX_OFS, 0) + self.implement_guard(guard_token, 'L') + + def call_close_stack(self): + from pypy.rpython.memory.gctransform import asmgcroot + css = self._regalloc.close_stack_struct + if css == 0: + use_words = (2 + max(asmgcroot.INDEX_OF_EBP, + asmgcroot.FRAME_PTR) + 1) + pos = self._regalloc.fm.reserve_location_in_frame(use_words) + css = get_ebp_ofs(pos + use_words - 1) + self._regalloc.close_stack_struct = css + # The location where the future CALL will put its return address + # will be [ESP-WORD], so save that as the next frame's top address + self.mc.LEA_rs(eax.value, -WORD) # LEA EAX, [ESP-4] + frame_ptr = css + WORD * (2+asmgcroot.FRAME_PTR) + self.mc.MOV_br(frame_ptr, eax.value) # MOV [css.frame], EAX + # Save ebp + index_of_ebp = css + WORD * (2+asmgcroot.INDEX_OF_EBP) + self.mc.MOV_br(index_of_ebp, ebp.value) # MOV [css.ebp], EBP + # Call the closestack() function (also releasing the GIL) + if IS_X86_32: + reg = eax + elif IS_X86_64: + reg = edi + self.mc.LEA_rb(reg.value, css) + self._emit_call(imm(self.closestack_addr), [reg]) + + def call_reopen_stack(self, save_loc): + # save the previous result (eax/xmm0) into the stack temporarily + if 
isinstance(save_loc, RegLoc): + self._regalloc.reserve_param(save_loc.width//WORD) + if save_loc.is_xmm: + self.mc.MOVSD_sx(0, save_loc.value) + else: + self.mc.MOV_sr(0, save_loc.value) + # call the reopenstack() function (also reacquiring the GIL) + css = self._regalloc.close_stack_struct + assert css != 0 + if IS_X86_32: + reg = eax + elif IS_X86_64: + reg = edi + self.mc.LEA_rb(reg.value, css) + self._emit_call(imm(self.reopenstack_addr), [reg]) + # restore the result from the stack + if isinstance(save_loc, RegLoc): + if save_loc.is_xmm: + self.mc.MOVSD_xs(save_loc.value, 0) + else: + self.mc.MOV_rs(save_loc.value, 0) def genop_guard_call_assembler(self, op, guard_op, guard_token, arglocs, result_loc): @@ -2042,7 +2146,7 @@ # on 64-bits, 'tid' is a value that fits in 31 bits self.mc.MOV_mi((eax.value, 0), tid) self.mc.MOV(heap(nursery_free_adr), edx) - + genop_discard_list = [Assembler386.not_implemented_op_discard] * rop._LAST genop_list = [Assembler386.not_implemented_op] * rop._LAST genop_llong_list = {} From commits-noreply at bitbucket.org Mon Mar 28 15:58:48 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 15:58:48 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: merge heads Message-ID: <20110328135848.7CA07282BDE@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42997:13434937a514 Date: 2011-03-28 15:58 +0200 http://bitbucket.org/pypy/pypy/changeset/13434937a514/ Log: merge heads From commits-noreply at bitbucket.org Mon Mar 28 16:01:19 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 28 Mar 2011 16:01:19 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: no clue what I changed here Message-ID: <20110328140119.A75FD36C20C@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3435:b2183f02c264 Date: 2011-03-27 21:50 +0200 http://bitbucket.org/pypy/extradoc/changeset/b2183f02c264/ Log: no clue what I changed here diff --git a/talk/icooolps2011/benchmarks/benchmarks.gnumeric 
b/talk/icooolps2011/benchmarks/benchmarks.gnumeric index 6fde41d4cf7bb53c74b65b8279243590c39a4838..ffed6255af236a6598be62be330504def8250dbd GIT binary patch [cut] From commits-noreply at bitbucket.org Mon Mar 28 16:01:20 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 28 Mar 2011 16:01:20 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: old author block Message-ID: <20110328140120.A831336C20C@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3436:bd0fb934dc66 Date: 2011-03-27 21:50 +0200 http://bitbucket.org/pypy/extradoc/changeset/bd0fb934dc66/ Log: old author block diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -78,10 +78,16 @@ \title{Runtime Feedback in a Meta-Tracing JIT for Efficient Dynamic Languages} -\authorinfo{Carl Friedrich Bolz \and XXX} - {Heinrich-Heine-Universität Düsseldorf, STUPS Group, Germany +\authorinfo{Carl Friedrich Bolz$^a$ \and Antonio Cuni$^a$ \and Maciej Fijałkowski$^b$ \and Michael Leuschel$^a$ \and \\ + Samuele Pedroni$^c$ \and Armin Rigo$^a$} + {$^a$Heinrich-Heine-Universität Düsseldorf, STUPS Group, Germany + + $^b$merlinux GmbH, Hildesheim, Germany + + $^c$Open End, Göteborg, Sweden } - {cfbolz at gmx.de \and XXX} + {cfbolz at gmx.de \and anto.cuni at gmail.com \and fijal at merlinux.eu \and + leuschel at cs.uni-duesseldorf.de \and samuele.pedroni at gmail.com \and arigo at tunes.org} \conferenceinfo{ICOOOLPS}{'11 Lancaster, UK} \CopyrightYear{2011} diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 4685bf0da4320b5f3d58b2a19b666d02ae5a5fd4..7cf8f1aa252e4086b302f568eb7d8349ebff9ebb GIT binary patch [cut] From commits-noreply at bitbucket.org Mon Mar 28 16:01:21 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 28 Mar 2011 16:01:21 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: expand related work some more Message-ID: 
<20110328140121.42CDB36C20C@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3437:7c8c361fa5d6 Date: 2011-03-28 15:53 +0200 http://bitbucket.org/pypy/extradoc/changeset/7c8c361fa5d6/ Log: expand related work some more diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -226,7 +226,7 @@ started the interpreter is used; only the most frequently executed paths through the user program are turned into machine code. The tracing JIT tries to produce traces that correspond to loops in the traced program, but most tracing JITs now also -have support for tracing non-loops \cite{XXX}. +have support for tracing non-loops \cite{andreas_gal_incremental_2006}. Because the traces always correspond to a concrete execution they cannot contain any control flow splits. Therefore they encode the control flow @@ -246,8 +246,7 @@ Tracing through the execution of an interpreter has many advantages. It makes the tracer, its optimizers and backends reusable for a variety of languages. The language semantics do not need to be encoded into the JIT. Instead the tracer -just picks them up from the interpreter. XXX mention disadvantage of long -traces? +just picks them up from the interpreter. While the operations in a trace are those of the interpreter, the loops that are traced by the tracer are the loops in the @@ -857,7 +856,8 @@ Partial evaluation \cite{jones_partial_1993} tries to automatically transform interpreters into compilers using the second futamura projection -\cite{futamura_partial_1999}. XXX +\cite{futamura_partial_1999}. Given that classical partial evaluation works +strictly ahead of time, it inherently cannot support runtime feedback. An early attempt at building a general environment for implementing languages efficiently is described by Wolczko et. al. \cite{mario_wolczko_towards_1999}. 
@@ -871,22 +871,23 @@ \cite{rose_bytecodes_2009} that will be added to the JVM is supposed to make the implementation of dynamic languages on top of JVMs easier. The bytecode gives the language implementor control over how the JIT optimizes the language's -features. XXX +features and when optimized code needs to be deoptimized. XXX -%We already explored promotion in other context, such as earlier examples of -%promotion \cite{carl_friedrich_bolz_towards_????}, \cite{armin_rigo_jit_2007} - -\cite{chambers_efficient_1989} maps -\cite{hoelzle_optimizing_1994} Type feedback -\cite{hoelzle_optimizing_1991} PICs -find the web page of V8 about maps +We already explored promotion in other context, such as earlier versions of +PyPy's JIT \cite{armin_rigo_jit_2007} as well as a Prolog partial evaluator +\cite{carl_friedrich_bolz_towards_????}. Promotion is quite similar to +(polymorphic) inline caching and runtime type feedback techniques which were +first used in Smalltalk \cite{deutsch_efficient_1984} and SELF +\cite{hoelzle_optimizing_1991,hoelzle_optimizing_1994} implementations. +Promotion is more general because any information can be cached in line, not +just classes of method receivers. %is there anything about versions? smalltalks tend to clear their method caches %when new methods are added. self and java use dependency tracking and %deoptimization. this is better what we have above, because we need runtime %checks. mention out of line guard? 
-jruby used versions at some point, jvm-l mailing list discusses them +%jruby used versions at some point, jvm-l mailing list discusses them \section{Conclusion and Next Steps} diff --git a/talk/icooolps2011/paper.bib b/talk/icooolps2011/paper.bib --- a/talk/icooolps2011/paper.bib +++ b/talk/icooolps2011/paper.bib @@ -19,6 +19,20 @@ author = {Carl Friedrich Bolz and Michael Leuschel and Armin Rigo} }, + at inproceedings{deutsch_efficient_1984, + address = {Salt Lake City, Utah, United States}, + title = {Efficient implementation of the smalltalk-80 system}, + isbn = {0-89791-125-3}, + url = {http://portal.acm.org/citation.cfm?id=800017.800542}, + doi = {10.1145/800017.800542}, + abstract = {The Smalltalk-80* programming language includes dynamic storage allocation, full upward funargs, and universally polymorphic procedures; the Smalltalk-80 programming system features interactive execution with incremental compilation, and implementation portability. These features of modern programming systems are among the most difficult to implement efficiently, even individually. A new implementation of the Smalltalk-80 system, hosted on a small microprocessor-based computer, achieves high performance while retaining complete (object code) compatibility with existing implementations. This paper discusses the most significant optimization techniques developed over the course of the project, many of which are applicable to other languages. The key idea is to represent certain runtime state (both code and data) in more than one form, and to convert between forms when needed.}, + booktitle = {Proceedings of the 11th {ACM} {SIGACT-SIGPLAN} symposium on Principles of programming languages}, + publisher = {{ACM}}, + author = {L. Peter Deutsch and Allan M. 
Schiffman}, + year = {1984}, + pages = {297--302} +}, + @phdthesis{cuni_high_2010, title = {High performance implementation of Python for {CLI/.NET} with {JIT} compiler generation for dynamic languages.}, school = {Dipartimento di Informatica e Scienze {dell'Informazione,} University of Genova}, From commits-noreply at bitbucket.org Mon Mar 28 16:26:36 2011 From: commits-noreply at bitbucket.org (arigo) Date: Mon, 28 Mar 2011 16:26:36 +0200 (CEST) Subject: [pypy-svn] pypy jitypes2: Probably implement support for the GIL. It's just a few lines of code, Message-ID: <20110328142636.36F2F282B9D@codespeak.net> Author: Armin Rigo Branch: jitypes2 Changeset: r42998:96d5bdab2f44 Date: 2011-03-28 16:26 +0200 http://bitbucket.org/pypy/pypy/changeset/96d5bdab2f44/ Log: Probably implement support for the GIL. It's just a few lines of code, but it would take serious efforts to write a test for it :-( Instead I suppose we can just give up and write a test for the whole pypy-c. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -286,13 +286,28 @@ new.prev = asmgcroot.gcrootanchor asmgcroot.gcrootanchor.next = new next.prev = new - # XXX and now release the GIL + # and now release the GIL + before = rffi.aroundstate.before + # Store a flag (by abuse in new+2*WORD) that tells if we must + # call the "after" function or not. The issue is that the + # before/after fields can be set at a random point during the + # execution, and we should not call the "after" function if we + # did not call the "before" function. It works by assuming that + # before/after start out being None/None, and are later set (once + # only) to some pair of functions. 
+ css[2] = int(bool(before)) + if before: + before() @staticmethod def _reopen_stack(css): + # first reacquire the GIL + if css[2]: + after = rffi.aroundstate.after + assert after + after() # similar to trackgcroot.py:pypy_asm_stackwalk, second part from pypy.rpython.memory.gctransform import asmgcroot - # XXX first reacquire the GIL old = rffi.cast(asmgcroot.ASM_FRAMEDATA_HEAD_PTR, css) prev = old.prev next = old.next From commits-noreply at bitbucket.org Mon Mar 28 19:18:35 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Mon, 28 Mar 2011 19:18:35 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: improve conclusion Message-ID: <20110328171835.6D0A1282B9D@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3438:26aeaf9d9171 Date: 2011-03-28 19:18 +0200 http://bitbucket.org/pypy/extradoc/changeset/26aeaf9d9171/ Log: improve conclusion diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -100,6 +100,7 @@ \begin{abstract} +XXX \end{abstract} @@ -890,17 +891,15 @@ %jruby used versions at some point, jvm-l mailing list discusses them -\section{Conclusion and Next Steps} +\section{Conclusion} In this paper we presented two hints that can be used in the source code of an -interpreter written with PyPy. They are used to influence what the optimizer -does with the trace. We also showed how a small but still relevant dynamic -object model can use these hints to no longer use any dictionary lookups after -tracing. Instead a number of guards are inserted into the trace to check whether -the assumptions about the objects are still true. This makes operations on -objects seriously faster. +interpreter written with PyPy. They give control over runtime feedback and +optimization to the language implementor. They are expressive enough for +building well-known virtual machine optimization techniques, such as maps and +inline caches. 
We believe that they are flexible enough to express a wide +variety of language semantics efficiently. -XXX \section*{Acknowledgements} diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 7cf8f1aa252e4086b302f568eb7d8349ebff9ebb..c791c59dc67bc872ef68e67fad0cc13427fb4a78 GIT binary patch [cut] From commits-noreply at bitbucket.org Tue Mar 29 00:18:56 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Tue, 29 Mar 2011 00:18:56 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: new task about max/min Message-ID: <20110328221856.DE92A36C20B@codespeak.net> Author: Alex Gaynor Branch: extradoc Changeset: r3439:cd1509c37f73 Date: 2011-03-28 18:18 -0400 http://bitbucket.org/pypy/extradoc/changeset/cd1509c37f73/ Log: new task about max/min diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -106,6 +106,9 @@ - let super() work with the method cache. +- turn max(x, y)/min(x, y) into MAXSD, MINSD instructions when x and y are + floats. 
+ - xxx (find more examples :-) BACKEND TASKS From commits-noreply at bitbucket.org Tue Mar 29 08:19:15 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Tue, 29 Mar 2011 08:19:15 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: produce first part of the jumpargs from the VirtualState Message-ID: <20110329061915.DC31D282B8B@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r42999:757722930b2e Date: 2011-03-29 07:24 +0200 http://bitbucket.org/pypy/pypy/changeset/757722930b2e/ Log: produce first part of the jumpargs from the VirtualState diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -152,7 +152,7 @@ for i in range(len(values)): self.state[i].enum_forced_boxes(inputargs, seen_inputargs, values[i]) - return [a for a in inputargs if not isinstance(a, Const)] + return inputargs class VirtualStateAdder(resume.ResumeDataVirtualAdder): @@ -245,6 +245,8 @@ raise NotImplementedError def enum_forced_boxes(self, boxes, already_seen, value): + if self.level == LEVEL_CONSTANT: + return key = value.get_key_box() if key not in already_seen: boxes.append(value.force_box()) @@ -372,9 +374,10 @@ # This loop is equivalent to the main optimization loop in # Optimizer.propagate_all_forward for newop in loop_operations: + newop = inliner.inline_op(newop, clone=False) if newop.getopnum() == rop.JUMP: - newop.initarglist(inputargs) - newop = inliner.inline_op(newop, clone=False) + values = [self.getvalue(arg) for arg in newop.getarglist()] + newop.initarglist(virtual_state.make_inputargs(values)) #self.optimizer.first_optimization.propagate_forward(newop) self.optimizer.send_extra_operation(newop) diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ 
b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -5150,8 +5150,7 @@ i4 = int_sub(i2, i1) jump(p1, i1, i2, i4) """ - #self.optimize_strunicode_loop(ops, expected, expected) - self.optimize_loop(ops, expected) + self.optimize_strunicode_loop(ops, expected, expected) def test_str_slice_len_surviving2(self): ops = """ From commits-noreply at bitbucket.org Tue Mar 29 08:19:17 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Tue, 29 Mar 2011 08:19:17 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: moved VirtualState handling into file of its own separating it (more or less) from the resume handling Message-ID: <20110329061917.7AB23282B8B@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43000:936aa44d46f9 Date: 2011-03-29 08:18 +0200 http://bitbucket.org/pypy/pypy/changeset/936aa44d46f9/ Log: moved VirtualState handling into file of its own separating it (more or less) from the resume handling diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -1,5 +1,5 @@ from pypy.jit.metainterp.optimizeopt.optimizer import * -from pypy.jit.metainterp.optimizeopt.virtualize import AbstractVirtualValue +from pypy.jit.metainterp.optimizeopt.virtualstate import VirtualStateAdder from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.jit.metainterp.compile import ResumeGuardDescr from pypy.jit.metainterp.resume import Snapshot @@ -128,131 +128,6 @@ self.snapshot_map[snapshot] = new_snapshot return new_snapshot -class VirtualState(object): - def __init__(self, state): - self.state = state - - def generalization_of(self, other): - assert len(self.state) == len(other.state) - for i in range(len(self.state)): - if not self.state[i].generalization_of(other.state[i]): - return False - return True - - def generate_guards(self, other, args, cpu, extra_guards): - assert len(self.state) 
== len(other.state) == len(args) - for i in range(len(self.state)): - self.state[i].generate_guards(other.state[i], args[i], - cpu, extra_guards) - - def make_inputargs(self, values): - assert len(values) == len(self.state) - inputargs = [] - seen_inputargs = {} - for i in range(len(values)): - self.state[i].enum_forced_boxes(inputargs, seen_inputargs, - values[i]) - return inputargs - - -class VirtualStateAdder(resume.ResumeDataVirtualAdder): - def __init__(self, optimizer): - self.fieldboxes = {} - self.optimizer = optimizer - self.info = {} - - def register_virtual_fields(self, keybox, fieldboxes): - self.fieldboxes[keybox] = fieldboxes - - def already_seen_virtual(self, keybox): - return keybox in self.fieldboxes - - def getvalue(self, box): - return self.optimizer.getvalue(box) - - def state(self, box): - value = self.getvalue(box) - box = value.get_key_box() - try: - info = self.info[box] - except KeyError: - if value.is_virtual(): - self.info[box] = info = value.make_virtual_info(self, None) - flds = self.fieldboxes[box] - info.fieldstate = [self.state(b) for b in flds] - else: - self.info[box] = info = self.make_not_virtual(value) - return info - - def get_virtual_state(self, jump_args): - for box in jump_args: - value = self.getvalue(box) - value.get_args_for_fail(self) - return VirtualState([self.state(box) for box in jump_args]) - - - def make_not_virtual(self, value): - return NotVirtualInfo(value) - -class NotVirtualInfo(resume.AbstractVirtualInfo): - def __init__(self, value): - self.known_class = value.known_class - self.level = value.level - if value.intbound is None: - self.intbound = IntBound(MININT, MAXINT) - else: - self.intbound = value.intbound.clone() - if value.is_constant(): - self.constbox = value.box - else: - self.constbox = None - - def generalization_of(self, other): - # XXX This will always retrace instead of forcing anything which - # might be what we want sometimes? 
- if not isinstance(other, NotVirtualInfo): - return False - if other.level < self.level: - return False - if self.level == LEVEL_CONSTANT: - if not self.constbox.same_constant(other.constbox): - return False - elif self.level == LEVEL_KNOWNCLASS: - if self.known_class != other.known_class: # FIXME: use issubclass? - return False - return self.intbound.contains_bound(other.intbound) - - def _generate_guards(self, other, box, cpu, extra_guards): - if not isinstance(other, NotVirtualInfo): - raise InvalidLoop - if self.level == LEVEL_KNOWNCLASS and \ - box.nonnull() and \ - self.known_class.same_constant(cpu.ts.cls_of_box(box)): - # Note: This is only a hint on what the class of box was - # during the trace. There are actually no guarentees that this - # box realy comes from a trace. The hint is used here to choose - # between either eimtting a guard_class and jumping to an - # excisting compiled loop or retracing the loop. Both - # alternatives will always generate correct behaviour, but - # performace will differ. - op = ResOperation(rop.GUARD_CLASS, [box, self.known_class], None) - extra_guards.append(op) - return - # Remaining cases are probably not interesting - raise InvalidLoop - if self.level == LEVEL_CONSTANT: - import pdb; pdb.set_trace() - raise NotImplementedError - - def enum_forced_boxes(self, boxes, already_seen, value): - if self.level == LEVEL_CONSTANT: - return - key = value.get_key_box() - if key not in already_seen: - boxes.append(value.force_box()) - already_seen[value.get_key_box()] = None - - class UnrollOptimizer(Optimization): """Unroll the loop into two iterations. 
The first one will become the preamble or entry bridge (don't think there is a diff --git a/pypy/jit/metainterp/resume.py b/pypy/jit/metainterp/resume.py --- a/pypy/jit/metainterp/resume.py +++ b/pypy/jit/metainterp/resume.py @@ -434,20 +434,6 @@ def debug_prints(self): raise NotImplementedError - - def generalization_of(self, other): - raise NotImplementedError - - def generate_guards(self, other, box, cpu, extra_guards): - if self.generalization_of(other): - return - self._generate_guards(other, box, cpu, extra_guards) - - def _generate_guards(self, other, box, cpu, extra_guards): - raise InvalidLoop - - def enum_forced_boxes(self, boxes, already_seen, value): - raise NotImplementedError class AbstractVirtualStructInfo(AbstractVirtualInfo): def __init__(self, fielddescrs): @@ -468,39 +454,6 @@ str(self.fielddescrs[i]), str(untag(self.fieldnums[i]))) - def generalization_of(self, other): - if not self._generalization_of(other): - return False - assert len(self.fielddescrs) == len(self.fieldstate) - assert len(other.fielddescrs) == len(other.fieldstate) - if len(self.fielddescrs) != len(other.fielddescrs): - return False - - for i in range(len(self.fielddescrs)): - if other.fielddescrs[i] is not self.fielddescrs[i]: - return False - if not self.fieldstate[i].generalization_of(other.fieldstate[i]): - return False - - return True - - def _generalization_of(self, other): - raise NotImplementedError - - def enum_forced_boxes(self, boxes, already_seen, value): - #FIXME: assert isinstance(value, AbstractVirtualStructValue) - key = value.get_key_box() - if key in already_seen: - return - already_seen[key] = None - if value.box is None: - for i in range(len(self.fielddescrs)): - v = value._fields[self.fielddescrs[i]] - self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) - else: - boxes.append(value.box) - - class VirtualInfo(AbstractVirtualStructInfo): def __init__(self, known_class, fielddescrs): AbstractVirtualStructInfo.__init__(self, fielddescrs) @@ -516,13 
+469,6 @@ debug_print("\tvirtualinfo", self.known_class.repr_rpython()) AbstractVirtualStructInfo.debug_prints(self) - def _generalization_of(self, other): - if not isinstance(other, VirtualInfo): - return False - if not self.known_class.same_constant(other.known_class): - return False - return True - class VStructInfo(AbstractVirtualStructInfo): def __init__(self, typedescr, fielddescrs): @@ -539,14 +485,6 @@ debug_print("\tvstructinfo", self.typedescr.repr_rpython()) AbstractVirtualStructInfo.debug_prints(self) - def _generalization_of(self, other): - if not isinstance(other, VStructInfo): - return False - if self.typedescr is not other.typedescr: - return False - return True - - class VArrayInfo(AbstractVirtualInfo): def __init__(self, arraydescr): self.arraydescr = arraydescr @@ -578,32 +516,6 @@ for i in self.fieldnums: debug_print("\t\t", str(untag(i))) - def generalization_of(self, other): - if self.arraydescr is not other.arraydescr: - return False - if len(self.fieldstate) != len(other.fieldstate): - return False - for i in range(len(self.fieldstate)): - if not self.fieldstate[i].generalization_of(other.fieldstate[i]): - return False - return True - - def enum_forced_boxes(self, boxes, already_seen, value): - # FIXME: assert isinstance(value, VArrayValue) - key = value.get_key_box() - if key in already_seen: - return - already_seen[key] = None - if value.box is None: - for i in range(len(self.fieldstate)): - v = value._items[i] - self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) - else: - boxes.append(value.box) - - - - class VStrPlainInfo(AbstractVirtualInfo): """Stands for the string made out of the characters of all fieldnums.""" diff --git a/pypy/jit/metainterp/optimizeopt/virtualstate.py b/pypy/jit/metainterp/optimizeopt/virtualstate.py new file mode 100644 --- /dev/null +++ b/pypy/jit/metainterp/optimizeopt/virtualstate.py @@ -0,0 +1,245 @@ +from pypy.jit.metainterp import resume +from pypy.jit.metainterp.optimizeopt import virtualize 
+from pypy.jit.metainterp.optimizeopt.optimizer import LEVEL_CONSTANT, \ + LEVEL_KNOWNCLASS, \ + MININT, MAXINT +from pypy.jit.metainterp.optimizeutil import InvalidLoop +from pypy.jit.metainterp.optimizeopt.intutils import IntBound +from pypy.jit.metainterp.resoperation import rop, ResOperation + +class AbstractVirtualStateInfo(object): + def generalization_of(self, other): + raise NotImplementedError + + def generate_guards(self, other, box, cpu, extra_guards): + if self.generalization_of(other): + return + self._generate_guards(other, box, cpu, extra_guards) + + def _generate_guards(self, other, box, cpu, extra_guards): + raise InvalidLoop + + def enum_forced_boxes(self, boxes, already_seen, value): + raise NotImplementedError + +class AbstractVirtualStructStateInfo(AbstractVirtualStateInfo): + def __init__(self, fielddescrs): + self.fielddescrs = fielddescrs + + def generalization_of(self, other): + if not self._generalization_of(other): + return False + assert len(self.fielddescrs) == len(self.fieldstate) + assert len(other.fielddescrs) == len(other.fieldstate) + if len(self.fielddescrs) != len(other.fielddescrs): + return False + + for i in range(len(self.fielddescrs)): + if other.fielddescrs[i] is not self.fielddescrs[i]: + return False + if not self.fieldstate[i].generalization_of(other.fieldstate[i]): + return False + + return True + + def _generalization_of(self, other): + raise NotImplementedError + + def enum_forced_boxes(self, boxes, already_seen, value): + assert isinstance(value, virtualize.AbstractVirtualStructValue) + key = value.get_key_box() + if key in already_seen: + return + already_seen[key] = None + if value.box is None: + for i in range(len(self.fielddescrs)): + v = value._fields[self.fielddescrs[i]] + self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) + else: + boxes.append(value.box) + +class VirtualStateInfo(AbstractVirtualStructStateInfo): + def __init__(self, known_class, fielddescrs): + 
AbstractVirtualStructStateInfo.__init__(self, fielddescrs) + self.known_class = known_class + + def _generalization_of(self, other): + if not isinstance(other, VirtualStateInfo): + return False + if not self.known_class.same_constant(other.known_class): + return False + return True + +class VStructStateInfo(AbstractVirtualStructStateInfo): + def __init__(self, typedescr, fielddescrs): + AbstractVirtualStructStateInfo.__init__(self, fielddescrs) + self.typedescr = typedescr + + def _generalization_of(self, other): + if not isinstance(other, VStructStateInfo): + return False + if self.typedescr is not other.typedescr: + return False + return True + +class VArrayStateInfo(AbstractVirtualStateInfo): + def __init__(self, arraydescr): + self.arraydescr = arraydescr + + def generalization_of(self, other): + if self.arraydescr is not other.arraydescr: + return False + if len(self.fieldstate) != len(other.fieldstate): + return False + for i in range(len(self.fieldstate)): + if not self.fieldstate[i].generalization_of(other.fieldstate[i]): + return False + return True + + def enum_forced_boxes(self, boxes, already_seen, value): + assert isinstance(value, virtualize.VArrayValue) + key = value.get_key_box() + if key in already_seen: + return + already_seen[key] = None + if value.box is None: + for i in range(len(self.fieldstate)): + v = value._items[i] + self.fieldstate[i].enum_forced_boxes(boxes, already_seen, v) + else: + boxes.append(value.box) + +class NotVirtualStateInfo(AbstractVirtualStateInfo): + def __init__(self, value): + self.known_class = value.known_class + self.level = value.level + if value.intbound is None: + self.intbound = IntBound(MININT, MAXINT) + else: + self.intbound = value.intbound.clone() + if value.is_constant(): + self.constbox = value.box + else: + self.constbox = None + + def generalization_of(self, other): + # XXX This will always retrace instead of forcing anything which + # might be what we want sometimes? 
+ if not isinstance(other, NotVirtualStateInfo): + return False + if other.level < self.level: + return False + if self.level == LEVEL_CONSTANT: + if not self.constbox.same_constant(other.constbox): + return False + elif self.level == LEVEL_KNOWNCLASS: + if self.known_class != other.known_class: # FIXME: use issubclass? + return False + return self.intbound.contains_bound(other.intbound) + + def _generate_guards(self, other, box, cpu, extra_guards): + if not isinstance(other, NotVirtualStateInfo): + raise InvalidLoop + if self.level == LEVEL_KNOWNCLASS and \ + box.nonnull() and \ + self.known_class.same_constant(cpu.ts.cls_of_box(box)): + # Note: This is only a hint on what the class of box was + # during the trace. There are actually no guarantees that this + # box really comes from a trace. The hint is used here to choose + # between either emitting a guard_class and jumping to an + # existing compiled loop or retracing the loop. Both + # alternatives will always generate correct behaviour, but + # performance will differ. 
+ op = ResOperation(rop.GUARD_CLASS, [box, self.known_class], None) + extra_guards.append(op) + return + # Remaining cases are probably not interesting + raise InvalidLoop + if self.level == LEVEL_CONSTANT: + import pdb; pdb.set_trace() + raise NotImplementedError + + def enum_forced_boxes(self, boxes, already_seen, value): + if self.level == LEVEL_CONSTANT: + return + key = value.get_key_box() + if key not in already_seen: + boxes.append(value.force_box()) + already_seen[value.get_key_box()] = None + + +class VirtualState(object): + def __init__(self, state): + self.state = state + + def generalization_of(self, other): + assert len(self.state) == len(other.state) + for i in range(len(self.state)): + if not self.state[i].generalization_of(other.state[i]): + return False + return True + + def generate_guards(self, other, args, cpu, extra_guards): + assert len(self.state) == len(other.state) == len(args) + for i in range(len(self.state)): + self.state[i].generate_guards(other.state[i], args[i], + cpu, extra_guards) + + def make_inputargs(self, values): + assert len(values) == len(self.state) + inputargs = [] + seen_inputargs = {} + for i in range(len(values)): + self.state[i].enum_forced_boxes(inputargs, seen_inputargs, + values[i]) + return inputargs + + +class VirtualStateAdder(resume.ResumeDataVirtualAdder): + def __init__(self, optimizer): + self.fieldboxes = {} + self.optimizer = optimizer + self.info = {} + + def register_virtual_fields(self, keybox, fieldboxes): + self.fieldboxes[keybox] = fieldboxes + + def already_seen_virtual(self, keybox): + return keybox in self.fieldboxes + + def getvalue(self, box): + return self.optimizer.getvalue(box) + + def state(self, box): + value = self.getvalue(box) + box = value.get_key_box() + try: + info = self.info[box] + except KeyError: + if value.is_virtual(): + self.info[box] = info = value.make_virtual_info(self, None) + flds = self.fieldboxes[box] + info.fieldstate = [self.state(b) for b in flds] + else: + 
self.info[box] = info = self.make_not_virtual(value) + return info + + def get_virtual_state(self, jump_args): + for box in jump_args: + value = self.getvalue(box) + value.get_args_for_fail(self) + return VirtualState([self.state(box) for box in jump_args]) + + + def make_not_virtual(self, value): + return NotVirtualStateInfo(value) + + def make_virtual(self, known_class, fielddescrs): + return VirtualStateInfo(known_class, fielddescrs) + + def make_vstruct(self, typedescr, fielddescrs): + return VStructStateInfo(typedescr, fielddescrs) + + def make_varray(self, arraydescr): + return VArrayStateInfo(arraydescr) + From commits-noreply at bitbucket.org Tue Mar 29 08:49:46 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Tue, 29 Mar 2011 08:49:46 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: rpythonized Message-ID: <20110329064946.4C0C5282B8B@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43001:d2a2e9b34ed5 Date: 2011-03-29 08:49 +0200 http://bitbucket.org/pypy/pypy/changeset/d2a2e9b34ed5/ Log: rpythonized diff --git a/pypy/jit/metainterp/optimizeopt/virtualstate.py b/pypy/jit/metainterp/optimizeopt/virtualstate.py --- a/pypy/jit/metainterp/optimizeopt/virtualstate.py +++ b/pypy/jit/metainterp/optimizeopt/virtualstate.py @@ -7,7 +7,7 @@ from pypy.jit.metainterp.optimizeopt.intutils import IntBound from pypy.jit.metainterp.resoperation import rop, ResOperation -class AbstractVirtualStateInfo(object): +class AbstractVirtualStateInfo(resume.AbstractVirtualInfo): def generalization_of(self, other): raise NotImplementedError From commits-noreply at bitbucket.org Tue Mar 29 10:25:52 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Tue, 29 Mar 2011 10:25:52 +0200 (CEST) Subject: [pypy-svn] pypy guard-improvements: typo Message-ID: <20110329082552.84BE4282B8B@codespeak.net> Author: Hakan Ardo Branch: guard-improvements Changeset: r43002:5bcced77d7bd Date: 2011-02-17 14:18 +0100 
http://bitbucket.org/pypy/pypy/changeset/5bcced77d7bd/ Log: typo diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -584,7 +584,7 @@ guard_false(i1) [] jump(i0) """ - self.optimize_loop(ops, 'Not', expected) + self.optimize_loop(ops, expected) def test_ooisnull_oononnull_2(self): ops = """ From commits-noreply at bitbucket.org Tue Mar 29 10:25:52 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Tue, 29 Mar 2011 10:25:52 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: hg merge default Message-ID: <20110329082552.C1C4E282BA1@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43003:781d7497e35b Date: 2011-03-29 10:08 +0200 http://bitbucket.org/pypy/pypy/changeset/781d7497e35b/ Log: hg merge default From commits-noreply at bitbucket.org Tue Mar 29 16:45:08 2011 From: commits-noreply at bitbucket.org (arigo) Date: Tue, 29 Mar 2011 16:45:08 +0200 (CEST) Subject: [pypy-svn] pypy out-of-line-guards: Uh, this was checked in with "<<<<<<< local" markers. Message-ID: <20110329144508.CF787282BAD@codespeak.net> Author: Armin Rigo Branch: out-of-line-guards Changeset: r43004:51688cdad658 Date: 2011-03-29 16:44 +0200 http://bitbucket.org/pypy/pypy/changeset/51688cdad658/ Log: Uh, this was checked in with "<<<<<<< local" markers. Overwrite the resulting mess with the trunk's version. 
diff --git a/.hgignore b/.hgignore --- a/.hgignore +++ b/.hgignore @@ -1,11 +1,3 @@ -<<<<<<< local -.*\.pyc -pypy/_cache -.*~ -pypy/module/cpyext/src/.*.o -compiled/ -include/ -lib_pypy/ctypes_config_cache======= syntax: glob *.py[co] *~ @@ -21,6 +13,7 @@ ^pypy/module/cpyext/test/.+\.o$ ^pypy/module/cpyext/test/.+\.obj$ ^pypy/module/cpyext/test/.+\.manifest$ +^pypy/module/test_lib_pypy/ctypes_tests/.+\.o$ ^pypy/doc/.+\.html$ ^pypy/doc/basicblock\.asc$ ^pypy/doc/.+\.svninfo$ @@ -58,6 +51,7 @@ ^pypy/doc/discussion/.+\.html$ ^include/.+\.h$ ^include/.+\.inl$ +^pypy/doc/_build/.*$ ^pypy/doc/config/.+\.html$ ^pypy/doc/config/style\.css$ ^pypy/doc/jit/.+\.html$ @@ -67,4 +61,6 @@ ^pypy/doc/image/lattice3\.png$ ^pypy/doc/image/stackless_informal\.png$ ^pypy/doc/image/parsing_example.+\.png$ -^compiled>>>>>>> other +^compiled +^.git/ +^release/ From commits-noreply at bitbucket.org Wed Mar 30 08:19:25 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 30 Mar 2011 08:19:25 +0200 (CEST) Subject: [pypy-svn] pypy jit-str_in_preamble: hg merge jit-short_from_state Message-ID: <20110330061925.BCD32282BDF@codespeak.net> Author: Hakan Ardo Branch: jit-str_in_preamble Changeset: r43005:1140c509ba27 Date: 2011-03-29 16:35 +0200 http://bitbucket.org/pypy/pypy/changeset/1140c509ba27/ Log: hg merge jit-short_from_state From commits-noreply at bitbucket.org Wed Mar 30 08:19:26 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 30 Mar 2011 08:19:26 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: already perfomred by the reconstruction Message-ID: <20110330061926.A8772282BDF@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43006:6be04713616e Date: 2011-03-29 16:42 +0200 http://bitbucket.org/pypy/pypy/changeset/6be04713616e/ Log: already perfomred by the reconstruction diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ 
b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -239,10 +239,6 @@ def inline(self, loop_operations, loop_args, jump_args, virtual_state): self.inliner = inliner = Inliner(loop_args, jump_args) - # FIXME: Move this to reconstruct - for v in self.optimizer.values.values(): - v.last_guard_index = -1 # FIXME: Are there any more indexes stored? - values = [self.getvalue(arg) for arg in jump_args] inputargs = virtual_state.make_inputargs(values) From commits-noreply at bitbucket.org Wed Mar 30 08:19:27 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 30 Mar 2011 08:19:27 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: moved loop_invariant_results to rewrite Message-ID: <20110330061927.C36A2282BDF@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43007:13da0acdf169 Date: 2011-03-29 18:43 +0200 http://bitbucket.org/pypy/pypy/changeset/13da0acdf169/ Log: moved loop_invariant_results to rewrite diff --git a/pypy/jit/metainterp/optimizeopt/rewrite.py b/pypy/jit/metainterp/optimizeopt/rewrite.py --- a/pypy/jit/metainterp/optimizeopt/rewrite.py +++ b/pypy/jit/metainterp/optimizeopt/rewrite.py @@ -12,10 +12,16 @@ """Rewrite operations into equivalent, cheaper operations. This includes already executed operations and constants. 
""" + def __init__(self): + self.loop_invariant_results = {} def reconstruct_for_next_iteration(self, surviving_boxes, optimizer, valuemap): - return OptRewrite() + new = OptRewrite() + for key, value in self.loop_invariant_results.items(): + new.loop_invariant_results[key] = \ + value.get_cloned(new, valuemap) + return new def propagate_forward(self, op): args = self.optimizer.make_args_key(op) @@ -292,7 +298,7 @@ # expects a compile-time constant assert isinstance(arg, Const) key = make_hashable_int(arg.getint()) - resvalue = self.optimizer.loop_invariant_results.get(key, None) + resvalue = self.loop_invariant_results.get(key, None) if resvalue is not None: self.make_equal_to(op.result, resvalue) return @@ -301,7 +307,7 @@ op = op.copy_and_change(rop.CALL) self.emit_operation(op) resvalue = self.getvalue(op.result) - self.optimizer.loop_invariant_results[key] = resvalue + self.loop_invariant_results[key] = resvalue def _optimize_nullness(self, op, box, expect_nonnull): value = self.getvalue(box) diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -265,7 +265,6 @@ self.interned_refs = self.cpu.ts.new_ref_dict() self.resumedata_memo = resume.ResumeDataLoopMemo(metainterp_sd) self.bool_boxes = {} - self.loop_invariant_results = {} self.pure_operations = args_dict() self.producer = {} self.pendingfields = [] @@ -310,17 +309,11 @@ for o in self.optimizations] new.set_optimizations(optimizations) - new.interned_refs = self.interned_refs - new.bool_boxes = {} + new.interned_refs = self.interned_refs # Constants + new.bool_boxes = {} # Flags values as bools for value in new.bool_boxes.keys(): new.bool_boxes[value.get_cloned(new, valuemap)] = None - # FIXME: Move to rewrite.py - new.loop_invariant_results = {} - for key, value in self.loop_invariant_results.items(): - new.loop_invariant_results[key] = \ - 
value.get_cloned(new, valuemap) - new.pure_operations = self.pure_operations new.producer = self.producer assert self.posponedop is None From commits-noreply at bitbucket.org Wed Mar 30 08:19:30 2011 From: commits-noreply at bitbucket.org (hakanardo) Date: Wed, 30 Mar 2011 08:19:30 +0200 (CEST) Subject: [pypy-svn] pypy jit-short_from_state: Extracting boxes suitable for short preamble. Only considering pure operation so far Message-ID: <20110330061930.69418282BDF@codespeak.net> Author: Hakan Ardo Branch: jit-short_from_state Changeset: r43008:03e22e9ee2f1 Date: 2011-03-30 07:58 +0200 http://bitbucket.org/pypy/pypy/changeset/03e22e9ee2f1/ Log: Extracting boxes suitable for short preamble. Only considering pure operation so far diff --git a/pypy/jit/metainterp/optimizeopt/unroll.py b/pypy/jit/metainterp/optimizeopt/unroll.py --- a/pypy/jit/metainterp/optimizeopt/unroll.py +++ b/pypy/jit/metainterp/optimizeopt/unroll.py @@ -162,6 +162,11 @@ modifier = VirtualStateAdder(self.optimizer) virtual_state = modifier.get_virtual_state(jump_args) + values = [self.getvalue(arg) for arg in jump_args] + inputargs = virtual_state.make_inputargs(values) + short_boxes = preamble_optimizer.produce_short_preamble_ops(inputargs) + print short_boxes + try: inputargs = self.inline(self.cloned_operations, loop.inputargs, jump_args, diff --git a/pypy/jit/metainterp/optimizeopt/heap.py b/pypy/jit/metainterp/optimizeopt/heap.py --- a/pypy/jit/metainterp/optimizeopt/heap.py +++ b/pypy/jit/metainterp/optimizeopt/heap.py @@ -119,6 +119,7 @@ self._lazy_setfields = [] # cached array items: {descr: CachedArrayItems} self.cached_arrayitems = {} + self.original_producer = {} def reconstruct_for_next_iteration(self, surviving_boxes, optimizer, valuemap): diff --git a/pypy/jit/metainterp/optimizeopt/optimizer.py b/pypy/jit/metainterp/optimizeopt/optimizer.py --- a/pypy/jit/metainterp/optimizeopt/optimizer.py +++ b/pypy/jit/metainterp/optimizeopt/optimizer.py @@ -254,6 +254,11 @@ #return 
self.__class__() raise NotImplementedError + def produce_potential_short_preamble_ops(self, potential_ops): + pass + +class BoxNotProducable(Exception): + pass class Optimizer(Optimization): @@ -266,6 +271,7 @@ self.resumedata_memo = resume.ResumeDataLoopMemo(metainterp_sd) self.bool_boxes = {} self.pure_operations = args_dict() + self.emitted_pure_operations = {} self.producer = {} self.pendingfields = [] self.posponedop = None @@ -302,6 +308,7 @@ assert valuemap is None if surviving_boxes is None: surviving_boxes = [] + valuemap = {} new = Optimizer(self.metainterp_sd, self.loop) optimizations = [o.reconstruct_for_next_iteration(surviving_boxes, @@ -325,9 +332,45 @@ force_if_needed=force) if value is not None: new.values[box] = value - + return new + def produce_potential_short_preamble_ops(self, potential_ops): + for op in self.emitted_pure_operations: + potential_ops[op.result] = op + for opt in self.optimizations: + opt.produce_potential_short_preamble_ops(potential_ops) + + def produce_short_preamble_ops(self, inputargs): + potential_ops = {} + self.produce_potential_short_preamble_ops(potential_ops) + + short_boxes = {} + for box in inputargs: + short_boxes[box] = None + for box in potential_ops.keys(): + try: + self.produce_short_preamble_box(box, short_boxes, + potential_ops) + except BoxNotProducable: + pass + return short_boxes + + def produce_short_preamble_box(self, box, short_boxes, potential_ops): + if box in short_boxes: + return + if self.getvalue(box).is_constant(): + return + if box in potential_ops: + op = potential_ops[box] + for arg in op.getarglist(): + arg = self.getvalue(arg).get_key_box() + self.produce_short_preamble_box(arg, short_boxes, + potential_ops) + short_boxes[box] = op + else: + raise BoxNotProducable + def turned_constant(self, value): for o in self.optimizations: o.turned_constant(value) @@ -548,6 +591,7 @@ return else: self.pure_operations[args] = op + self.emitted_pure_operations[op] = True # otherwise, the operation remains 
self.emit_operation(op) From commits-noreply at bitbucket.org Wed Mar 30 11:58:23 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 30 Mar 2011 11:58:23 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: add some benchmarks results, of course now we are over the page limit Message-ID: <20110330095823.E469B282BDA@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3440:c5221072e72b Date: 2011-03-30 11:57 +0200 http://bitbucket.org/pypy/extradoc/changeset/c5221072e72b/ Log: add some benchmarks results, of course now we are over the page limit diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -120,7 +120,7 @@ renaissance of this idea around the approach of tracing just-in-time compilers. A number of projects have attempted this approach. SPUR \cite{bebenita_spur:_2010} is a tracing JIT for .NET together with a JavaScript implementation in C\#. PyPy -\cite{armin_rigo_pypys_2006} contains a tracing JIT for RPython (a restricted +\cite{armin_rigo_pypys_2006} contains a tracing JIT for Python (a restricted subset of Python). This JIT is then used to trace a number of languages implementations written in RPython. A number of other experiments in this directions were done, such as an interpreter for Lua in JavaScript, which is run @@ -826,6 +826,59 @@ \section{Evaluation} \label{sec:evaluation} +For space reasons we cannot perform a full evaluation here, but still want to +present some benchmark numbers. We chose to present two benchmarks, a port of +the classical Richards benchmark in RPython and a Python version of the Telco +decimal benchmark\footnote{\texttt{http://speleotrove.com/decimal/telco.html}}, +using a pure Python decimal floating point implementation. + +The benchmarks were run on an otherwise idle Intel Core2 Duo P8400 processor +with 2.26 GHz and 3072 KB of cache on a machine with 3GB RAM running Linux +2.6.35. 
We compared the performance of various Python implementations on the +benchmarks. As a baseline, we used the standard Python implementation in C, +CPython 2.6.6\footnote{\texttt{http://python.org}}, which uses a bytecode-based +interpreter. We compare it against four versions of PyPy's Python interpreter, +all of them with JIT enabled. The PyPy baseline does not enable maps or type +versions. Then we have a version each where maps and versions are enabled alone +and finally a version with both. + +All benchmarks were run 50 times in the same process, to give the JIT time to +produce machine code. The arithmetic mean of the times of the last 30 runs were +used as the result. The errors were computed using a confidence interval with a +95\% confidence level \cite{georges_statistically_2007}. The results are +reported in Figure~\ref{fig:times}. + +XXX analysis + +\begin{figure} +{\footnotesize +\begin{center} +\begin{tabular}{|l|r|r|} +\hline + &richards[ms] &telco[ms] \\ +\hline +CPython &357.79 $\pm$ 1.32 &1209.67 $\pm$ 2.20\\ +speedup &1.00 $\times$ &1.00 $\times$\\ +\hline +JIT baseline &421.87 $\pm$ 0.48 &738.18 $\pm$ 3.29\\ +speedup &0.85 $\times$ &1.64 $\times$\\ +\hline +JIT map &382.88 $\pm$ 4.40 &834.19 $\pm$ 4.91\\ +speedup &0.93 $\times$ &1.45 $\times$\\ +\hline +JIT version &49.87 $\pm$ 0.29 &157.88 $\pm$ 1.79 \\ +speedup &7.17 $\times$ &7.66 $\times$\\ +\hline +JIT full &17.89 $\pm$ 1.15 &153.48 $\pm$ 1.86 \\ +speedup &20.00 $\times$ &7.88 $\times$\\ +\hline +\end{tabular} +\end{center} +} +\caption{Benchmark Results} +\label{fig:times} +\end{figure} + \section{Related Work} The very first meta-tracer is described by Sullivan et. al. 
diff --git a/talk/icooolps2011/benchmarks/benchmarks.gnumeric b/talk/icooolps2011/benchmarks/benchmarks.gnumeric index ffed6255af236a6598be62be330504def8250dbd..a99159c250d8bc2c132073789433f2dcf36ed834 GIT binary patch [cut] diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index c791c59dc67bc872ef68e67fad0cc13427fb4a78..18cc2e04014e1ad1268d5c358d54050e5befe84a GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 13:36:07 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 13:36:07 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: Added tag benchmarks for changeset 370c23f085d7 Message-ID: <20110330113607.4C962282BDF@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43009:5b7363ffa818 Date: 2011-03-04 11:37 +0100 http://bitbucket.org/pypy/pypy/changeset/5b7363ffa818/ Log: Added tag benchmarks for changeset 370c23f085d7 diff --git a/.hgtags b/.hgtags new file mode 100644 --- /dev/null +++ b/.hgtags @@ -0,0 +1,1 @@ +370c23f085d7673e33286a1088a968817d98570a benchmarks From commits-noreply at bitbucket.org Wed Mar 30 13:36:08 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 13:36:08 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: remove some unused parameters Message-ID: <20110330113608.38443282BDF@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43010:48603a036b4a Date: 2011-03-26 16:13 +0100 http://bitbucket.org/pypy/pypy/changeset/48603a036b4a/ Log: remove some unused parameters diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py --- a/pypy/jit/backend/arm/opassembler.py +++ b/pypy/jit/backend/arm/opassembler.py @@ -83,9 +83,9 @@ emit_op_int_and = gen_emit_op_ri('AND') emit_op_int_or = gen_emit_op_ri('ORR') emit_op_int_xor = gen_emit_op_ri('EOR') - emit_op_int_lshift = gen_emit_op_ri('LSL', imm_size=0x1F, allow_zero=False, commutative=False) - emit_op_int_rshift = 
gen_emit_op_ri('ASR', imm_size=0x1F, allow_zero=False, commutative=False) - emit_op_uint_rshift = gen_emit_op_ri('LSR', imm_size=0x1F, allow_zero=False, commutative=False) + emit_op_int_lshift = gen_emit_op_ri('LSL') + emit_op_int_rshift = gen_emit_op_ri('ASR') + emit_op_uint_rshift = gen_emit_op_ri('LSR') emit_op_int_lt = gen_emit_cmp_op(c.LT) emit_op_int_le = gen_emit_cmp_op(c.LE) diff --git a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -13,7 +13,7 @@ return fcond return f -def gen_emit_op_ri(opname, imm_size=0xFF, commutative=True, allow_zero=True): +def gen_emit_op_ri(opname): ri_op = getattr(AbstractARMv7Builder, '%s_ri' % opname) rr_op = getattr(AbstractARMv7Builder, '%s_rr' % opname) def f(self, op, arglocs, regalloc, fcond): @@ -30,9 +30,15 @@ helper = getattr(AbstractARMv7Builder, opname) def f(self, op, arglocs, regalloc, fcond): assert fcond is not None - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + if op.result: + self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + else: + self.mc.PUSH([reg.value for reg in r.caller_resp]) helper(self.mc, fcond) - self.mc.POP([reg.value for reg in r.caller_resp][1:]) + if op.result: + self.mc.POP([reg.value for reg in r.caller_resp][1:]) + else: + self.mc.POP([reg.value for reg in r.caller_resp]) return fcond return f From commits-noreply at bitbucket.org Wed Mar 30 13:36:09 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 13:36:09 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: return the condition flag in generator helpers Message-ID: <20110330113609.56EDC282BE8@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43011:997621fd4bc9 Date: 2011-03-30 13:25 +0200 http://bitbucket.org/pypy/pypy/changeset/997621fd4bc9/ Log: return the condition flag in generator helpers diff --git 
a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -52,15 +52,17 @@ def gen_emit_float_op(opname): op_rr = getattr(AbstractARMv7Builder, opname) - def f(self, op, arglocs, regalloc, fcon): + def f(self, op, arglocs, regalloc, fcond): arg1, arg2, result = arglocs op_rr(self.mc, result.value, arg1.value, arg2.value) + return fcond return f def gen_emit_unary_float_op(opname): op_rr = getattr(AbstractARMv7Builder, opname) - def f(self, op, arglocs, regalloc, fcon): + def f(self, op, arglocs, regalloc, fcond): arg1, result = arglocs op_rr(self.mc, result.value, arg1.value) + return fcond return f def gen_emit_float_cmp_op(cond): From commits-noreply at bitbucket.org Wed Mar 30 13:36:10 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 13:36:10 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: support floats in bridge entry code Message-ID: <20110330113610.A30F2282BE8@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43012:e91ef6ac6375 Date: 2011-03-30 13:32 +0200 http://bitbucket.org/pypy/pypy/changeset/e91ef6ac6375/ Log: support floats in bridge entry code diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py --- a/pypy/jit/backend/arm/regalloc.py +++ b/pypy/jit/backend/arm/regalloc.py @@ -156,17 +156,24 @@ arg = inputargs[i] i += 1 if loc.is_reg(): - self.reg_bindings[arg] = loc + if arg.type == FLOAT: + self.vfprm.reg_bindings[arg] = loc + else: + self.rm.reg_bindings[arg] = loc #XXX add float else: self.frame_manager.frame_bindings[arg] = loc used[loc] = None # XXX combine with x86 code and move to llsupport - self.free_regs = [] - for reg in self.all_regs: + self.rm.free_regs = [] + for reg in self.rm.all_regs: if reg not in used: - self.free_regs.append(reg) + self.rm.free_regs.append(reg) + self.vfprm.free_regs = [] + for reg in self.vfprm.all_regs: 
+ if reg not in used: + self.vfprm.free_regs.append(reg) # note: we need to make a copy of inputargs because possibly_free_vars # is also used on op args, which is a non-resizable list self.possibly_free_vars(list(inputargs)) From commits-noreply at bitbucket.org Wed Mar 30 13:36:11 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 13:36:11 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: categorize floating point registers to be saved around calls Message-ID: <20110330113611.A3E49282BE8@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43013:08888655a2dc Date: 2011-03-30 13:35 +0200 http://bitbucket.org/pypy/pypy/changeset/08888655a2dc/ Log: categorize floating point registers to be saved around calls diff --git a/pypy/jit/backend/arm/registers.py b/pypy/jit/backend/arm/registers.py --- a/pypy/jit/backend/arm/registers.py +++ b/pypy/jit/backend/arm/registers.py @@ -21,3 +21,9 @@ callee_resp = [r4, r5, r6, r7, r8, r9, r10, fp] callee_saved_registers = callee_resp+[lr] callee_restored_registers = callee_resp+[pc] + +caller_vfp_resp = [d0, d1, d2, d3, d4, d5, d6, d7] +callee_vfp_resp = [d8, d9, d10, d11, d12, d13, d14, d15] + +callee_saved_vfp_registers = callee_vfp_resp + From commits-noreply at bitbucket.org Wed Mar 30 13:59:35 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 30 Mar 2011 13:59:35 +0200 (CEST) Subject: [pypy-svn] pypy default: kill the virtualref_index field: it is never used Message-ID: <20110330115935.13F0A282BDC@codespeak.net> Author: Antonio Cuni Branch: Changeset: r43014:ec269923271d Date: 2011-03-30 13:59 +0200 http://bitbucket.org/pypy/pypy/changeset/ec269923271d/ Log: kill the virtualref_index field: it is never used diff --git a/pypy/jit/metainterp/test/test_optimizeopt.py b/pypy/jit/metainterp/test/test_optimizeopt.py --- a/pypy/jit/metainterp/test/test_optimizeopt.py +++ b/pypy/jit/metainterp/test/test_optimizeopt.py @@ -3112,7 +3112,6 @@ i0 = force_token() p2 = 
new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i0, descr=virtualtokendescr) - setfield_gc(p2, 5, descr=virtualrefindexdescr) escape(p2) setfield_gc(p2, p1, descr=virtualforceddescr) setfield_gc(p2, -3, descr=virtualtokendescr) @@ -3145,7 +3144,6 @@ # p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 3, descr=virtualrefindexdescr) setfield_gc(p0, p2, descr=nextdescr) # call_may_force(i1, descr=mayforcevirtdescr) @@ -3185,7 +3183,6 @@ # p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 2, descr=virtualrefindexdescr) setfield_gc(p0, p2, descr=nextdescr) # call_may_force(i1, descr=mayforcevirtdescr) @@ -3252,7 +3249,7 @@ #self.loop.inputargs[0].value = self.nodeobjvalue #self.check_expanded_fail_descr('''p2, p1 # p0.refdescr = p2 - # where p2 is a jit_virtual_ref_vtable, virtualtokendescr=i3, virtualrefindexdescr=2 + # where p2 is a jit_virtual_ref_vtable, virtualtokendescr=i3 # where p1 is a node_vtable, nextdescr=p1b # where p1b is a node_vtable, valuedescr=i1 # ''', rop.GUARD_NO_EXCEPTION) @@ -3273,7 +3270,6 @@ i3 = force_token() p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 7, descr=virtualrefindexdescr) escape(p2) p1 = new_with_vtable(ConstClass(node_vtable)) setfield_gc(p2, p1, descr=virtualforceddescr) @@ -3299,7 +3295,6 @@ i3 = force_token() p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 23, descr=virtualrefindexdescr) escape(p2) setfield_gc(p2, p1, descr=virtualforceddescr) setfield_gc(p2, -3, descr=virtualtokendescr) diff --git a/pypy/jit/metainterp/virtualref.py b/pypy/jit/metainterp/virtualref.py --- a/pypy/jit/metainterp/virtualref.py +++ b/pypy/jit/metainterp/virtualref.py @@ -13,7 +13,6 @@ self.JIT_VIRTUAL_REF = lltype.GcStruct('JitVirtualRef', 
('super', rclass.OBJECT), ('virtual_token', lltype.Signed), - ('virtualref_index', lltype.Signed), ('forced', rclass.OBJECTPTR)) self.jit_virtual_ref_vtable = lltype.malloc(rclass.OBJECT_VTABLE, zero=True, flavor='raw', @@ -27,8 +26,6 @@ fielddescrof = self.cpu.fielddescrof self.descr_virtual_token = fielddescrof(self.JIT_VIRTUAL_REF, 'virtual_token') - self.descr_virtualref_index = fielddescrof(self.JIT_VIRTUAL_REF, - 'virtualref_index') self.descr_forced = fielddescrof(self.JIT_VIRTUAL_REF, 'forced') # # record the type JIT_VIRTUAL_REF explicitly in the rtyper, too diff --git a/pypy/jit/metainterp/test/test_optimizeutil.py b/pypy/jit/metainterp/test/test_optimizeutil.py --- a/pypy/jit/metainterp/test/test_optimizeutil.py +++ b/pypy/jit/metainterp/test/test_optimizeutil.py @@ -147,7 +147,6 @@ FakeWarmRunnerDesc.cpu = cpu vrefinfo = VirtualRefInfo(FakeWarmRunnerDesc) virtualtokendescr = vrefinfo.descr_virtual_token - virtualrefindexdescr = vrefinfo.descr_virtualref_index virtualforceddescr = vrefinfo.descr_forced jit_virtual_ref_vtable = vrefinfo.jit_virtual_ref_vtable jvr_vtable_adr = llmemory.cast_ptr_to_adr(jit_virtual_ref_vtable) diff --git a/pypy/jit/metainterp/optimizeopt/virtualize.py b/pypy/jit/metainterp/optimizeopt/virtualize.py --- a/pypy/jit/metainterp/optimizeopt/virtualize.py +++ b/pypy/jit/metainterp/optimizeopt/virtualize.py @@ -286,7 +286,6 @@ vrefinfo = self.optimizer.metainterp_sd.virtualref_info c_cls = vrefinfo.jit_virtual_ref_const_class descr_virtual_token = vrefinfo.descr_virtual_token - descr_virtualref_index = vrefinfo.descr_virtualref_index # # Replace the VIRTUAL_REF operation with a virtual structure of type # 'jit_virtual_ref'. 
The jit_virtual_ref structure may be forced soon, @@ -296,7 +295,6 @@ tokenbox = BoxInt() self.emit_operation(ResOperation(rop.FORCE_TOKEN, [], tokenbox)) vrefvalue.setfield(descr_virtual_token, self.getvalue(tokenbox)) - vrefvalue.setfield(descr_virtualref_index, self.getvalue(indexbox)) def optimize_VIRTUAL_REF_FINISH(self, op): # Set the 'forced' field of the virtual_ref. From commits-noreply at bitbucket.org Wed Mar 30 14:44:38 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 30 Mar 2011 14:44:38 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: nonsense Message-ID: <20110330124438.2AB1A282BDC@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3441:84fec6a15849 Date: 2011-03-30 13:09 +0200 http://bitbucket.org/pypy/extradoc/changeset/84fec6a15849/ Log: nonsense diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -828,7 +828,7 @@ For space reasons we cannot perform a full evaluation here, but still want to present some benchmark numbers. We chose to present two benchmarks, a port of -the classical Richards benchmark in RPython and a Python version of the Telco +the classical Richards benchmark in Python and a Python version of the Telco decimal benchmark\footnote{\texttt{http://speleotrove.com/decimal/telco.html}}, using a pure Python decimal floating point implementation. 
From commits-noreply at bitbucket.org Wed Mar 30 14:44:38 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 30 Mar 2011 14:44:38 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: analyze a bit Message-ID: <20110330124438.ED394282BDC@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3442:3dc0fd6d9568 Date: 2011-03-30 14:13 +0200 http://bitbucket.org/pypy/extradoc/changeset/3dc0fd6d9568/ Log: analyze a bit diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -830,7 +830,10 @@ present some benchmark numbers. We chose to present two benchmarks, a port of the classical Richards benchmark in Python and a Python version of the Telco decimal benchmark\footnote{\texttt{http://speleotrove.com/decimal/telco.html}}, -using a pure Python decimal floating point implementation. +using a pure Python decimal floating point implementation. The results we see in +these two benchmarks seem to repeat themselves in other benchmarks using +object-oriented code, for purely numerical algorithms the speedups are a lot +lower. The benchmarks were run on an otherwise idle Intel Core2 Duo P8400 processor with 2.26 GHz and 3072 KB of cache on a machine with 3GB RAM running Linux @@ -848,7 +851,13 @@ 95\% confidence level \cite{georges_statistically_2007}. The results are reported in Figure~\ref{fig:times}. -XXX analysis +Versioned types speed up both benchmarks by a significant factor of around 7. +The speed advantage of maps alones is a lot less clear. Maps also have a memory +advantage which we did not measure here. By themselves, maps improved the +Richards benchmark slightly, but made the Telco benchmark slower. Enabling both +maps and versioned types together yields a significant improvement over just +versioned types for Richards. XXX good explanation. For Telco, enabling both +does not change much. 
\begin{figure} {\footnotesize diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 18cc2e04014e1ad1268d5c358d54050e5befe84a..63c11b14ed374c66f39fb64e365d052952c34502 GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 14:44:40 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 30 Mar 2011 14:44:40 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: a minimal abstract Message-ID: <20110330124440.958A32A202F@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3443:76798870426a Date: 2011-03-30 14:44 +0200 http://bitbucket.org/pypy/extradoc/changeset/76798870426a/ Log: a minimal abstract diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -100,7 +100,14 @@ \begin{abstract} -XXX +A meta-tracing JIT is a JIT that is applicable to a variety of different +languages without explicitly encoding language semantics into the compiler. So +far, meta-tracing JITs lacked a way to feed back runtime information into the +compiler, which restricted their performance. In this paper we describe the +mechanisms in PyPy's meta-tracing JIT that can be used to control runtime +feedback in flexible and language-specific ways. These mechanisms are flexible +enough to implement classical VM techniques such as maps and polymorphic inline +caches. 
\end{abstract} diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 63c11b14ed374c66f39fb64e365d052952c34502..0309aaa5d3ba880320e669b9cbf90865a230fb87 GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 15:26:12 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 30 Mar 2011 15:26:12 +0200 (CEST) Subject: [pypy-svn] pypy default: raise a specific exception in this case, which makes it easier to catch the case of unsupported terminal Message-ID: <20110330132612.5D7CD282BDC@codespeak.net> Author: Antonio Cuni Branch: Changeset: r43015:8e3e376aa56b Date: 2011-03-30 15:07 +0200 http://bitbucket.org/pypy/pypy/changeset/8e3e376aa56b/ Log: raise a specific exception in this case, which makes it easier to catch the case of unsupported terminal diff --git a/lib_pypy/pyrepl/unix_console.py b/lib_pypy/pyrepl/unix_console.py --- a/lib_pypy/pyrepl/unix_console.py +++ b/lib_pypy/pyrepl/unix_console.py @@ -27,7 +27,10 @@ from pyrepl.console import Console, Event from pyrepl import unix_eventqueue -_error = (termios.error, curses.error) +class InvalidTerminal(RuntimeError): + pass + +_error = (termios.error, curses.error, InvalidTerminal) # there are arguments for changing this to "refresh" SIGWINCH_EVENT = 'repaint' @@ -38,7 +41,7 @@ def _my_getstr(cap, optional=0): r = curses.tigetstr(cap) if not optional and r is None: - raise RuntimeError, \ + raise InvalidTerminal, \ "terminal doesn't have the required '%s' capability"%cap return r From commits-noreply at bitbucket.org Wed Mar 30 15:26:12 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Wed, 30 Mar 2011 15:26:12 +0200 (CEST) Subject: [pypy-svn] pypy default: make sure not to run _setup twice, else we might get infinite recursion between raw_input and _old_raw_input Message-ID: <20110330132612.F04C9282BDF@codespeak.net> Author: Antonio Cuni Branch: Changeset: r43016:71e57acf2ca9 Date: 2011-03-30 15:24 +0200 
http://bitbucket.org/pypy/pypy/changeset/71e57acf2ca9/ Log: make sure not to run _setup twice, else we might get infinite recursion between raw_input and _old_raw_input diff --git a/lib_pypy/pyrepl/readline.py b/lib_pypy/pyrepl/readline.py --- a/lib_pypy/pyrepl/readline.py +++ b/lib_pypy/pyrepl/readline.py @@ -381,6 +381,9 @@ def _setup(): global _old_raw_input + if _old_raw_input is not None: + return # don't run _setup twice + try: f_in = sys.stdin.fileno() f_out = sys.stdout.fileno() @@ -401,4 +404,5 @@ _old_raw_input = __builtin__.raw_input __builtin__.raw_input = _wrapper.raw_input +_old_raw_input = None _setup() From commits-noreply at bitbucket.org Wed Mar 30 15:37:11 2011 From: commits-noreply at bitbucket.org (cfbolz) Date: Wed, 30 Mar 2011 15:37:11 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: remember the issue about purefunction and version type lookups Message-ID: <20110330133711.400BF282BDC@codespeak.net> Author: Carl Friedrich Bolz Branch: extradoc Changeset: r3444:5ebbd50a83d0 Date: 2011-03-30 15:37 +0200 http://bitbucket.org/pypy/extradoc/changeset/5ebbd50a83d0/ Log: remember the issue about purefunction and version type lookups diff --git a/planning/jit.txt b/planning/jit.txt --- a/planning/jit.txt +++ b/planning/jit.txt @@ -11,6 +11,9 @@ NEW TASKS --------- +- think about whether W_TypeObject._pure_lookup_where_with_method_cache needs a + different decorator, because it cannot be moved around arbitrarily. + - have benchmarks for jit compile time and jit memory usage - kill GUARD_(NO)_EXCEPTION; replace that by LAST_EXC_VALUE to load the From commits-noreply at bitbucket.org Wed Mar 30 16:59:26 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 30 Mar 2011 16:59:26 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: A branch in which to play with supporting shadowstack in the JIT. 
Message-ID: <20110330145926.C9700282BDC@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43017:9501ceb52751 Date: 2011-03-30 16:58 +0200 http://bitbucket.org/pypy/pypy/changeset/9501ceb52751/ Log: A branch in which to play with supporting shadowstack in the JIT. A possible idea would be to keep the current structure in the JIT's backend, and put a marker in the shadowstack when we enter the assembler. It would let the assembler code run at full speed while getting rid of the asmgcc dependency. From commits-noreply at bitbucket.org Wed Mar 30 17:31:30 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 17:31:30 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: merge arm-backend-2 Message-ID: <20110330153130.6B8B6282BDC@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43018:4a09444cff4d Date: 2011-03-30 13:54 +0200 http://bitbucket.org/pypy/pypy/changeset/4a09444cff4d/ Log: merge arm-backend-2 diff --git a/pypy/module/readline/test/test_c_readline.py b/pypy/module/readline/test/test_c_readline.py deleted file mode 100644 --- a/pypy/module/readline/test/test_c_readline.py +++ /dev/null @@ -1,16 +0,0 @@ -""" -Directly test the basic ctypes wrappers. -""" - -import py -from pypy import conftest; conftest.translation_test_so_skip_if_appdirect() -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -def test_basic_import(): - c_readline.c_rl_initialize() diff --git a/pypy/module/__builtin__/app_file_stub.py b/pypy/module/__builtin__/app_file_stub.py deleted file mode 100644 --- a/pypy/module/__builtin__/app_file_stub.py +++ /dev/null @@ -1,20 +0,0 @@ -# NOT_RPYTHON - -class file(object): - """file(name[, mode[, buffering]]) -> file object - -Open a file. The mode can be 'r', 'w' or 'a' for reading (default), -writing or appending. 
The file will be created if it doesn't exist -when opened for writing or appending; it will be truncated when -opened for writing. Add a 'b' to the mode for binary files. -Add a '+' to the mode to allow simultaneous reading and writing. -If the buffering argument is given, 0 means unbuffered, 1 means line -buffered, and larger numbers specify the buffer size. -Add a 'U' to mode to open the file for input with universal newline -support. Any line ending in the input file will be seen as a '\n' -in Python. Also, a file so opened gains the attribute 'newlines'; -the value for this attribute is one of None (no newline read yet), -'\r', '\n', '\r\n' or a tuple containing all the newline types seen. - -Note: open() is an alias for file(). -""" diff --git a/pypy/module/_rawffi/error.py b/pypy/module/_rawffi/error.py deleted file mode 100644 --- a/pypy/module/_rawffi/error.py +++ /dev/null @@ -1,2 +0,0 @@ -class SegfaultException(Exception): - pass diff --git a/pypy/module/_socket/app_socket.py b/pypy/module/_socket/app_socket.py deleted file mode 100644 --- a/pypy/module/_socket/app_socket.py +++ /dev/null @@ -1,15 +0,0 @@ -"""Implementation module for socket operations. 
- -See the socket module for documentation.""" - -class error(IOError): - pass - -class herror(error): - pass - -class gaierror(error): - pass - -class timeout(error): - pass diff --git a/pypy/module/readline/__init__.py b/pypy/module/readline/__init__.py deleted file mode 100644 --- a/pypy/module/readline/__init__.py +++ /dev/null @@ -1,45 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.mixedmodule import MixedModule - -# XXX raw_input needs to check for space.readline_func and use -# it if its there - -class Module(MixedModule): - """Importing this module enables command line editing using GNU readline.""" - # the above line is the doc string of the translated module - - def setup_after_space_initialization(self): - from pypy.module.readline import c_readline - c_readline.setup_readline(self.space, self) - - interpleveldefs = { - 'readline' : 'interp_readline.readline', - } - - appleveldefs = { - 'parse_and_bind': 'app_stub.stub', - 'get_line_buffer': 'app_stub.stub_str', - 'insert_text': 'app_stub.stub', - 'read_init_file': 'app_stub.stub', - 'read_history_file': 'app_stub.stub', - 'write_history_file': 'app_stub.stub', - 'clear_history': 'app_stub.stub', - 'get_history_length': 'app_stub.stub_int', - 'set_history_length': 'app_stub.stub', - 'get_current_history_length': 'app_stub.stub_int', - 'get_history_item': 'app_stub.stub_str', - 'remove_history_item': 'app_stub.stub', - 'replace_history_item': 'app_stub.stub', - 'redisplay': 'app_stub.stub', - 'set_startup_hook': 'app_stub.stub', - 'set_pre_input_hook': 'app_stub.stub', - 'set_completer': 'app_stub.stub', - 'get_completer': 'app_stub.stub', - 'get_begidx': 'app_stub.stub_int', - 'get_endidx': 'app_stub.stub_int', - 'set_completer_delims': 'app_stub.stub', - 'get_completer_delims': 'app_stub.stub_str', - 'add_history': 'app_stub.stub', - } diff --git 
a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py --- a/pypy/jit/backend/arm/opassembler.py +++ b/pypy/jit/backend/arm/opassembler.py @@ -87,9 +87,9 @@ emit_op_int_and = gen_emit_op_ri('AND') emit_op_int_or = gen_emit_op_ri('ORR') emit_op_int_xor = gen_emit_op_ri('EOR') - emit_op_int_lshift = gen_emit_op_ri('LSL', imm_size=0x1F, allow_zero=False, commutative=False) - emit_op_int_rshift = gen_emit_op_ri('ASR', imm_size=0x1F, allow_zero=False, commutative=False) - emit_op_uint_rshift = gen_emit_op_ri('LSR', imm_size=0x1F, allow_zero=False, commutative=False) + emit_op_int_lshift = gen_emit_op_ri('LSL') + emit_op_int_rshift = gen_emit_op_ri('ASR') + emit_op_uint_rshift = gen_emit_op_ri('LSR') emit_op_int_lt = gen_emit_cmp_op(c.LT) emit_op_int_le = gen_emit_cmp_op(c.LE) @@ -133,7 +133,7 @@ _mixin_ = True - guard_size = 10*WORD + guard_size = 5*WORD def _emit_guard(self, op, arglocs, fcond, save_exc=False): descr = op.getdescr() assert isinstance(descr, AbstractFailDescr) @@ -143,12 +143,9 @@ self.mc.ADD_ri(r.pc.value, r.pc.value, self.guard_size-PC_OFFSET, cond=fcond) descr._arm_guard_pos = self.mc.currpos() - self.mc.PUSH([reg.value for reg in r.caller_resp]) - addr = self.cpu.get_on_leave_jitted_int(save_exception=save_exc) - self.mc.BL(addr) - self.mc.POP([reg.value for reg in r.caller_resp]) - memaddr = self._gen_path_to_exit_path(op, op.getfailargs(), arglocs) + memaddr = self._gen_path_to_exit_path(op, op.getfailargs(), + arglocs, save_exc=save_exc) descr._failure_recovery_code = memaddr return c.AL @@ -235,17 +232,19 @@ else: target = descr._arm_bootstrap_code + descr._arm_loop_code self.mc.B(target, fcond) + new_fd = max(regalloc.frame_manager.frame_depth, descr._arm_frame_depth) + regalloc.frame_manager.frame_depth = new_fd return fcond def emit_op_finish(self, op, arglocs, regalloc, fcond): self._gen_path_to_exit_path(op, op.getarglist(), arglocs, c.AL) return fcond - def emit_op_call(self, op, args, regalloc, fcond, 
spill_all_regs=False): + def emit_op_call(self, op, args, regalloc, fcond): adr = args[0].value arglist = op.getarglist()[1:] cond = self._emit_call(adr, arglist, regalloc, fcond, - op.result, spill_all_regs=spill_all_regs) + op.result) descr = op.getdescr() #XXX Hack, Hack, Hack if op.result and not we_are_translated() and not isinstance(descr, LoopToken): @@ -256,10 +255,9 @@ return cond # XXX improve this interface - # XXX and get rid of spill_all_regs in favor of pushing them in # emit_op_call_may_force # XXX improve freeing of stuff here - def _emit_call(self, adr, args, regalloc, fcond=c.AL, result=None, spill_all_regs=False): + def _emit_call(self, adr, args, regalloc, fcond=c.AL, result=None): n = 0 n_args = len(args) reg_args = min(n_args, 4) @@ -268,16 +266,17 @@ l = regalloc.make_sure_var_in_reg(args[i], selected_reg=r.all_regs[i]) # save caller saved registers - if spill_all_regs: - regalloc.before_call(save_all_regs=spill_all_regs) + if result: + # XXX hack if the call has a result force the value in r0 to be + # spilled + if reg_args == 0 or (isinstance(args[0], Box) and + regalloc.stays_alive(args[0])): + t = TempBox() + regalloc.force_allocate_reg(t, selected_reg=regalloc.call_result_location(t)) + regalloc.possibly_free_var(t) + self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) else: - if result: - # XXX maybe move instance check to llsupport/regalloc - if reg_args > 0 and isinstance(args[0], Box) and regalloc.stays_alive(args[0]): - regalloc.force_spill_var(args[0]) - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) - else: - self.mc.PUSH([reg.value for reg in r.caller_resp]) + self.mc.PUSH([reg.value for reg in r.caller_resp]) # all arguments past the 4th go on the stack if n_args > 4: @@ -297,14 +296,11 @@ self._adjust_sp(-n, fcond=fcond) # restore the argumets stored on the stack - if spill_all_regs: + if result is not None: regalloc.after_call(result) + self.mc.POP([reg.value for reg in r.caller_resp][1:]) else: - if result is not 
None: - regalloc.after_call(result) - self.mc.POP([reg.value for reg in r.caller_resp][1:]) - else: - self.mc.POP([reg.value for reg in r.caller_resp]) + self.mc.POP([reg.value for reg in r.caller_resp]) return fcond def emit_op_same_as(self, op, arglocs, regalloc, fcond): @@ -351,11 +347,20 @@ def emit_op_setfield_gc(self, op, arglocs, regalloc, fcond): value_loc, base_loc, ofs, size = arglocs if size.value == 4: - self.mc.STR_ri(value_loc.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.STR_ri(value_loc.value, base_loc.value, ofs.value) + else: + self.mc.STR_rr(value_loc.value, base_loc.value, ofs.value) elif size.value == 2: - self.mc.STRH_ri(value_loc.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.STRH_ri(value_loc.value, base_loc.value, ofs.value) + else: + self.mc.STRH_rr(value_loc.value, base_loc.value, ofs.value) elif size.value == 1: - self.mc.STRB_ri(value_loc.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.STRB_ri(value_loc.value, base_loc.value, ofs.value) + else: + self.mc.STRB_rr(value_loc.value, base_loc.value, ofs.value) else: assert 0 return fcond @@ -365,11 +370,20 @@ def emit_op_getfield_gc(self, op, arglocs, regalloc, fcond): base_loc, ofs, res, size = arglocs if size.value == 4: - self.mc.LDR_ri(res.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.LDR_ri(res.value, base_loc.value, ofs.value) + else: + self.mc.LDR_rr(res.value, base_loc.value, ofs.value) elif size.value == 2: - self.mc.LDRH_ri(res.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.LDRH_ri(res.value, base_loc.value, ofs.value) + else: + self.mc.LDRH_rr(res.value, base_loc.value, ofs.value) elif size.value == 1: - self.mc.LDRB_ri(res.value, base_loc.value, ofs.value) + if ofs.is_imm(): + self.mc.LDRB_ri(res.value, base_loc.value, ofs.value) + else: + self.mc.LDRB_rr(res.value, base_loc.value, ofs.value) else: assert 0 @@ -633,11 +647,16 @@ # from: ../x86/assembler.py:1668 # XXX Split into some helper methods 
def emit_guard_call_assembler(self, op, guard_op, arglocs, regalloc, fcond): + faildescr = guard_op.getdescr() + fail_index = self.cpu.get_fail_descr_number(faildescr) + self._write_fail_index(fail_index) + descr = op.getdescr() assert isinstance(descr, LoopToken) + assert op.numargs() == len(descr._arm_arglocs) resbox = TempBox() self._emit_call(descr._arm_direct_bootstrap_code, op.getarglist(), - regalloc, fcond, result=resbox, spill_all_regs=True) + regalloc, fcond, result=resbox) if op.result is None: value = self.cpu.done_with_this_frame_void_v else: @@ -650,56 +669,56 @@ value = self.cpu.done_with_this_frame_float_v else: raise AssertionError(kind) - assert value <= 0xff - # check value - resloc = regalloc.force_allocate_reg(resbox) + resloc = regalloc.try_allocate_reg(resbox) + assert resloc is r.r0 self.mc.gen_load_int(r.ip.value, value) self.mc.CMP_rr(resloc.value, r.ip.value) + regalloc.possibly_free_var(resbox) fast_jmp_pos = self.mc.currpos() - #fast_jmp_location = self.mc.curraddr() self.mc.NOP() - #if values are equal we take the fast pat + # Path A: use assembler helper + #if values are equal we take the fast path # Slow path, calling helper # jump to merge point jd = descr.outermost_jitdriver_sd assert jd is not None asm_helper_adr = self.cpu.cast_adr_to_int(jd.assembler_helper_adr) - self._emit_call(asm_helper_adr, [resbox, op.getarg(0)], regalloc, fcond, op.result) - regalloc.possibly_free_var(resbox) + self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + # resbox is allready in r0 + self.mov_loc_loc(arglocs[1], r.r1) + self.mc.BL(asm_helper_adr) + self.mc.POP([reg.value for reg in r.caller_resp][1:]) + if op.result: + regalloc.after_call(op.result) # jump to merge point jmp_pos = self.mc.currpos() #jmp_location = self.mc.curraddr() self.mc.NOP() + # Path B: load return value and reset token # Fast Path using result boxes # patch the jump to the fast path offset = self.mc.currpos() - fast_jmp_pos pmc = OverwritingBuilder(self.mc, fast_jmp_pos, 
WORD) - #pmc = ARMv7InMemoryBuilder(fast_jmp_location, WORD) pmc.ADD_ri(r.pc.value, r.pc.value, offset - PC_OFFSET, cond=c.EQ) # Reset the vable token --- XXX really too much special logic here:-( - # XXX Enable and fix this once the stange errors procuded by its - # presence are fixed - #if jd.index_of_virtualizable >= 0: - # from pypy.jit.backend.llsupport.descr import BaseFieldDescr - # size = jd.portal_calldescr.get_result_size(self.cpu.translate_support_code) - # vable_index = jd.index_of_virtualizable - # regalloc._sync_var(op.getarg(vable_index)) - # vable = regalloc.frame_manager.loc(op.getarg(vable_index)) - # fielddescr = jd.vable_token_descr - # assert isinstance(fielddescr, BaseFieldDescr) - # ofs = fielddescr.offset - # self.mc.MOV(eax, arglocs[1]) - # self.mc.MOV_mi((eax.value, ofs), 0) - # # in the line above, TOKEN_NONE = 0 + if jd.index_of_virtualizable >= 0: + from pypy.jit.backend.llsupport.descr import BaseFieldDescr + fielddescr = jd.vable_token_descr + assert isinstance(fielddescr, BaseFieldDescr) + ofs = fielddescr.offset + resloc = regalloc.force_allocate_reg(resbox) + self.mov_loc_loc(arglocs[1], r.ip) + self.mc.MOV_ri(resloc.value, 0) + self.mc.STR_ri(resloc.value, r.ip.value, ofs) + regalloc.possibly_free_var(resbox) if op.result is not None: # load the return value from fail_boxes_xxx[0] - resloc = regalloc.force_allocate_reg(op.result) kind = op.result.type if kind == INT: adr = self.fail_boxes_int.get_addr_for_num(0) @@ -707,22 +726,35 @@ adr = self.fail_boxes_ptr.get_addr_for_num(0) else: raise AssertionError(kind) + resloc = regalloc.force_allocate_reg(op.result) + regalloc.possibly_free_var(resbox) self.mc.gen_load_int(r.ip.value, adr) self.mc.LDR_ri(resloc.value, r.ip.value) + # merge point offset = self.mc.currpos() - jmp_pos - pmc = OverwritingBuilder(self.mc, jmp_pos, WORD) - pmc.ADD_ri(r.pc.value, r.pc.value, offset - PC_OFFSET) + if offset - PC_OFFSET >= 0: + pmc = OverwritingBuilder(self.mc, jmp_pos, WORD) + 
pmc.ADD_ri(r.pc.value, r.pc.value, offset - PC_OFFSET) self.mc.LDR_ri(r.ip.value, r.fp.value) self.mc.CMP_ri(r.ip.value, 0) self._emit_guard(guard_op, regalloc._prepare_guard(guard_op), c.GE) - regalloc.possibly_free_vars_for_op(op) - if op.result: - regalloc.possibly_free_var(op.result) return fcond + + # ../x86/assembler.py:668 + def redirect_call_assembler(self, oldlooptoken, newlooptoken): + # we overwrite the instructions at the old _x86_direct_bootstrap_code + # to start with a JMP to the new _x86_direct_bootstrap_code. + # Ideally we should rather patch all existing CALLs, but well. + oldadr = oldlooptoken._arm_direct_bootstrap_code + target = newlooptoken._arm_direct_bootstrap_code + mc = ARMv7Builder() + mc.B(target) + mc.copy_to_raw_memory(oldadr) + def emit_guard_call_may_force(self, op, guard_op, arglocs, regalloc, fcond): self.mc.LDR_ri(r.ip.value, r.fp.value) self.mc.CMP_ri(r.ip.value, 0) diff --git a/pypy/jit/metainterp/test/test_executor.py b/pypy/jit/metainterp/test/test_executor.py --- a/pypy/jit/metainterp/test/test_executor.py +++ b/pypy/jit/metainterp/test/test_executor.py @@ -10,6 +10,7 @@ from pypy.jit.metainterp.history import BoxFloat, ConstFloat from pypy.jit.metainterp.history import AbstractDescr, Box from pypy.jit.metainterp import history +from pypy.jit.codewriter import longlong from pypy.jit.backend.model import AbstractCPU from pypy.rpython.lltypesystem import llmemory, rffi @@ -59,11 +60,17 @@ def bh_call_f(self, func, calldescr, args_i, args_r, args_f): self.fakecalled = (func, calldescr, args_i, args_r, args_f) - return 42.5 + return longlong.getfloatstorage(42.5) def bh_strsetitem(self, string, index, newvalue): self.fakestrsetitem = (string, index, newvalue) +def boxfloat(x): + return BoxFloat(longlong.getfloatstorage(x)) + +def constfloat(x): + return ConstFloat(longlong.getfloatstorage(x)) + def test_execute(): cpu = FakeCPU() @@ -76,12 +83,14 @@ def test_execute_varargs(): cpu = FakeCPU() descr = FakeCallDescr() - argboxes = 
[BoxInt(99999), BoxInt(321), ConstFloat(2.25), ConstInt(123), - BoxPtr(), BoxFloat(5.5)] + argboxes = [BoxInt(99999), BoxInt(321), constfloat(2.25), ConstInt(123), + BoxPtr(), boxfloat(5.5)] box = execute_varargs(cpu, FakeMetaInterp(), rop.CALL, argboxes, descr) - assert box.value == 42.5 + assert box.getfloat() == 42.5 assert cpu.fakecalled == (99999, descr, [321, 123], - [ConstPtr.value], [2.25, 5.5]) + [ConstPtr.value], + [longlong.getfloatstorage(2.25), + longlong.getfloatstorage(5.5)]) def test_execute_nonspec(): cpu = FakeCPU() @@ -91,7 +100,7 @@ argboxes = [BoxInt(321), ConstInt(123)] box = execute_nonspec(cpu, FakeMetaInterp(), rop.CALL, argboxes, FakeCallDescr()) - assert box.value == 42.5 + assert box.getfloat() == 42.5 # arity == 0 box = execute_nonspec(cpu, None, rop.NEW, [], descr) assert box.value.fakeargs == ('new', descr) @@ -100,7 +109,7 @@ box = execute_nonspec(cpu, None, rop.ARRAYLEN_GC, [box1], descr) assert box.value == 55 # arity == 2 - box2 = BoxFloat(222.2) + box2 = boxfloat(222.2) fielddescr = FakeFieldDescr() execute_nonspec(cpu, None, rop.SETFIELD_GC, [box1, box2], fielddescr) assert cpu.fakesetfield == (box1.value, box2.value, fielddescr) @@ -289,7 +298,7 @@ boxargs = [] for x in args: if isinstance(x, float): - boxargs.append(BoxFloat(x)) + boxargs.append(boxfloat(x)) else: boxargs.append(BoxInt(x)) yield opnum, boxargs, rettype, retvalue @@ -300,7 +309,7 @@ if (isinstance(args[0], float) and isinstance(args[1], float) and args[0] == args[1]): - commonbox = BoxFloat(args[0]) + commonbox = boxfloat(args[0]) yield opnum, [commonbox, commonbox], rettype, retvalue def test_float_ops(): @@ -320,8 +329,8 @@ arg1 = ConstInt(a) arg2 = ConstInt(b) elif n[0:5] == 'FLOAT': - arg1 = ConstFloat(float(a)) - arg2 = ConstFloat(float(b)) + arg1 = constfloat(float(a)) + arg2 = constfloat(float(b)) elif n[0:3] == 'PTR': arg1 = ConstPtr(rffi.cast(llmemory.GCREF, a)) arg2 = ConstPtr(rffi.cast(llmemory.GCREF, b)) diff --git a/pypy/module/thread/app_thread.py 
b/pypy/module/thread/app_thread.py deleted file mode 100644 --- a/pypy/module/thread/app_thread.py +++ /dev/null @@ -1,7 +0,0 @@ -class error(Exception): - pass - -def exit(): - """This is synonymous to ``raise SystemExit''. It will cause the current -thread to exit silently unless the exception is caught.""" - raise SystemExit diff --git a/pypy/translator/c/test/test_dtoa.py b/pypy/translator/c/test/test_dtoa.py deleted file mode 100644 --- a/pypy/translator/c/test/test_dtoa.py +++ /dev/null @@ -1,92 +0,0 @@ -from __future__ import with_statement -from pypy.translator.tool.cbuild import ExternalCompilationInfo -from pypy.tool.autopath import pypydir -from pypy.rpython.lltypesystem import lltype, rffi -from pypy.rlib.rstring import StringBuilder -import py - -includes = [] -libraries = [] - -cdir = py.path.local(pypydir) / 'translator' / 'c' -files = [cdir / 'src' / 'dtoa.c'] -include_dirs = [cdir] - -eci = ExternalCompilationInfo( - include_dirs = include_dirs, - libraries = libraries, - separate_module_files = files, - separate_module_sources = [''' - #include - #include - #define WITH_PYMALLOC - #include "src/obmalloc.c" - '''], - export_symbols = ['_Py_dg_strtod', - '_Py_dg_dtoa', - '_Py_dg_freedtoa', - ], -) - -dg_strtod = rffi.llexternal( - '_Py_dg_strtod', [rffi.CCHARP, rffi.CCHARPP], rffi.DOUBLE, - compilation_info=eci) - -dg_dtoa = rffi.llexternal( - '_Py_dg_dtoa', [rffi.DOUBLE, rffi.INT, rffi.INT, - rffi.INTP, rffi.INTP, rffi.CCHARPP], rffi.CCHARP, - compilation_info=eci) - -dg_freedtoa = rffi.llexternal( - '_Py_dg_freedtoa', [rffi.CCHARP], lltype.Void, - compilation_info=eci) - -def strtod(input): - with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as end_ptr: - with rffi.scoped_str2charp(input) as ll_input: - result = dg_strtod(ll_input, end_ptr) - if end_ptr[0] and ord(end_ptr[0][0]): - offset = (rffi.cast(rffi.LONG, end_ptr[0]) - - rffi.cast(rffi.LONG, ll_input)) - raise ValueError("invalid input at position %d" % (offset,)) - return result - -def 
dtoa(value, mode=0, precision=0): - builder = StringBuilder(20) - with lltype.scoped_alloc(rffi.INTP.TO, 1) as decpt_ptr: - with lltype.scoped_alloc(rffi.INTP.TO, 1) as sign_ptr: - with lltype.scoped_alloc(rffi.CCHARPP.TO, 1) as end_ptr: - output_ptr = dg_dtoa(value, mode, precision, - decpt_ptr, sign_ptr, end_ptr) - try: - buflen = (rffi.cast(rffi.LONG, end_ptr[0]) - - rffi.cast(rffi.LONG, output_ptr)) - intpart = rffi.cast(lltype.Signed, decpt_ptr[0]) - if intpart <= buflen: - builder.append(rffi.charpsize2str(output_ptr, intpart)) - else: - builder.append(rffi.charpsize2str(output_ptr, buflen)) - while buflen < intpart: - builder.append('0') - intpart -= 1 - builder.append('.') - fracpart = buflen - intpart - if fracpart > 0: - ptr = rffi.ptradd(output_ptr, intpart) - builder.append(rffi.charpsize2str(ptr, fracpart)) - finally: - dg_freedtoa(output_ptr) - return builder.build() - -def test_strtod(): - assert strtod("12345") == 12345.0 - assert strtod("1.1") == 1.1 - assert strtod("3.47") == 3.47 - raises(ValueError, strtod, "123A") - -def test_dtoa(): - assert dtoa(3.47) == "3.47" - assert dtoa(1.1) == "1.1" - assert dtoa(12.3577) == "12.3577" - assert dtoa(10) == "10." - assert dtoa(1e100) == "1" + "0" * 100 + "." 
diff --git a/pypy/module/mmap/app_mmap.py b/pypy/module/mmap/app_mmap.py deleted file mode 100644 --- a/pypy/module/mmap/app_mmap.py +++ /dev/null @@ -1,5 +0,0 @@ -ACCESS_READ = 1 -ACCESS_WRITE = 2 -ACCESS_COPY = 3 - -error = EnvironmentError diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py --- a/pypy/jit/backend/arm/regalloc.py +++ b/pypy/jit/backend/arm/regalloc.py @@ -276,27 +276,26 @@ boxes.append(box) self.possibly_free_vars(boxes) + self.possibly_free_vars_for_op(op) res = self.force_allocate_reg(op.result) self.possibly_free_var(op.result) return [reg1, reg2, res] def prepare_guard_int_mul_ovf(self, op, guard, fcond): - args = [] boxes = list(op.getarglist()) a0, a1 = boxes - reg1, box = self._ensure_value_is_boxed(a0,forbidden_vars=boxes) + reg1, box = self._ensure_value_is_boxed(a0, forbidden_vars=boxes) boxes.append(box) - reg2, box = self._ensure_value_is_boxed(a1,forbidden_vars=boxes) + reg2, box = self._ensure_value_is_boxed(a1, forbidden_vars=boxes) boxes.append(box) - res = self.force_allocate_reg(op.result, boxes) + res = self.force_allocate_reg(op.result) + args = self._prepare_guard(guard, [reg1, reg2, res]) - args.append(reg1) - args.append(reg2) - args.append(res) - args = self._prepare_guard(guard, args) self.possibly_free_vars(boxes) + self.possibly_free_vars_for_op(op) self.possibly_free_var(op.result) + self.possibly_free_vars(guard.getfailargs()) return args @@ -363,6 +362,7 @@ arg = op.getarg(i) if arg: args.append(self.loc(arg)) + self.possibly_free_var(arg) else: args.append(None) return args @@ -371,6 +371,7 @@ l0, box = self._ensure_value_is_boxed(op.getarg(0)) args = self._prepare_guard(op, [l0]) self.possibly_free_var(box) + self.possibly_free_vars(op.getfailargs()) return args prepare_op_guard_false = prepare_op_guard_true @@ -391,10 +392,13 @@ assert op.result is None arglocs = self._prepare_guard(op, [l0, l1]) self.possibly_free_vars(boxes) + self.possibly_free_vars(op.getfailargs()) return arglocs 
def prepare_op_guard_no_overflow(self, op, fcond): - return self._prepare_guard(op) + locs = self._prepare_guard(op) + self.possibly_free_vars(op.getfailargs()) + return locs prepare_op_guard_overflow = prepare_op_guard_no_overflow @@ -415,6 +419,7 @@ pos_exception = imm(self.cpu.pos_exception()) arglocs = self._prepare_guard(op, [loc, loc1, resloc, pos_exc_value, pos_exception]) self.possibly_free_vars(boxes) + self.possibly_free_vars(op.getfailargs()) return arglocs def prepare_op_guard_no_exception(self, op, fcond): @@ -422,6 +427,7 @@ ConstInt(self.cpu.pos_exception())) arglocs = self._prepare_guard(op, [loc]) self.possibly_free_var(box) + self.possibly_free_vars(op.getfailargs()) return arglocs def prepare_op_guard_class(self, op, fcond): @@ -447,6 +453,7 @@ boxes.append(offset_box) arglocs = self._prepare_guard(op, [x, y, offset_loc]) self.possibly_free_vars(boxes) + self.possibly_free_vars(op.getfailargs()) return arglocs @@ -454,7 +461,8 @@ def prepare_op_jump(self, op, fcond): descr = op.getdescr() assert isinstance(descr, LoopToken) - return [self.loc(op.getarg(i)) for i in range(op.numargs())] + locs = [self.loc(op.getarg(i)) for i in range(op.numargs())] + return locs def prepare_op_setfield_gc(self, op, fcond): @@ -465,8 +473,14 @@ boxes.append(base_box) value_loc, value_box = self._ensure_value_is_boxed(a1, boxes) boxes.append(value_box) + c_ofs = ConstInt(ofs) + if _check_imm_arg(c_ofs): + ofs_loc = imm(ofs) + else: + ofs_loc, ofs_box = self._ensure_value_is_boxed(c_ofs, boxes) + boxes.append(ofs_box) self.possibly_free_vars(boxes) - return [value_loc, base_loc, imm(ofs), imm(size)] + return [value_loc, base_loc, ofs_loc, imm(size)] prepare_op_setfield_raw = prepare_op_setfield_gc @@ -474,11 +488,17 @@ a0 = op.getarg(0) ofs, size, ptr = self._unpack_fielddescr(op.getdescr()) base_loc, base_box = self._ensure_value_is_boxed(a0) + c_ofs = ConstInt(ofs) + if _check_imm_arg(c_ofs): + ofs_loc = imm(ofs) + else: + ofs_loc, ofs_box = 
self._ensure_value_is_boxed(c_ofs, [base_box]) + self.possibly_free_var(ofs_box) self.possibly_free_var(a0) self.possibly_free_var(base_box) res = self.force_allocate_reg(op.result) self.possibly_free_var(op.result) - return [base_loc, imm(ofs), res, imm(size)] + return [base_loc, ofs_loc, res, imm(size)] prepare_op_getfield_raw = prepare_op_getfield_gc prepare_op_getfield_raw_pure = prepare_op_getfield_gc @@ -651,7 +671,6 @@ else: argloc, box = self._ensure_value_is_boxed(arg) self.possibly_free_var(box) - self.possibly_free_vars_for_op(op) resloc = self.force_allocate_reg(op.result) self.possibly_free_var(op.result) @@ -743,16 +762,28 @@ fail_index = self.cpu.get_fail_descr_number(faildescr) self.assembler._write_fail_index(fail_index) args = [imm(rffi.cast(lltype.Signed, op.getarg(0).getint()))] - # force all reg values to be spilled when calling - self.assembler.emit_op_call(op, args, self, fcond, spill_all_regs=True) - - return self._prepare_guard(guard_op) + for v in guard_op.getfailargs(): + if v in self.reg_bindings: + self.force_spill_var(v) + self.assembler.emit_op_call(op, args, self, fcond) + locs = self._prepare_guard(guard_op) + self.possibly_free_vars(guard_op.getfailargs()) + return locs def prepare_guard_call_assembler(self, op, guard_op, fcond): - faildescr = guard_op.getdescr() - fail_index = self.cpu.get_fail_descr_number(faildescr) - self.assembler._write_fail_index(fail_index) - return [] + descr = op.getdescr() + assert isinstance(descr, LoopToken) + jd = descr.outermost_jitdriver_sd + assert jd is not None + size = jd.portal_calldescr.get_result_size(self.cpu.translate_support_code) + vable_index = jd.index_of_virtualizable + if vable_index >= 0: + self._sync_var(op.getarg(vable_index)) + vable = self.frame_manager.loc(op.getarg(vable_index)) + else: + vable = imm(0) + self.possibly_free_vars(guard_op.getfailargs()) + return [imm(size), vable] def _prepare_args_for_new_op(self, new_args): gc_ll_descr = self.cpu.gc_ll_descr diff --git 
a/pypy/module/readline/c_readline.py b/pypy/module/readline/c_readline.py deleted file mode 100644 --- a/pypy/module/readline/c_readline.py +++ /dev/null @@ -1,77 +0,0 @@ -from pypy.rpython.tool import rffi_platform as platform -from pypy.rpython.lltypesystem import lltype, rffi -from pypy.interpreter.error import OperationError -from pypy.interpreter.gateway import ObjSpace, interp2app -from pypy.translator.tool.cbuild import ExternalCompilationInfo - -# On various platforms, linking only with libreadline is not enough; -# we also need to link with some variant of curses or libtermcap. -# We follow the logic of CPython below. -def try_with_lib(extralibs, **kwds): - global most_recent_error - # at least on Gentoo Linux, readline.h doesn't compile if stdio.h is not - # included before - eci = ExternalCompilationInfo( - includes = ["stdio.h", "readline/readline.h", "readline/history.h"], - libraries = extralibs + ['readline'], - ) - try: - platform.verify_eci(eci) - return eci - except platform.CompilationError, e: - most_recent_error = e - return None - -eci = (try_with_lib([]) or - try_with_lib(['ncursesw']) or - try_with_lib(['ncurses']) or - try_with_lib(['curses']) or - try_with_lib(['termcap'], library_dirs=['/usr/lib/termcap'])) -if eci is None: - raise most_recent_error - -# ____________________________________________________________ - -def external(name, args, result): - return rffi.llexternal(name, args, result, compilation_info=eci) - -# get a binding to c library functions and define their args and return types -# char *readline(char *) -c_readline = external('readline', [rffi.CCHARP], rffi.CCHARP) - -# void rl_initiliaze(void) -c_rl_initialize = external('rl_initialize', [], lltype.Void) - -# void using_history(void) -c_using_history = external('using_history', [], lltype.Void) - -# void add_history(const char *) -c_add_history = external('add_history', [rffi.CCHARP], lltype.Void) - -#------------------------------------------------------------ -# 
special initialization of readline - -class ReadlineState(object): - lastline = "" # XXX possibly temporary hack -readlinestate = ReadlineState() - -def setup_readline(space, w_module): - c_using_history() - # XXX CPython initializes more stuff here - c_rl_initialize() - # install sys.__raw_input__, a hook that will be used by raw_input() - space.setitem(space.sys.w_dict, space.wrap('__raw_input__'), - space.wrap(app_readline_func)) - -def readline_func(space, prompt): - ll_res = c_readline(prompt) - if not ll_res: - raise OperationError(space.w_EOFError, space.w_None) - res = rffi.charp2str(ll_res) - if res and res != readlinestate.lastline: - readlinestate.lastline = res - c_add_history(res) - return space.wrap(res) - -readline_func.unwrap_spec = [ObjSpace, str] -app_readline_func = interp2app(readline_func) diff --git a/pypy/doc/config/objspace.usemodules.readline.txt b/pypy/doc/config/objspace.usemodules.readline.txt deleted file mode 100644 --- a/pypy/doc/config/objspace.usemodules.readline.txt +++ /dev/null @@ -1,1 +0,0 @@ -Use the 'readline' module. diff --git a/pypy/module/readline/app_stub.py b/pypy/module/readline/app_stub.py deleted file mode 100644 --- a/pypy/module/readline/app_stub.py +++ /dev/null @@ -1,13 +0,0 @@ -# NOT_RPYTHON - -def stub(*args, **kwds): - import warnings - warnings.warn("the 'readline' module is only a stub so far") - -def stub_str(*args, **kwds): - stub() - return '' - -def stub_int(*args, **kwds): - stub() - return 0 diff --git a/pypy/translator/c/src/math.c b/pypy/translator/c/src/math.c deleted file mode 100644 --- a/pypy/translator/c/src/math.c +++ /dev/null @@ -1,256 +0,0 @@ -/* Definitions of some C99 math library functions, for those platforms - that don't implement these functions already. 
*/ - -#include - -/* The following macros are copied from CPython header files */ - -#ifdef _MSC_VER -#include -#define PyPy_IS_NAN _isnan -#define PyPy_IS_INFINITY(X) (!_finite(X) && !_isnan(X)) -#define copysign _copysign -#else -#define PyPy_IS_NAN(X) ((X) != (X)) -#define PyPy_IS_INFINITY(X) ((X) && \ - (Py_FORCE_DOUBLE(X)*0.5 == Py_FORCE_DOUBLE(X))) -#endif - -#undef PyPy_NAN - -int -_pypy_math_isinf(double x) -{ - return PyPy_IS_INFINITY(x); -} - -int -_pypy_math_isnan(double x) -{ - return PyPy_IS_NAN(x); -} - -/* The following copyright notice applies to the original - implementations of acosh, asinh and atanh. */ - -/* - * ==================================================== - * Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved. - * - * Developed at SunPro, a Sun Microsystems, Inc. business. - * Permission to use, copy, modify, and distribute this - * software is freely granted, provided that this notice - * is preserved. - * ==================================================== - */ - -double _pypy_math_log1p(double x); - -static const double ln2 = 6.93147180559945286227E-01; -static const double two_pow_m28 = 3.7252902984619141E-09; /* 2**-28 */ -static const double two_pow_p28 = 268435456.0; /* 2**28 */ -static const double zero = 0.0; - -/* acosh(x) - * Method : - * Based on - * acosh(x) = log [ x + sqrt(x*x-1) ] - * we have - * acosh(x) := log(x)+ln2, if x is large; else - * acosh(x) := log(2x-1/(sqrt(x*x-1)+x)) if x>2; else - * acosh(x) := log1p(t+sqrt(2.0*t+t*t)); where t=x-1. - * - * Special cases: - * acosh(x) is NaN with signal if x<1. - * acosh(NaN) is NaN without signal. - */ - -double -_pypy_math_acosh(double x) -{ - if (PyPy_IS_NAN(x)) { - return x+x; - } - if (x < 1.) 
{ /* x < 1; return a signaling NaN */ - errno = EDOM; -#ifdef PyPy_NAN - return PyPy_NAN; -#else - return (x-x)/(x-x); -#endif - } - else if (x >= two_pow_p28) { /* x > 2**28 */ - if (PyPy_IS_INFINITY(x)) { - return x+x; - } else { - return log(x)+ln2; /* acosh(huge)=log(2x) */ - } - } - else if (x == 1.) { - return 0.0; /* acosh(1) = 0 */ - } - else if (x > 2.) { /* 2 < x < 2**28 */ - double t = x*x; - return log(2.0*x - 1.0 / (x + sqrt(t - 1.0))); - } - else { /* 1 < x <= 2 */ - double t = x - 1.0; - return _pypy_math_log1p(t + sqrt(2.0*t + t*t)); - } -} - - -/* asinh(x) - * Method : - * Based on - * asinh(x) = sign(x) * log [ |x| + sqrt(x*x+1) ] - * we have - * asinh(x) := x if 1+x*x=1, - * := sign(x)*(log(x)+ln2)) for large |x|, else - * := sign(x)*log(2|x|+1/(|x|+sqrt(x*x+1))) if|x|>2, else - * := sign(x)*log1p(|x| + x^2/(1 + sqrt(1+x^2))) - */ - -double -_pypy_math_asinh(double x) -{ - double w; - double absx = fabs(x); - - if (PyPy_IS_NAN(x) || PyPy_IS_INFINITY(x)) { - return x+x; - } - if (absx < two_pow_m28) { /* |x| < 2**-28 */ - return x; /* return x inexact except 0 */ - } - if (absx > two_pow_p28) { /* |x| > 2**28 */ - w = log(absx)+ln2; - } - else if (absx > 2.0) { /* 2 < |x| < 2**28 */ - w = log(2.0*absx + 1.0 / (sqrt(x*x + 1.0) + absx)); - } - else { /* 2**-28 <= |x| < 2= */ - double t = x*x; - w = _pypy_math_log1p(absx + t / (1.0 + sqrt(1.0 + t))); - } - return copysign(w, x); - -} - -/* atanh(x) - * Method : - * 1.Reduced x to positive by atanh(-x) = -atanh(x) - * 2.For x>=0.5 - * 1 2x x - * atanh(x) = --- * log(1 + -------) = 0.5 * log1p(2 * --------) - * 2 1 - x 1 - x - * - * For x<0.5 - * atanh(x) = 0.5*log1p(2x+2x*x/(1-x)) - * - * Special cases: - * atanh(x) is NaN if |x| >= 1 with signal; - * atanh(NaN) is that NaN with no signal; - * - */ - -double -_pypy_math_atanh(double x) -{ - double absx; - double t; - - if (PyPy_IS_NAN(x)) { - return x+x; - } - absx = fabs(x); - if (absx >= 1.) 
{ /* |x| >= 1 */ - errno = EDOM; -#ifdef PyPy_NAN - return PyPy_NAN; -#else - return x/zero; -#endif - } - if (absx < two_pow_m28) { /* |x| < 2**-28 */ - return x; - } - if (absx < 0.5) { /* |x| < 0.5 */ - t = absx+absx; - t = 0.5 * _pypy_math_log1p(t + t*absx / (1.0 - absx)); - } - else { /* 0.5 <= |x| <= 1.0 */ - t = 0.5 * _pypy_math_log1p((absx + absx) / (1.0 - absx)); - } - return copysign(t, x); -} - -/* Mathematically, expm1(x) = exp(x) - 1. The expm1 function is designed - to avoid the significant loss of precision that arises from direct - evaluation of the expression exp(x) - 1, for x near 0. */ - -double -_pypy_math_expm1(double x) -{ - /* For abs(x) >= log(2), it's safe to evaluate exp(x) - 1 directly; this - also works fine for infinities and nans. - - For smaller x, we can use a method due to Kahan that achieves close to - full accuracy. - */ - - if (fabs(x) < 0.7) { - double u; - u = exp(x); - if (u == 1.0) - return x; - else - return (u - 1.0) * x / log(u); - } - else - return exp(x) - 1.0; -} - -/* log1p(x) = log(1+x). The log1p function is designed to avoid the - significant loss of precision that arises from direct evaluation when x is - small. */ - -double -_pypy_math_log1p(double x) -{ - /* For x small, we use the following approach. Let y be the nearest float - to 1+x, then - - 1+x = y * (1 - (y-1-x)/y) - - so log(1+x) = log(y) + log(1-(y-1-x)/y). Since (y-1-x)/y is tiny, the - second term is well approximated by (y-1-x)/y. If abs(x) >= - DBL_EPSILON/2 or the rounding-mode is some form of round-to-nearest - then y-1-x will be exactly representable, and is computed exactly by - (y-1)-x. - - If abs(x) < DBL_EPSILON/2 and the rounding mode is not known to be - round-to-nearest then this method is slightly dangerous: 1+x could be - rounded up to 1+DBL_EPSILON instead of down to 1, and in that case - y-1-x will not be exactly representable any more and the result can be - off by many ulps. 
But this is easily fixed: for a floating-point - number |x| < DBL_EPSILON/2., the closest floating-point number to - log(1+x) is exactly x. - */ - - double y; - if (fabs(x) < DBL_EPSILON/2.) { - return x; - } else if (-0.5 <= x && x <= 1.) { - /* WARNING: it's possible than an overeager compiler - will incorrectly optimize the following two lines - to the equivalent of "return log(1.+x)". If this - happens, then results from log1p will be inaccurate - for small x. */ - y = 1.+x; - return log(y)-((y-1.)-x)/y; - } else { - /* NaNs and infinities should end up here */ - return log(1.+x); - } -} diff --git a/pypy/jit/backend/arm/codebuilder.py b/pypy/jit/backend/arm/codebuilder.py --- a/pypy/jit/backend/arm/codebuilder.py +++ b/pypy/jit/backend/arm/codebuilder.py @@ -18,7 +18,7 @@ def f(self, c=cond.AL): """Generates a call to a helper function, takes its arguments in r0 and r1, result is placed in r0""" - addr = rffi.cast(lltype.Signed, llhelper(signature, function)) + addr = rffi.cast(lltype.Signed, function) if c == cond.AL: self.BL(addr) else: @@ -131,7 +131,7 @@ assert c == cond.AL self.LDR_ri(reg.ip.value, reg.pc.value, cond=c) self.SUB_rr(reg.pc.value, reg.pc.value, reg.ip.value, cond=c) - target += 2 * WORD + target += WORD self.write32(target) def BL(self, target, c=cond.AL): @@ -172,19 +172,30 @@ def currpos(self): raise NotImplementedError - size_of_gen_load_int = 4 * WORD - ofs_shift = zip(range(8, 25, 8), range(12, 0, -4)) + size_of_gen_load_int = 3 * WORD def gen_load_int(self, r, value, cond=cond.AL): """r is the register number, value is the value to be loaded to the register""" - self.MOV_ri(r, (value & 0xFF), cond=cond) + from pypy.jit.backend.arm.conditions import AL + if cond != AL or 0 <= value <= 0xFFFF: + self._load_by_shifting(r, value, cond) + else: + self.LDR_ri(r, reg.pc.value) + self.MOV_rr(reg.pc.value, reg.pc.value) + self.write32(value) + + #size_of_gen_load_int = 4 * WORD + ofs_shift = zip(range(8, 25, 8), range(12, 0, -4)) + def 
_load_by_shifting(self, r, value, c=cond.AL): + # to be sure it is only called for the correct cases + assert c != cond.AL or 0 <= value <= 0xFFFF + self.MOV_ri(r, (value & 0xFF), cond=c) for offset, shift in self.ofs_shift: b = (value >> offset) & 0xFF if b == 0: continue t = b | (shift << 8) - self.ORR_ri(r, r, imm=t, cond=cond) - + self.ORR_ri(r, r, imm=t, cond=c) class OverwritingBuilder(AbstractARMv7Builder): def __init__(self, cb, start, size): diff --git a/pypy/module/readline/test/test_with_pypy.py b/pypy/module/readline/test/test_with_pypy.py deleted file mode 100644 --- a/pypy/module/readline/test/test_with_pypy.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Test the readline library on top of PyPy. The following tests run -in the PyPy interpreter, itself running on top of CPython -""" - -import py -from pypy.conftest import gettestobjspace -from pypy.rpython.tool import rffi_platform as platform - -try: - from pypy.module.readline import c_readline -except platform.CompilationError, e: - py.test.skip(e) - - -class AppTestReadline: - - def setup_class(cls): - # enable usage of the readline mixedmodule - space = gettestobjspace(usemodules=('readline',)) - cls.space = space - - def test_basic_import(self): - # this is interpreted by PyPy - import readline - readline.readline - # XXX test more diff --git a/pypy/module/signal/app_signal.py b/pypy/module/signal/app_signal.py deleted file mode 100644 --- a/pypy/module/signal/app_signal.py +++ /dev/null @@ -1,10 +0,0 @@ - - -def default_int_handler(signum, frame): - """ - default_int_handler(...) - - The default handler for SIGINT installed by Python. - It raises KeyboardInterrupt. - """ - raise KeyboardInterrupt() diff --git a/pypy/module/itertools/test/errors.txt b/pypy/module/itertools/test/errors.txt deleted file mode 100644 --- a/pypy/module/itertools/test/errors.txt +++ /dev/null @@ -1,67 +0,0 @@ - - -Here are the remaining errors of CPython 2.5's test_itertools. 
FWIW I -consider them all as obscure undocumented implementation details. - - -====================================================================== -ERROR: test_islice (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "test_itertools.py", line 285, in test_islice - self.assertRaises(ValueError, islice, xrange(10), 'a') - File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 322, in failUnlessRaises - return - File "/home/arigo/pypysrc/lib-python/2.4.1/unittest.py", line 320, in failUnlessRaises - callableObj(*args, **kwargs) -TypeError: expected integer, got str object - -====================================================================== -ERROR: test_tee (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 376, in test_tee - c = type(a)('def') -TypeError: default __new__ takes no parameters - -====================================================================== -ERROR: test_repeat (__main__.LengthTransparency) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 690, in test_repeat - from test.test_iterlen import len -ImportError: cannot import name 'len' - -====================================================================== -ERROR: test_keywords_in_subclass (__main__.SubclassWithKwargsTest) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 760, in test_keywords_in_subclass - class Subclass(cls): -TypeError: type 'repeat' is not an acceptable base class - -====================================================================== -FAIL: test_count (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", 
line 59, in test_count - self.assertEqual(repr(c), 'count(3)') -AssertionError: '' != 'count(3)' - -====================================================================== -FAIL: test_izip (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 199, in test_izip - self.assertEqual(min(ids), max(ids)) -AssertionError: 149283404 != 150789644 - -====================================================================== -FAIL: test_repeat (__main__.TestBasicOps) ----------------------------------------------------------------------- -Traceback (most recent call last): - File "itest25.py", line 214, in test_repeat - self.assertEqual(repr(r), 'repeat((1+0j))') -AssertionError: '' != 'repeat((1+0j))' - ----------------------------------------------------------------------- diff --git a/pypy/jit/backend/arm/runner.py b/pypy/jit/backend/arm/runner.py --- a/pypy/jit/backend/arm/runner.py +++ b/pypy/jit/backend/arm/runner.py @@ -60,6 +60,12 @@ def get_latest_value_count(self): return self.assembler.fail_boxes_count + def get_latest_value_count(self): + return self.assembler.fail_boxes_count + + def get_latest_force_token(self): + return self.assembler.fail_force_index + def clear_latest_values(self, count): setitem = self.assembler.fail_boxes_ptr.setitem null = lltype.nullptr(llmemory.GCREF.TO) @@ -111,3 +117,6 @@ # end of "no gc operation!" 
block assert fail_index == fail_index_2 return faildescr + + def redirect_call_assembler(self, oldlooptoken, newlooptoken): + self.assembler.redirect_call_assembler(oldlooptoken, newlooptoken) diff --git a/pypy/module/math/_genmath.py b/pypy/module/math/_genmath.py deleted file mode 100644 --- a/pypy/module/math/_genmath.py +++ /dev/null @@ -1,62 +0,0 @@ -# ONESHOT SCRIPT (probably can go away soon) -# to generate the mixed module 'math' (see same directory) -import py -import math -import re -import sys -rex_arg = re.compile(".*\((.*)\).*") - -if __name__ == '__main__': - print py.code.Source(""" - import math - from pypy.interpreter.gateway import ObjSpace - - """) - names = [] - for name, func in math.__dict__.items(): - if not callable(func): - continue - sig = func.__doc__.split('\n')[0].strip() - sig = sig.split('->')[0].strip() - m = rex_arg.match(sig) - assert m - args = m.group(1) - args = ", ".join(args.split(',')) - sig = sig.replace('(', '(space,') - sig = ", ".join(sig.split(',')) - argc = len(args.split(',')) - unwrap_spec = ['ObjSpace'] - unwrap_spec += ['float'] * argc - unwrap_spec = ", ".join(unwrap_spec) - doc = func.__doc__.replace('\n', '\n ') - - print py.code.Source(''' - def %(sig)s: - """%(doc)s - """ - return space.wrap(math.%(name)s(%(args)s)) - %(name)s.unwrap_spec = [%(unwrap_spec)s] - ''' % locals()) - names.append(name) - - print >>sys.stderr, py.code.Source(""" - # Package initialisation - from pypy.interpreter.mixedmodule import MixedModule - - class Module(MixedModule): - appleveldefs = { - } - interpleveldefs = { - """) - - for name in names: - space = " " * (15-len(name)) - print >>sys.stderr, ( - " %(name)r%(space)s: 'interp_math.%(name)s'," % locals()) - print >>sys.stderr, py.code.Source(""" - } - """) - - - - diff --git a/pypy/module/zlib/app_zlib.py b/pypy/module/zlib/app_zlib.py deleted file mode 100644 --- a/pypy/module/zlib/app_zlib.py +++ /dev/null @@ -1,11 +0,0 @@ - -""" -Application-level definitions for the zlib 
module. - -NOT_RPYTHON -""" - -class error(Exception): - """ - Raised by zlib operations. - """ diff --git a/pypy/jit/backend/arm/test/test_assembler.py b/pypy/jit/backend/arm/test/test_assembler.py --- a/pypy/jit/backend/arm/test/test_assembler.py +++ b/pypy/jit/backend/arm/test/test_assembler.py @@ -60,6 +60,18 @@ self.a.gen_func_epilog() assert run_asm(self.a) == -3 + def test_load_int1(self): + self.a.gen_func_prolog() + self.a.mc.gen_load_int(r.r0.value, 440) + self.a.gen_func_epilog() + assert run_asm(self.a) == 440 + + def test_load_int2(self): + self.a.gen_func_prolog() + self.a.mc.gen_load_int(r.r0.value, 464) + self.a.gen_func_epilog() + assert run_asm(self.a) == 464 + def test_or(self): self.a.gen_func_prolog() @@ -167,7 +179,7 @@ # call to div self.a.mc.PUSH(range(2, 12)) - div_addr = rffi.cast(lltype.Signed, llhelper(arm_int_div_sign, arm_int_div)) + div_addr = rffi.cast(lltype.Signed, arm_int_div) self.a.mc.BL(div_addr) self.a.mc.POP(range(2, 12)) self.a.gen_func_epilog() @@ -221,8 +233,16 @@ self.a.mov_loc_loc(imm(2478), r.r0) self.a.gen_func_epilog() assert run_asm(self.a) == 2478 - + def test_load_store(self): + x = 0x60002224 + self.a.gen_func_prolog() + self.a.mc.gen_load_int(r.r1.value, x) + self.a.mc.MOV_ri(r.r3.value, 8) + self.a.mc.STR_rr(r.r1.value, r.fp.value, r.r3.value) + self.a.mc.LDR_ri(r.r0.value, r.fp.value, 8) + self.a.gen_func_epilog() + assert run_asm(self.a) == x def callme(inp): i = inp + 10 diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -14,9 +14,15 @@ from pypy.rpython.ootypesystem import ootype from pypy.rpython.annlowlevel import llhelper from pypy.rpython.llinterp import LLException -from pypy.jit.codewriter import heaptracker +from pypy.jit.codewriter import heaptracker, longlong from pypy.rlib.rarithmetic import intmask +def boxfloat(x): + return BoxFloat(longlong.getfloatstorage(x)) + +def 
constfloat(x): + return ConstFloat(longlong.getfloatstorage(x)) + class Runner(object): @@ -36,7 +42,7 @@ self.cpu.set_future_value_ref(j, box.getref_base()) j += 1 elif isinstance(box, BoxFloat): - self.cpu.set_future_value_float(j, box.getfloat()) + self.cpu.set_future_value_float(j, box.getfloatstorage()) j += 1 else: raise NotImplementedError(box) @@ -360,7 +366,10 @@ from pypy.jit.metainterp.test.test_executor import get_float_tests for opnum, boxargs, rettype, retvalue in get_float_tests(self.cpu): res = self.execute_operation(opnum, boxargs, rettype) - assert res.value == retvalue + if isinstance(res, BoxFloat): + assert res.getfloat() == retvalue + else: + assert res.value == retvalue def test_ovf_operations(self, reversed=False): minint = -sys.maxint-1 @@ -432,6 +441,8 @@ assert x == ord('B') if cpu.supports_floats: def func(f, i): + assert isinstance(f, float) + assert isinstance(i, int) return f - float(i) FPTR = self.Ptr(self.FuncType([lltype.Float, lltype.Signed], lltype.Float)) @@ -440,8 +451,8 @@ calldescr = cpu.calldescrof(FTP, FTP.ARGS, FTP.RESULT) x = cpu.bh_call_f(self.get_funcbox(cpu, func_ptr).value, calldescr, - [42], None, [3.5]) - assert x == 3.5 - 42 + [42], None, [longlong.getfloatstorage(3.5)]) + assert longlong.getrealfloat(x) == 3.5 - 42 def test_call(self): from pypy.rlib.libffi import types @@ -495,13 +506,13 @@ func_ptr = llhelper(FPTR, func) calldescr = cpu.calldescrof(FUNC, FUNC.ARGS, FUNC.RESULT) funcbox = self.get_funcbox(cpu, func_ptr) - args = ([BoxFloat(.1) for i in range(7)] + - [BoxInt(1), BoxInt(2), BoxFloat(.2), BoxFloat(.3), - BoxFloat(.4)]) + args = ([boxfloat(.1) for i in range(7)] + + [BoxInt(1), BoxInt(2), boxfloat(.2), boxfloat(.3), + boxfloat(.4)]) res = self.execute_operation(rop.CALL, [funcbox] + args, 'float', descr=calldescr) - assert abs(res.value - 4.6) < 0.0001 + assert abs(res.getfloat() - 4.6) < 0.0001 def test_call_many_arguments(self): # Test calling a function with a large number of arguments (more than 
@@ -563,6 +574,22 @@ descr=calldescr) assert res.value == ord('a') + def test_call_with_const_floats(self): + if not self.cpu.supports_floats: + py.test.skip("requires floats") + def func(f1, f2): + return f1 + f2 + + FUNC = self.FuncType([lltype.Float, lltype.Float], lltype.Float) + FPTR = self.Ptr(FUNC) + calldescr = self.cpu.calldescrof(FUNC, FUNC.ARGS, FUNC.RESULT) + func_ptr = llhelper(FPTR, func) + funcbox = self.get_funcbox(self.cpu, func_ptr) + res = self.execute_operation(rop.CALL, [funcbox, constfloat(1.5), + constfloat(2.5)], 'float', + descr=calldescr) + assert res.getfloat() == 4.0 + def test_field_basic(self): t_box, T_box = self.alloc_instance(self.T) @@ -615,17 +642,17 @@ assert res.value == null_const.value if self.cpu.supports_floats: floatdescr = self.cpu.fielddescrof(self.S, 'float') - self.execute_operation(rop.SETFIELD_GC, [t_box, BoxFloat(3.4)], + self.execute_operation(rop.SETFIELD_GC, [t_box, boxfloat(3.4)], 'void', descr=floatdescr) res = self.execute_operation(rop.GETFIELD_GC, [t_box], 'float', descr=floatdescr) - assert res.value == 3.4 + assert res.getfloat() == 3.4 # - self.execute_operation(rop.SETFIELD_GC, [t_box, ConstFloat(-3.6)], + self.execute_operation(rop.SETFIELD_GC, [t_box, constfloat(-3.6)], 'void', descr=floatdescr) res = self.execute_operation(rop.GETFIELD_GC, [t_box], 'float', descr=floatdescr) - assert res.value == -3.6 + assert res.getfloat() == -3.6 def test_passing_guards(self): @@ -641,7 +668,7 @@ (rop.GUARD_ISNULL, [nullbox]) ]) if self.cpu.supports_floats: - all.append((rop.GUARD_VALUE, [BoxFloat(3.5), ConstFloat(3.5)])) + all.append((rop.GUARD_VALUE, [boxfloat(3.5), constfloat(3.5)])) for (opname, args) in all: assert self.execute_operation(opname, args, 'void') == None assert not self.guard_failed @@ -667,7 +694,7 @@ (rop.GUARD_NONNULL, [nullbox]), (rop.GUARD_ISNULL, [t_box])]) if self.cpu.supports_floats: - all.append((rop.GUARD_VALUE, [BoxFloat(-1.0), ConstFloat(1.0)])) + all.append((rop.GUARD_VALUE, 
[boxfloat(-1.0), constfloat(1.0)])) for opname, args in all: assert self.execute_operation(opname, args, 'void') == None assert self.guard_failed @@ -832,17 +859,17 @@ a_box, A = self.alloc_array_of(lltype.Float, 31) arraydescr = self.cpu.arraydescrof(A) self.execute_operation(rop.SETARRAYITEM_GC, [a_box, BoxInt(1), - BoxFloat(3.5)], + boxfloat(3.5)], 'void', descr=arraydescr) self.execute_operation(rop.SETARRAYITEM_GC, [a_box, BoxInt(2), - ConstFloat(4.5)], + constfloat(4.5)], 'void', descr=arraydescr) r = self.execute_operation(rop.GETARRAYITEM_GC, [a_box, BoxInt(1)], 'float', descr=arraydescr) - assert r.value == 3.5 + assert r.getfloat() == 3.5 r = self.execute_operation(rop.GETARRAYITEM_GC, [a_box, BoxInt(2)], 'float', descr=arraydescr) - assert r.value == 4.5 + assert r.getfloat() == 4.5 # For platforms where sizeof(INT) != sizeof(Signed) (ie, x86-64) a_box, A = self.alloc_array_of(rffi.INT, 342) @@ -942,10 +969,10 @@ assert r.value == u_box.value if self.cpu.supports_floats: - r = self.execute_operation(rop.SAME_AS, [ConstFloat(5.5)], 'float') - assert r.value == 5.5 - r = self.execute_operation(rop.SAME_AS, [BoxFloat(5.5)], 'float') - assert r.value == 5.5 + r = self.execute_operation(rop.SAME_AS, [constfloat(5.5)], 'float') + assert r.getfloat() == 5.5 + r = self.execute_operation(rop.SAME_AS, [boxfloat(5.5)], 'float') + assert r.getfloat() == 5.5 def test_virtual_ref(self): pass # VIRTUAL_REF must not reach the backend nowadays @@ -1016,7 +1043,7 @@ p = lltype.malloc(S) values.append(lltype.cast_opaque_ptr(llmemory.GCREF, p)) elif isinstance(box, BoxFloat): - values.append(r.random()) + values.append(longlong.getfloatstorage(r.random())) else: assert 0 values[index_counter] = 11 @@ -1064,7 +1091,7 @@ faildescr1 = BasicFailDescr(1) faildescr2 = BasicFailDescr(2) operations = [ - ResOperation(rop.FLOAT_LE, [fboxes[0], ConstFloat(9.2)], i2), + ResOperation(rop.FLOAT_LE, [fboxes[0], constfloat(9.2)], i2), ResOperation(rop.GUARD_TRUE, [i2], None, 
descr=faildescr1), ResOperation(rop.FINISH, fboxes, None, descr=faildescr2), ] @@ -1075,20 +1102,22 @@ fboxes2 = [BoxFloat() for i in range(12)] f3 = BoxFloat() bridge = [ - ResOperation(rop.FLOAT_SUB, [fboxes2[0], ConstFloat(1.0)], f3), + ResOperation(rop.FLOAT_SUB, [fboxes2[0], constfloat(1.0)], f3), ResOperation(rop.JUMP, [f3] + fboxes2[1:], None, descr=looptoken), ] self.cpu.compile_bridge(faildescr1, fboxes2, bridge, looptoken) for i in range(len(fboxes)): - self.cpu.set_future_value_float(i, 13.5 + 6.73 * i) + x = 13.5 + 6.73 * i + self.cpu.set_future_value_float(i, longlong.getfloatstorage(x)) fail = self.cpu.execute_token(looptoken) assert fail.identifier == 2 res = self.cpu.get_latest_value_float(0) - assert res == 8.5 + assert longlong.getrealfloat(res) == 8.5 for i in range(1, len(fboxes)): - assert self.cpu.get_latest_value_float(i) == 13.5 + 6.73 * i + got = longlong.getrealfloat(self.cpu.get_latest_value_float(i)) + assert got == 13.5 + 6.73 * i def test_integers_and_guards(self): for opname, compare in [ @@ -1165,11 +1194,11 @@ if combinaison[0] == 'b': fbox1 = BoxFloat() else: - fbox1 = ConstFloat(-4.5) + fbox1 = constfloat(-4.5) if combinaison[1] == 'b': fbox2 = BoxFloat() else: - fbox2 = ConstFloat(-4.5) + fbox2 = constfloat(-4.5) b1 = BoxInt() faildescr1 = BasicFailDescr(1) faildescr2 = BasicFailDescr(2) @@ -1193,10 +1222,12 @@ if test2 == -4.5 or combinaison[1] == 'b': n = 0 if combinaison[0] == 'b': - cpu.set_future_value_float(n, test1) + cpu.set_future_value_float( + n, longlong.getfloatstorage(test1)) n += 1 if combinaison[1] == 'b': - cpu.set_future_value_float(n, test2) + cpu.set_future_value_float( + n, longlong.getfloatstorage(test2)) n += 1 fail = cpu.execute_token(looptoken) # @@ -1246,7 +1277,7 @@ if isinstance(box, BoxInt): self.cpu.set_future_value_int(i, box.getint()) elif isinstance(box, BoxFloat): - self.cpu.set_future_value_float(i, box.getfloat()) + self.cpu.set_future_value_float(i, box.getfloatstorage()) else: assert 0 # @@ 
-1257,15 +1288,15 @@ if not self.cpu.supports_floats: py.test.skip("requires floats") - from pypy.rlib.rarithmetic import INFINITY, NAN, isinf, isnan + from pypy.rlib.rfloat import INFINITY, NAN, isinf, isnan from pypy.jit.metainterp.resoperation import opname - fzer = BoxFloat(0.0) - fone = BoxFloat(1.0) - fmqr = BoxFloat(-0.25) - finf = BoxFloat(INFINITY) - fmnf = BoxFloat(-INFINITY) - fnan = BoxFloat(NAN) + fzer = boxfloat(0.0) + fone = boxfloat(1.0) + fmqr = boxfloat(-0.25) + finf = boxfloat(INFINITY) + fmnf = boxfloat(-INFINITY) + fnan = boxfloat(NAN) all_cases_unary = [(a,) for a in [fzer,fone,fmqr,finf,fmnf,fnan]] all_cases_binary = [(a, b) for a in [fzer,fone,fmqr,finf,fmnf,fnan] @@ -1275,7 +1306,7 @@ def nan_and_infinity(opnum, realoperation, testcases): for testcase in testcases: - realvalues = [b.value for b in testcase] + realvalues = [b.getfloat() for b in testcase] expected = realoperation(*realvalues) if isinstance(expected, float): expectedtype = 'float' @@ -1284,15 +1315,17 @@ got = self.execute_operation(opnum, list(testcase), expectedtype) if isnan(expected): - ok = isnan(got.value) + ok = isnan(got.getfloat()) elif isinf(expected): - ok = isinf(got.value) + ok = isinf(got.getfloat()) + elif isinstance(got, BoxFloat): + ok = (got.getfloat() == expected) else: - ok = (got.value == expected) + ok = got.value == expected if not ok: raise AssertionError("%s(%s): got %r, expected %r" % ( opname[opnum], ', '.join(map(repr, realvalues)), - got.value, expected)) + got.getfloat(), expected)) # if we expect a boolean, also check the combination with # a GUARD_TRUE or GUARD_FALSE if isinstance(expected, bool): @@ -1312,7 +1345,8 @@ self.cpu.compile_loop(unique_testcase_list, operations, looptoken) for i, box in enumerate(unique_testcase_list): - self.cpu.set_future_value_float(i, box.value) + self.cpu.set_future_value_float( + i, box.getfloatstorage()) fail = self.cpu.execute_token(looptoken) if fail.identifier != 5 - (expected_id^expected): if 
fail.identifier == 4: @@ -1783,7 +1817,8 @@ self.cpu.set_future_value_int(1, 0) fail = self.cpu.execute_token(looptoken) assert fail.identifier == 0 - assert self.cpu.get_latest_value_float(0) == 42.5 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 42.5 assert values == [] self.cpu.set_future_value_int(0, 10) @@ -1791,7 +1826,8 @@ fail = self.cpu.execute_token(looptoken) assert fail.identifier == 1 assert self.cpu.get_latest_value_int(0) == 1 - assert self.cpu.get_latest_value_float(1) == 42.5 + x = self.cpu.get_latest_value_float(1) + assert longlong.getrealfloat(x) == 42.5 assert self.cpu.get_latest_value_int(2) == 10 assert values == [1, 10] @@ -1826,9 +1862,10 @@ descr_C = cpu.arraydescrof(C) x = cpu.bh_getarrayitem_gc_f( descr_C, lltype.cast_opaque_ptr(llmemory.GCREF, c), 3) - assert x == 3.5 + assert longlong.getrealfloat(x) == 3.5 cpu.bh_setarrayitem_gc_f( - descr_C, lltype.cast_opaque_ptr(llmemory.GCREF, c), 4, 4.5) + descr_C, lltype.cast_opaque_ptr(llmemory.GCREF, c), 4, + longlong.getfloatstorage(4.5)) assert c[4] == 4.5 s = rstr.mallocstr(6) x = cpu.bh_strlen(lltype.cast_opaque_ptr(llmemory.GCREF, s)) @@ -1882,13 +1919,13 @@ descrfld_z = cpu.fielddescrof(S, 'z') cpu.bh_setfield_gc_f( lltype.cast_opaque_ptr(llmemory.GCREF, s), - descrfld_z, 3.5) + descrfld_z, longlong.getfloatstorage(3.5)) assert s.z == 3.5 s.z = 3.2 x = cpu.bh_getfield_gc_f( lltype.cast_opaque_ptr(llmemory.GCREF, s), descrfld_z) - assert x == 3.2 + assert longlong.getrealfloat(x) == 3.2 ### we don't support in the JIT for now GC pointers ### stored inside non-GC structs. 
#descrfld_ry = cpu.fielddescrof(RS, 'y') @@ -2044,7 +2081,8 @@ py.test.skip("requires floats") called = [] def assembler_helper(failindex, virtualizable): - assert self.cpu.get_latest_value_float(0) == 1.2 + 3.2 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 1.2 + 3.2 called.append(failindex) return 13.5 @@ -2070,10 +2108,11 @@ looptoken = LoopToken() looptoken.outermost_jitdriver_sd = FakeJitDriverSD() self.cpu.compile_loop(loop.inputargs, loop.operations, looptoken) - self.cpu.set_future_value_float(0, 1.2) - self.cpu.set_future_value_float(1, 2.3) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(1.2)) + self.cpu.set_future_value_float(1, longlong.getfloatstorage(2.3)) res = self.cpu.execute_token(looptoken) - assert self.cpu.get_latest_value_float(0) == 1.2 + 2.3 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 1.2 + 2.3 ops = ''' [f4, f5] f3 = call_assembler(f4, f5, descr=looptoken) @@ -2083,10 +2122,11 @@ loop = parse(ops, namespace=locals()) othertoken = LoopToken() self.cpu.compile_loop(loop.inputargs, loop.operations, othertoken) - self.cpu.set_future_value_float(0, 1.2) - self.cpu.set_future_value_float(1, 3.2) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(1.2)) + self.cpu.set_future_value_float(1, longlong.getfloatstorage(3.2)) res = self.cpu.execute_token(othertoken) - assert self.cpu.get_latest_value_float(0) == 13.5 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 13.5 assert called # test the fast path, which should not call assembler_helper() @@ -2095,10 +2135,11 @@ try: othertoken = LoopToken() self.cpu.compile_loop(loop.inputargs, loop.operations, othertoken) - self.cpu.set_future_value_float(0, 1.2) - self.cpu.set_future_value_float(1, 3.2) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(1.2)) + self.cpu.set_future_value_float(1, longlong.getfloatstorage(3.2)) res = self.cpu.execute_token(othertoken) - assert 
self.cpu.get_latest_value_float(0) == 1.2 + 3.2 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 1.2 + 3.2 assert not called finally: del self.cpu.done_with_this_frame_float_v @@ -2132,7 +2173,8 @@ py.test.skip("requires floats") called = [] def assembler_helper(failindex, virtualizable): - assert self.cpu.get_latest_value_float(0) == 1.25 + 3.25 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 1.25 + 3.25 called.append(failindex) return 13.5 @@ -2157,10 +2199,11 @@ looptoken = LoopToken() looptoken.outermost_jitdriver_sd = FakeJitDriverSD() self.cpu.compile_loop(loop.inputargs, loop.operations, looptoken) - self.cpu.set_future_value_float(0, 1.25) - self.cpu.set_future_value_float(1, 2.35) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(1.25)) + self.cpu.set_future_value_float(1, longlong.getfloatstorage(2.35)) res = self.cpu.execute_token(looptoken) - assert self.cpu.get_latest_value_float(0) == 1.25 + 2.35 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 1.25 + 2.35 assert not called ops = ''' @@ -2174,10 +2217,11 @@ self.cpu.compile_loop(loop.inputargs, loop.operations, othertoken) # normal call_assembler: goes to looptoken - self.cpu.set_future_value_float(0, 1.25) - self.cpu.set_future_value_float(1, 3.25) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(1.25)) + self.cpu.set_future_value_float(1, longlong.getfloatstorage(3.25)) res = self.cpu.execute_token(othertoken) - assert self.cpu.get_latest_value_float(0) == 13.5 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 13.5 assert called del called[:] @@ -2195,10 +2239,12 @@ self.cpu.redirect_call_assembler(looptoken, looptoken2) # now, our call_assembler should go to looptoken2 - self.cpu.set_future_value_float(0, 6.0) - self.cpu.set_future_value_float(1, 1.5) # 6.0-1.5 == 1.25+3.25 + self.cpu.set_future_value_float(0, longlong.getfloatstorage(6.0)) + 
self.cpu.set_future_value_float(1, longlong.getfloatstorage(1.5)) + # 6.0-1.5 == 1.25+3.25 res = self.cpu.execute_token(othertoken) - assert self.cpu.get_latest_value_float(0) == 13.5 + x = self.cpu.get_latest_value_float(0) + assert longlong.getrealfloat(x) == 13.5 assert called def test_short_result_of_getfield_direct(self): @@ -2395,6 +2441,66 @@ assert res.value == expected, ( "%r: got %r, expected %r" % (RESTYPE, res.value, expected)) + def test_supports_longlong(self): + if sys.maxint > 2147483647: + assert not self.cpu.supports_longlong, ( + "supports_longlong should be False on 64-bit platforms") + + def test_longlong_result_of_call_direct(self): + if not self.cpu.supports_longlong: + py.test.skip("longlong test") + from pypy.translator.tool.cbuild import ExternalCompilationInfo + from pypy.rlib.rarithmetic import r_longlong + eci = ExternalCompilationInfo( + separate_module_sources=[""" + long long fn_test_result_of_call(long long x) + { + return x - 100000000000000; + } + """], + export_symbols=['fn_test_result_of_call']) + f = rffi.llexternal('fn_test_result_of_call', [lltype.SignedLongLong], + lltype.SignedLongLong, + compilation_info=eci, _nowrapper=True) + value = r_longlong(0x7ff05af3307a3fff) + expected = r_longlong(0x7ff000001fffffff) + assert f(value) == expected + # + FUNC = self.FuncType([lltype.SignedLongLong], lltype.SignedLongLong) + FPTR = self.Ptr(FUNC) + calldescr = self.cpu.calldescrof(FUNC, FUNC.ARGS, FUNC.RESULT) + x = self.cpu.bh_call_f(self.get_funcbox(self.cpu, f).value, + calldescr, None, None, [value]) + assert x == expected + + def test_longlong_result_of_call_compiled(self): + if not self.cpu.supports_longlong: + py.test.skip("test of longlong result") + from pypy.translator.tool.cbuild import ExternalCompilationInfo + from pypy.rlib.rarithmetic import r_longlong + eci = ExternalCompilationInfo( + separate_module_sources=[""" + long long fn_test_result_of_call(long long x) + { + return x - 100000000000000; + } + """], + 
export_symbols=['fn_test_result_of_call']) + f = rffi.llexternal('fn_test_result_of_call', [lltype.SignedLongLong], + lltype.SignedLongLong, + compilation_info=eci, _nowrapper=True) + value = r_longlong(0x7ff05af3307a3fff) + expected = r_longlong(0x7ff000001fffffff) + assert f(value) == expected + # + FUNC = self.FuncType([lltype.SignedLongLong], lltype.SignedLongLong) + FPTR = self.Ptr(FUNC) + calldescr = self.cpu.calldescrof(FUNC, FUNC.ARGS, FUNC.RESULT) + funcbox = self.get_funcbox(self.cpu, f) + res = self.execute_operation(rop.CALL, [funcbox, BoxFloat(value)], + 'float', descr=calldescr) + assert res.getfloatstorage() == expected + def test_free_loop_and_bridges(self): from pypy.jit.backend.llsupport.llmodel import AbstractLLCPU if not isinstance(self.cpu, AbstractLLCPU): diff --git a/pypy/module/parser/app_helpers.py b/pypy/module/parser/app_helpers.py deleted file mode 100644 --- a/pypy/module/parser/app_helpers.py +++ /dev/null @@ -1,2 +0,0 @@ -class ParserError(Exception): - pass diff --git a/pypy/module/readline/test/__init__.py b/pypy/module/readline/test/__init__.py deleted file mode 100644 --- a/pypy/module/readline/test/__init__.py +++ /dev/null @@ -1,1 +0,0 @@ -# diff --git a/pypy/module/select/app_select.py b/pypy/module/select/app_select.py deleted file mode 100644 --- a/pypy/module/select/app_select.py +++ /dev/null @@ -1,2 +0,0 @@ -class error(Exception): - pass diff --git a/pypy/objspace/std/test/helper.py b/pypy/objspace/std/test/helper.py deleted file mode 100644 --- a/pypy/objspace/std/test/helper.py +++ /dev/null @@ -1,69 +0,0 @@ -def raises(excp, func, *args): - try: - func(*args) - assert 1 == 0 - except excp:pass - -def assertEqual(a, b): - assert a == b - -def assertNotEqual(a, b): - assert a != b - -def assertIs(a, b): - assert a is b - -# complex specific tests - -EPS = 1e-9 - -def assertAlmostEqual(a, b): - if isinstance(a, complex): - if isinstance(b, complex): - assert a.real - b.real < EPS - assert a.imag - b.imag < EPS - else: - 
assert a.real - b < EPS - assert a.imag < EPS - else: - if isinstance(b, complex): - assert a - b.real < EPS - assert b.imag < EPS - else: - assert a - b < EPS - -def assertCloseAbs(x, y, eps=1e-9): - """Return true iff floats x and y "are close\"""" - # put the one with larger magnitude second - if abs(x) > abs(y): - x, y = y, x - if y == 0: - return abs(x) < eps - if x == 0: - return abs(y) < eps - # check that relative difference < eps - assert abs((x-y)/y) < eps - -def assertClose(x, y, eps=1e-9): - """Return true iff complexes x and y "are close\"""" - assertCloseAbs(x.real, y.real, eps) - assertCloseAbs(x.imag, y.imag, eps) - - -def check_div(x, y): - """Compute complex z=x*y, and check that z/x==y and z/y==x.""" - z = x * y - if x != 0: - q = z / x - assertClose(q, y) - q = z.__div__(x) - assertClose(q, y) - q = z.__truediv__(x) - assertClose(q, y) - if y != 0: - q = z / y - assertClose(q, x) - q = z.__div__(y) - assertClose(q, x) - q = z.__truediv__(y) - assertClose(q, x) diff --git a/pypy/module/termios/app_termios.py b/pypy/module/termios/app_termios.py deleted file mode 100644 --- a/pypy/module/termios/app_termios.py +++ /dev/null @@ -1,3 +0,0 @@ - -class error(Exception): - pass diff --git a/pypy/module/_ssl/app_ssl.py b/pypy/module/_ssl/app_ssl.py deleted file mode 100644 --- a/pypy/module/_ssl/app_ssl.py +++ /dev/null @@ -1,7 +0,0 @@ -import _socket - -class SSLError(_socket.error): - pass - -__doc__ = """Implementation module for SSL socket operations. 
-See the socket module for documentation.""" diff --git a/pypy/module/pyexpat/app_pyexpat.py b/pypy/module/pyexpat/app_pyexpat.py deleted file mode 100644 --- a/pypy/module/pyexpat/app_pyexpat.py +++ /dev/null @@ -1,6 +0,0 @@ -class ExpatError(Exception): - def __init__(self, msg, code, lineno, colno): - Exception.__init__(self, msg) - self.code = code - self.lineno = lineno - self.colno = colno diff --git a/pypy/module/cpyext/include/modsupport.inl b/pypy/module/cpyext/include/modsupport.inl deleted file mode 100644 --- a/pypy/module/cpyext/include/modsupport.inl +++ /dev/null @@ -1,29 +0,0 @@ -/* -*- C -*- */ -/* Module support interface */ - -#ifndef Py_MODSUPPORT_INL -#define Py_MODSUPPORT_INL -#ifdef __cplusplus -extern "C" { -#endif - -#ifdef PYPY_STANDALONE -/* XXX1 On translation, forwarddecl.h is included after this file */ -/* XXX2 genc.py transforms "const char*" into "char*" */ -extern PyObject *_Py_InitPyPyModule(char *, PyMethodDef *, char *, PyObject *, int); -#endif - -Py_LOCAL_INLINE(PyObject *) Py_InitModule4( - const char* name, PyMethodDef* methods, - const char* doc, PyObject *self, - int api_version) -{ - return _Py_InitPyPyModule((char*)name, methods, - (char*)doc, self, - api_version); -} - -#ifdef __cplusplus -} -#endif -#endif /* !Py_MODSUPPORT_INL */ diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py --- a/pypy/jit/backend/arm/assembler.py +++ b/pypy/jit/backend/arm/assembler.py @@ -5,8 +5,8 @@ from pypy.jit.backend.arm.codebuilder import ARMv7Builder, OverwritingBuilder from pypy.jit.backend.arm.regalloc import (Regalloc, ARMFrameManager, _check_imm_arg, TempInt, TempPtr) +from pypy.jit.backend.llsupport.regalloc import compute_vars_longevity, TempBox from pypy.jit.backend.llsupport.asmmemmgr import MachineDataBlockWrapper -from pypy.jit.backend.llsupport.regalloc import compute_vars_longevity, TempBox from pypy.jit.backend.model import CompiledLoopToken from pypy.jit.metainterp.history import (Const, 
ConstInt, ConstPtr, BoxInt, BoxPtr, AbstractFailDescr, @@ -19,6 +19,8 @@ from pypy.rpython.annlowlevel import llhelper from pypy.rpython.lltypesystem import lltype, rffi, llmemory from pypy.jit.backend.arm.opassembler import ResOpAssembler +from pypy.rlib.debug import (debug_print, debug_start, debug_stop, + have_debug_prints) # XXX Move to llsupport from pypy.jit.backend.x86.support import values_array @@ -58,6 +60,8 @@ self.fail_boxes_int = values_array(lltype.Signed, failargs_limit) self.fail_boxes_float = values_array(lltype.Float, failargs_limit) self.fail_boxes_ptr = values_array(llmemory.GCREF, failargs_limit) + self.fail_boxes_count = 0 + self.fail_force_index = 0 self.setup_failure_recovery() self.mc = None self.malloc_func_addr = 0 @@ -67,16 +71,12 @@ self.memcpy_addr = 0 self.teardown() self._exit_code_addr = 0 - self.datablockwrapper = None - def setup(self, looptoken): + def setup(self): assert self.memcpy_addr != 0, 'setup_once() not called?' self.mc = ARMv7Builder() self.guard_descrs = [] - if self.datablockwrapper is None: - allblocks = self.get_asmmemmgr_blocks(looptoken) - self.datablockwrapper = MachineDataBlockWrapper(self.cpu.asmmemmgr, - allblocks) + self.blocks = [] def setup_once(self): # Addresses of functions called by new_xxx operations @@ -98,6 +98,8 @@ ll_new_unicode) self.memcpy_addr = self.cpu.cast_ptr_to_int(memcpy_fn) self._exit_code_addr = self._gen_exit_path() + self._leave_jitted_jook_save_exc = self._gen_leave_jitted_hook_code(True) + self._leave_jitted_jook = self._gen_leave_jitted_hook_code(False) def setup_failure_recovery(self): @@ -168,6 +170,7 @@ assert enc[i] == self.END_OF_LOCS descr = self.decode32(enc, i+1) self.fail_boxes_count = fail_index + self.fail_force_index = frame_loc return descr def decode_inputargs(self, enc, inputargs, regalloc): @@ -222,13 +225,23 @@ mem[i+2] = chr((n >> 16) & 0xFF) mem[i+3] = chr((n >> 24) & 0xFF) + def _gen_leave_jitted_hook_code(self, save_exc=False): + mc = ARMv7Builder() + 
mc.PUSH([reg.value for reg in r.caller_resp] + [r.ip.value]) + addr = self.cpu.get_on_leave_jitted_int(save_exception=save_exc) + mc.BL(addr) + mc.POP([reg.value for reg in r.caller_resp]+[r.ip.value]) + assert self._exit_code_addr != 0 + mc.B(self._exit_code_addr) + return mc.materialize(self.cpu.asmmemmgr, [], + self.cpu.gc_ll_descr.gcrootmap) def _gen_exit_path(self): mc = ARMv7Builder() decode_registers_addr = llhelper(self.recovery_func_sign, self.failure_recovery_func) mc.PUSH([reg.value for reg in r.all_regs]) # registers r0 .. r10 mc.VPUSH([reg.value for reg in r.all_vfp_regs]) # registers d0 .. d15 - mc.MOV_rr(r.r0.value, r.lr.value) # move mem block address, to r0 + mc.MOV_rr(r.r0.value, r.ip.value) # move mem block address, to r0 mc.MOV_rr(r.r1.value, r.fp.value) # pass the current frame pointer as second param mc.MOV_rr(r.r2.value, r.sp.value) # pass the current stack pointer as third param @@ -240,12 +253,11 @@ return mc.materialize(self.cpu.asmmemmgr, [], self.cpu.gc_ll_descr.gcrootmap) - def _gen_path_to_exit_path(self, op, args, arglocs, fcond=c.AL): + def _gen_path_to_exit_path(self, op, args, arglocs, fcond=c.AL, save_exc=False): descr = op.getdescr() if op.getopnum() != rop.FINISH: assert isinstance(descr, AbstractFailDescr) descr._arm_frame_depth = arglocs[0].getint() - reg = r.lr # The size of the allocated memory is based on the following sizes # first argloc is the frame depth and not considered for the memory # allocation @@ -254,9 +266,12 @@ # 1 byte for the location # 1 separator byte # 4 bytes for the faildescr - # XXX free this memory - mem = lltype.malloc(rffi.CArray(lltype.Char), (len(arglocs)-1)*6+5, - flavor='raw', track_allocation=False) + memsize = (len(arglocs)-1)*6+5 + datablockwrapper = MachineDataBlockWrapper(self.cpu.asmmemmgr, + self.blocks) + memaddr = datablockwrapper.malloc_aligned(memsize, alignment=WORD) + datablockwrapper.done() + mem = rffi.cast(rffi.CArrayPtr(lltype.Char), memaddr) i = 0 j = 0 while i < len(args): @@ 
-265,21 +280,21 @@ loc = arglocs[i+1] if arg.type == INT: mem[j] = self.INT_TYPE + j += 1 elif arg.type == REF: mem[j] = self.REF_TYPE + j += 1 elif arg.type == FLOAT: mem[j] = self.FLOAT_TYPE + j += 1 else: assert 0, 'unknown type' - j += 1 if loc.is_reg(): mem[j] = chr(loc.value) j += 1 elif loc.is_imm(): - if not arg.type == INT or arg.type == REF: - print "Expected INT or REF values" - assert 0 + assert arg.type == INT or arg.type == REF mem[j] = self.IMM_LOC self.encode32(mem, j+1, loc.getint()) j += 5 @@ -293,12 +308,15 @@ i += 1 mem[j] = chr(0xFF) - memaddr = rffi.cast(lltype.Signed, mem) n = self.cpu.get_fail_descr_number(descr) self.encode32(mem, j+1, n) - self.mc.LDR_ri(r.lr.value, r.pc.value, imm=WORD) - self.mc.B(self._exit_code_addr) + self.mc.LDR_ri(r.ip.value, r.pc.value, imm=WORD) + if save_exc: + path = self._leave_jitted_jook_save_exc + else: + path = self._leave_jitted_jook + self.mc.B(path) self.mc.write32(memaddr) return memaddr @@ -330,7 +348,6 @@ elif loc.type == FLOAT: addr = self.fail_boxes_float.get_addr_for_num(i) else: - raise ValueError self.mc.gen_load_int(r.ip.value, addr) if not loc.type == FLOAT: self.mc.LDR_ri(reg.value, r.ip.value) @@ -341,7 +358,7 @@ looptoken._arm_arglocs = arglocs return arglocs - def gen_direct_bootstrap_code(self, arglocs, loop_head, regalloc): + def gen_direct_bootstrap_code(self, arglocs, loop_head, looptoken): self.gen_func_prolog() if len(arglocs) > 4: reg_args = 4 @@ -366,23 +383,30 @@ assert 0, 'invalid location' sp_patch_location = self._prepare_sp_patch_position() self.mc.B_offs(loop_head) - self._patch_sp_offset(sp_patch_location, regalloc) + self._patch_sp_offset(sp_patch_location, looptoken._arm_frame_depth) + def _dump(self, ops, type='loop'): + debug_start('jit-backend-ops') + debug_print(type) + for op in ops: + debug_print(op.repr()) + debug_stop('jit-backend-ops') # cpu interface def assemble_loop(self, inputargs, operations, looptoken, log): + self._dump(operations) + self.setup() + longevity 
= compute_vars_longevity(inputargs, operations) + regalloc = ARMRegisterManager(longevity, assembler=self, frame_manager=ARMFrameManager()) + clt = CompiledLoopToken(self.cpu, looptoken.number) looptoken.compiled_loop_token = clt - self.setup(looptoken) - longevity = compute_vars_longevity(inputargs, operations) - regalloc = Regalloc(longevity, assembler=self, frame_manager=ARMFrameManager()) - - self.align() self.gen_func_prolog() + sp_patch_location = self._prepare_sp_patch_position() arglocs = self.gen_bootstrap_code(inputargs, regalloc, looptoken) - sp_patch_location = self._prepare_sp_patch_position() - + #for x in range(5): + # self.mc.NOP() loop_head = self.mc.currpos() looptoken._arm_loop_code = loop_head @@ -390,12 +414,13 @@ self._walk_operations(operations, regalloc) - self._patch_sp_offset(sp_patch_location, regalloc) + looptoken._arm_frame_depth = regalloc.frame_manager.frame_depth + self._patch_sp_offset(sp_patch_location, looptoken._arm_frame_depth) self.align() direct_bootstrap_code = self.mc.currpos() - self.gen_direct_bootstrap_code(arglocs, loop_head, regalloc) + self.gen_direct_bootstrap_code(arglocs, loop_head, looptoken) loop_start = self.materialize_loop(looptoken) looptoken._arm_bootstrap_code = loop_start @@ -409,7 +434,8 @@ def assemble_bridge(self, faildescr, inputargs, operations, original_loop_token, log): - self.setup(original_loop_token) + self._dump(operations, 'bridge') + self.setup() assert isinstance(faildescr, AbstractFailDescr) code = faildescr._failure_recovery_code enc = rffi.cast(rffi.CCHARP, code) @@ -417,14 +443,15 @@ regalloc = Regalloc(longevity, assembler=self, frame_manager=ARMFrameManager()) + sp_patch_location = self._prepare_sp_patch_position() frame_depth = faildescr._arm_frame_depth locs = self.decode_inputargs(enc, inputargs, regalloc) regalloc.update_bindings(locs, frame_depth, inputargs) - sp_patch_location = self._prepare_sp_patch_position() self._walk_operations(operations, regalloc) - 
self._patch_sp_offset(sp_patch_location, regalloc) + #original_loop_token._arm_frame_depth = regalloc.frame_manager.frame_depth + self._patch_sp_offset(sp_patch_location, regalloc.frame_manager.frame_depth) bridge_start = self.materialize_loop(original_loop_token) self.update_descrs_for_bridges(bridge_start) @@ -437,9 +464,9 @@ self.teardown() def materialize_loop(self, looptoken): - self.datablockwrapper.done() # finish using cpu.asmmemmgr - self.datablockwrapper = None allblocks = self.get_asmmemmgr_blocks(looptoken) + for block in self.blocks: + allblocks.append(block) return self.mc.materialize(self.cpu.asmmemmgr, allblocks, self.cpu.gc_ll_descr.gcrootmap) @@ -468,13 +495,13 @@ self.mc.MOV_rr(r.r0.value, r.r0.value) return l - def _patch_sp_offset(self, pos, regalloc): + def _patch_sp_offset(self, pos, frame_depth): cb = OverwritingBuilder(self.mc, pos, OverwritingBuilder.size_of_gen_load_int) # Note: the frame_depth is one less than the value stored in the frame # manager - if regalloc.frame_manager.frame_depth == 1: + if frame_depth == 1: return - n = (regalloc.frame_manager.frame_depth-1)*WORD + n = (frame_depth-1)*WORD self._adjust_sp(n, cb, base_reg=r.fp) def _adjust_sp(self, n, cb=None, fcond=c.AL, base_reg=r.sp): @@ -499,26 +526,26 @@ def _walk_operations(self, operations, regalloc): fcond=c.AL - i = 0 - while i < len(operations): - regalloc.position = i + while regalloc.position < len(operations) - 1: + regalloc.next_instruction() + i = regalloc.position op = operations[i] opnum = op.getopnum() if op.has_no_side_effect() and op.result not in regalloc.longevity: - i += 1 regalloc.possibly_free_vars_for_op(op) - continue elif self.can_merge_with_next_guard(op, i, operations): + regalloc.next_instruction() arglocs = regalloc.operations_with_guard[opnum](regalloc, op, operations[i+1], fcond) fcond = self.operations_with_guard[opnum](self, op, operations[i+1], arglocs, regalloc, fcond) - i += 1 - regalloc.position = i else: arglocs = 
regalloc.operations[opnum](regalloc, op, fcond) fcond = self.operations[opnum](self, op, arglocs, regalloc, fcond) - i += 1 + if op.result: + regalloc.possibly_free_var(op.result) + regalloc.possibly_free_vars_for_op(op) + regalloc._check_invariants() def can_merge_with_next_guard(self, op, i, operations): if op.getopnum() == rop.CALL_MAY_FORCE or op.getopnum() == rop.CALL_ASSEMBLER: @@ -565,26 +592,53 @@ if value.is_imm(): self.mc.gen_load_int(loc.value, value.getint()) elif value.is_imm_float(): + #XXX this is wrong self.mc.gen_load_int(r.ip.value, value.getint()) self.mc.VLDR(loc.value, r.ip.value) - def regalloc_mov(self, prev_loc, loc): + # XXX needs float support + def regalloc_mov(self, prev_loc, loc, cond=c.AL): if prev_loc.is_imm(): + if loc.is_reg(): + new_loc = loc + else: + assert loc is not r.ip + new_loc = r.ip if _check_imm_arg(ConstInt(prev_loc.getint())): - self.mc.MOV_ri(loc.value, prev_loc.getint()) + self.mc.MOV_ri(new_loc.value, prev_loc.getint(), cond=cond) else: - self.mc.gen_load_int(loc.value, prev_loc.getint()) - elif loc.is_stack(): - self.mc.STR_ri(prev_loc.value, r.fp.value, loc.position*-WORD) - elif prev_loc.is_stack(): - self.mc.LDR_ri(loc.value, r.fp.value, prev_loc.position*-WORD) + self.mc.gen_load_int(new_loc.value, prev_loc.getint(), cond=cond) + prev_loc = new_loc + if not loc.is_stack(): + return + + if loc.is_stack() or prev_loc.is_stack(): + temp = r.lr + if loc.is_stack() and prev_loc.is_reg(): + offset = ConstInt(loc.position*-WORD) + if not _check_imm_arg(offset): + self.mc.gen_load_int(temp.value, offset.value) + self.mc.STR_rr(prev_loc.value, r.fp.value, temp.value, cond=cond) + else: + self.mc.STR_ri(prev_loc.value, r.fp.value, offset.value, cond=cond) + elif loc.is_reg() and prev_loc.is_stack(): + offset = ConstInt(prev_loc.position*-WORD) + if not _check_imm_arg(offset): + self.mc.gen_load_int(temp.value, offset.value) + self.mc.LDR_rr(loc.value, r.fp.value, temp.value, cond=cond) + else: + self.mc.LDR_ri(loc.value, 
r.fp.value, offset.value, cond=cond) + else: + assert 0, 'unsupported case' + elif loc.is_reg() and prev_loc.is_reg(): + self.mc.MOV_rr(loc.value, prev_loc.value, cond=cond) else: - self.mc.MOV_rr(loc.value, prev_loc.value) + assert 0, 'unsupported case' mov_loc_loc = regalloc_mov def regalloc_push(self, loc): if loc.is_stack(): - self.mc.LDR_ri(r.ip.value, r.fp.value, loc.position*-WORD) + self.regalloc_mov(loc, r.ip) self.mc.PUSH([r.ip.value]) elif loc.is_reg(): self.mc.PUSH([loc.value]) @@ -594,7 +648,7 @@ def regalloc_pop(self, loc): if loc.is_stack(): self.mc.POP([r.ip.value]) - self.mc.STR_ri(r.ip.value, r.fp.value, loc.position*-WORD) + self.regalloc_mov(r.ip, loc) elif loc.is_reg(): self.mc.POP([loc.value]) else: diff --git a/pypy/module/binascii/app_binascii.py b/pypy/module/binascii/app_binascii.py deleted file mode 100644 --- a/pypy/module/binascii/app_binascii.py +++ /dev/null @@ -1,6 +0,0 @@ - -class Error(Exception): - pass - -class Incomplete(Exception): - pass diff --git a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -13,7 +13,7 @@ return fcond return f -def gen_emit_op_ri(opname, imm_size=0xFF, commutative=True, allow_zero=True): +def gen_emit_op_ri(opname): ri_op = getattr(AbstractARMv7Builder, '%s_ri' % opname) rr_op = getattr(AbstractARMv7Builder, '%s_rr' % opname) def f(self, op, arglocs, regalloc, fcond): @@ -30,9 +30,15 @@ helper = getattr(AbstractARMv7Builder, opname) def f(self, op, arglocs, regalloc, fcond): assert fcond is not None - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + if op.result: + self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + else: + self.mc.PUSH([reg.value for reg in r.caller_resp]) helper(self.mc, fcond) - self.mc.POP([reg.value for reg in r.caller_resp][1:]) + if op.result: + self.mc.POP([reg.value for reg in r.caller_resp][1:]) + else: + self.mc.POP([reg.value for 
reg in r.caller_resp]) return fcond return f diff --git a/pypy/module/readline/interp_readline.py b/pypy/module/readline/interp_readline.py deleted file mode 100644 --- a/pypy/module/readline/interp_readline.py +++ /dev/null @@ -1,23 +0,0 @@ -# this is a sketch of how one might one day be able to define a pretty simple -# ctypes-using module, suitable for feeding to the ext-compiler - -from pypy.interpreter.baseobjspace import ObjSpace - -from pypy.module.readline import c_readline -from pypy.rpython.lltypesystem import rffi - -#------------------------------------------------------------ -# exported API (see interpleveldefs in __init__.py) -# -def readline(space, prompt): - return space.wrap(rffi.charp2str(c_readline.c_readline(prompt))) -readline.unwrap_spec = [ObjSpace, str] - -def setcompleter(space, w_callback): - """Set or remove the completer function. - The function is called as function(text, state), - for state in 0, 1, 2, ..., until it returns a non-string. - It should return the next possible completion starting with 'text'. 
- """ - # XXX set internal completion function - diff --git a/pypy/module/zipimport/app_zipimport.py b/pypy/module/zipimport/app_zipimport.py deleted file mode 100644 --- a/pypy/module/zipimport/app_zipimport.py +++ /dev/null @@ -1,4 +0,0 @@ - -class ZipImportError(ImportError): - pass - From commits-noreply at bitbucket.org Wed Mar 30 17:31:32 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 17:31:32 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: correct loading of constants when entering the loop and add more of the regalloc interface to the common regalloc for both kinds of registers Message-ID: <20110330153132.121C1282BDC@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43019:e252cb6eb4e2 Date: 2011-03-30 14:35 +0200 http://bitbucket.org/pypy/pypy/changeset/e252cb6eb4e2/ Log: correct loading of constants when entering the loop and add more of the regalloc interface to the common regalloc for both kinds of registers diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py --- a/pypy/jit/backend/arm/assembler.py +++ b/pypy/jit/backend/arm/assembler.py @@ -283,10 +283,10 @@ j += 1 elif arg.type == REF: mem[j] = self.REF_TYPE - j += 1 + j += 1 elif arg.type == FLOAT: mem[j] = self.FLOAT_TYPE - j += 1 + j += 1 else: assert 0, 'unknown type' @@ -341,18 +341,21 @@ for i in range(len(inputargs)): loc = inputargs[i] reg = regalloc.force_allocate_reg(loc) - if loc.type == REF: - addr = self.fail_boxes_ptr.get_addr_for_num(i) - elif loc.type == INT: - addr = self.fail_boxes_int.get_addr_for_num(i) + if loc.type != FLOAT: + if loc.type == REF: + addr = self.fail_boxes_ptr.get_addr_for_num(i) + elif loc.type == INT: + addr = self.fail_boxes_int.get_addr_for_num(i) + else: + assert 0 + self.mc.gen_load_int(reg.value, addr) + self.mc.LDR_ri(reg.value, reg.value) elif loc.type == FLOAT: addr = self.fail_boxes_float.get_addr_for_num(i) + self.mc.gen_load_int(r.ip.value, addr) + 
self.mc.VLDR(reg.value, r.ip.value) else: - self.mc.gen_load_int(r.ip.value, addr) - if not loc.type == FLOAT: - self.mc.LDR_ri(reg.value, r.ip.value) - else: - self.mc.VLDR(reg.value, r.ip.value) + assert 0 regalloc.possibly_free_var(loc) arglocs = [regalloc.loc(arg) for arg in inputargs] looptoken._arm_arglocs = arglocs @@ -396,7 +399,7 @@ self._dump(operations) self.setup() longevity = compute_vars_longevity(inputargs, operations) - regalloc = ARMRegisterManager(longevity, assembler=self, frame_manager=ARMFrameManager()) + regalloc = Regalloc(longevity, assembler=self, frame_manager=ARMFrameManager()) clt = CompiledLoopToken(self.cpu, looptoken.number) looptoken.compiled_loop_token = clt @@ -526,9 +529,9 @@ def _walk_operations(self, operations, regalloc): fcond=c.AL - while regalloc.position < len(operations) - 1: + while regalloc.position() < len(operations) - 1: regalloc.next_instruction() - i = regalloc.position + i = regalloc.position() op = operations[i] opnum = op.getopnum() if op.has_no_side_effect() and op.result not in regalloc.longevity: diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py --- a/pypy/jit/backend/arm/opassembler.py +++ b/pypy/jit/backend/arm/opassembler.py @@ -15,7 +15,7 @@ gen_emit_unary_float_op) from pypy.jit.backend.arm.codebuilder import ARMv7Builder, OverwritingBuilder from pypy.jit.backend.arm.jump import remap_frame_layout -from pypy.jit.backend.arm.regalloc import Regalloc +from pypy.jit.backend.arm.regalloc import Regalloc, TempInt from pypy.jit.backend.llsupport import symbolic from pypy.jit.backend.llsupport.descr import BaseFieldDescr, BaseArrayDescr from pypy.jit.backend.llsupport.regalloc import compute_vars_longevity, TempBox @@ -654,7 +654,7 @@ descr = op.getdescr() assert isinstance(descr, LoopToken) assert op.numargs() == len(descr._arm_arglocs) - resbox = TempBox() + resbox = TempInt() self._emit_call(descr._arm_direct_bootstrap_code, op.getarglist(), regalloc, fcond, result=resbox) 
if op.result is None: diff --git a/pypy/jit/backend/arm/test/test_regalloc.py b/pypy/jit/backend/arm/test/test_regalloc.py --- a/pypy/jit/backend/arm/test/test_regalloc.py +++ b/pypy/jit/backend/arm/test/test_regalloc.py @@ -8,7 +8,7 @@ from pypy.jit.metainterp.resoperation import rop, ResOperation from pypy.jit.backend.llsupport.descr import GcCache from pypy.jit.backend.detect_cpu import getcpuclass -from pypy.jit.backend.arm.regalloc import ARMRegisterManager +from pypy.jit.backend.arm.regalloc import RegAlloc from pypy.jit.tool.oparser import parse from pypy.rpython.lltypesystem import lltype, llmemory, rffi from pypy.rpython.annlowlevel import llhelper @@ -57,7 +57,7 @@ def load_effective_addr(self, *args): self.lea.append(args) -class RegAllocForTests(ARMRegisterManager): +class RegAllocForTests(RegAlloc): position = 0 def _compute_next_usage(self, v, _): return -1 diff --git a/pypy/jit/backend/arm/test/test_gc_integration.py b/pypy/jit/backend/arm/test/test_gc_integration.py --- a/pypy/jit/backend/arm/test/test_gc_integration.py +++ b/pypy/jit/backend/arm/test/test_gc_integration.py @@ -10,7 +10,7 @@ from pypy.jit.backend.llsupport.descr import GcCache from pypy.jit.backend.llsupport.gc import GcLLDescription from pypy.jit.backend.detect_cpu import getcpuclass -from pypy.jit.backend.arm.regalloc import ARMRegisterManager +from pypy.jit.backend.arm.regalloc import Regalloc from pypy.jit.backend.arm.arch import WORD from pypy.jit.tool.oparser import parse from pypy.rpython.lltypesystem import lltype, llmemory, rffi diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py --- a/pypy/jit/backend/arm/regalloc.py +++ b/pypy/jit/backend/arm/regalloc.py @@ -112,6 +112,29 @@ return self.vfprm.loc(var) else: return self.rm.loc(var) + + def position(self): + return self.rm.position + + def next_instruction(self): + self.rm.next_instruction() + self.vfprm.next_instruction() + + def _check_invariants(self): + self.rm._check_invariants() + 
self.vfprm._check_invariants() + + def stays_alive(self, v): + if v.type == FLOAT: + return self.vfprm.stays_alive(v) + else: + return self.rm.stays_alive(v) + + def after_call(self, v): + if v.type == FLOAT: + return self.vfprm.after_call(v) + else: + return self.rm.after_call(v) def force_allocate_reg(self, var, forbidden_vars=[], selected_reg=None, need_lower_byte=False): @@ -121,6 +144,12 @@ else: return self.rm.force_allocate_reg(var, forbidden_vars, selected_reg, need_lower_byte) + def try_allocate_reg(self, v, selected_reg=None, need_lower_byte=False): + if v.type == FLOAT: + return self.vfprm.try_allocate_reg(v, selected_reg, need_lower_byte) + else: + return self.rm.try_allocate_reg(v, selected_reg, need_lower_byte) + def possibly_free_var(self, var): if var.type == FLOAT: self.vfprm.possibly_free_var(var) From commits-noreply at bitbucket.org Wed Mar 30 17:31:35 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 17:31:35 +0200 (CEST) Subject: [pypy-svn] pypy arm-backed-float: start using longlong representation for floats Message-ID: <20110330153135.59A7C282BE7@codespeak.net> Author: David Schneider Branch: arm-backed-float Changeset: r43020:b296af1b90d0 Date: 2011-03-30 15:31 +0200 http://bitbucket.org/pypy/pypy/changeset/b296af1b90d0/ Log: start using longlong representation for floats diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py --- a/pypy/jit/backend/arm/assembler.py +++ b/pypy/jit/backend/arm/assembler.py @@ -8,6 +8,7 @@ from pypy.jit.backend.llsupport.regalloc import compute_vars_longevity, TempBox from pypy.jit.backend.llsupport.asmmemmgr import MachineDataBlockWrapper from pypy.jit.backend.model import CompiledLoopToken +from pypy.jit.codewriter import longlong from pypy.jit.metainterp.history import (Const, ConstInt, ConstPtr, BoxInt, BoxPtr, AbstractFailDescr, INT, REF, FLOAT) @@ -58,7 +59,7 @@ def __init__(self, cpu, failargs_limit=1000): self.cpu = cpu self.fail_boxes_int = 
values_array(lltype.Signed, failargs_limit) - self.fail_boxes_float = values_array(lltype.Float, failargs_limit) + self.fail_boxes_float = values_array(longlong.FLOATSTORAGE, failargs_limit) self.fail_boxes_ptr = values_array(llmemory.GCREF, failargs_limit) self.fail_boxes_count = 0 self.fail_force_index = 0 @@ -162,7 +163,7 @@ elif group == self.REF_TYPE: self.fail_boxes_ptr.setitem(fail_index, rffi.cast(llmemory.GCREF, value)) elif group == self.FLOAT_TYPE: - self.fail_boxes_float.setitem(fail_index, rffi.cast(lltype.Float, value)) + self.fail_boxes_float.setitem(fail_index, longlong.getfloatstorage(value)) else: assert 0, 'unknown type' diff --git a/pypy/jit/backend/arm/regalloc.py b/pypy/jit/backend/arm/regalloc.py --- a/pypy/jit/backend/arm/regalloc.py +++ b/pypy/jit/backend/arm/regalloc.py @@ -9,12 +9,14 @@ prepare_cmp_op, prepare_float_op, _check_imm_arg) +from pypy.jit.codewriter import longlong from pypy.jit.metainterp.history import (Const, ConstInt, ConstFloat, ConstPtr, Box, BoxInt, BoxPtr, AbstractFailDescr, INT, REF, FLOAT, LoopToken) from pypy.jit.metainterp.resoperation import rop from pypy.jit.backend.llsupport.descr import BaseFieldDescr, BaseArrayDescr from pypy.jit.backend.llsupport import symbolic +from pypy.jit.backend.llsupport.asmmemmgr import MachineDataBlockWrapper from pypy.rpython.lltypesystem import lltype, rffi, rstr, llmemory from pypy.jit.codewriter import heaptracker from pypy.rlib.objectmodel import we_are_translated @@ -56,8 +58,12 @@ save_around_call_regs = r.all_vfp_regs def convert_to_imm(self, c): - adr = self.assembler.datablockwrapper.malloc_aligned(8, 8) - rffi.cast(rffi.CArrayPtr(rffi.DOUBLE), adr)[0] = c.getfloat() + datablockwrapper = MachineDataBlockWrapper(self.assembler.cpu.asmmemmgr, + self.assembler.blocks) + adr = datablockwrapper.malloc_aligned(8, 8) + datablockwrapper.done() + x = c.getfloatstorage() + rffi.cast(rffi.CArrayPtr(longlong.FLOATSTORAGE), adr)[0] = x return locations.ConstFloatLoc(adr) def 
__init__(self, longevity, frame_manager=None, assembler=None): diff --git a/pypy/jit/backend/arm/runner.py b/pypy/jit/backend/arm/runner.py --- a/pypy/jit/backend/arm/runner.py +++ b/pypy/jit/backend/arm/runner.py @@ -39,8 +39,8 @@ self.assembler.assemble_bridge(faildescr, inputargs, operations, original_loop_token, log=log) - def set_future_value_float(self, index, intvalue): - self.assembler.fail_boxes_float.setitem(index, intvalue) + def set_future_value_float(self, index, floatvalue): + self.assembler.fail_boxes_float.setitem(index, floatvalue) def set_future_value_int(self, index, intvalue): self.assembler.fail_boxes_int.setitem(index, intvalue) diff --git a/pypy/jit/backend/test/runner_test.py b/pypy/jit/backend/test/runner_test.py --- a/pypy/jit/backend/test/runner_test.py +++ b/pypy/jit/backend/test/runner_test.py @@ -117,16 +117,16 @@ i0 = BoxFloat() i1 = BoxFloat() operations = [ - ResOperation(rop.FLOAT_ADD, [i0, ConstFloat(2.3)], i1), + ResOperation(rop.FLOAT_ADD, [i0, constfloat(2.3)], i1), ResOperation(rop.FINISH, [i1], None, descr=BasicFailDescr(1)) ] inputargs = [i0] looptoken = LoopToken() self.cpu.compile_loop(inputargs, operations, looptoken) - self.cpu.set_future_value_float(0, 2.8) + self.cpu.set_future_value_float(0, longlong.getfloatstorage(2.8)) fail = self.cpu.execute_token(looptoken) res = self.cpu.get_latest_value_float(0) - assert res == 5.1 + assert longlong.getrealfloat(res) == 5.1 assert fail.identifier == 1 def test_compile_loop(self): From commits-noreply at bitbucket.org Wed Mar 30 17:31:36 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 17:31:36 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: create a context manager to manage pushing and popping registers around calls Message-ID: <20110330153136.1CAEA282BE7@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43021:09c7c416d3af Date: 2011-03-30 17:27 +0200 http://bitbucket.org/pypy/pypy/changeset/09c7c416d3af/ Log: create a 
context manager to manage pushing and popping registers around calls diff --git a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -1,3 +1,4 @@ +from __future__ import with_statement from pypy.jit.backend.arm import conditions as c from pypy.jit.backend.arm import registers as r from pypy.jit.backend.arm.codebuilder import AbstractARMv7Builder @@ -55,3 +56,14 @@ self.mc.MOV_ri(res.value, 0, cond=inv) return fcond return f + +class saved_registers(object): + def __init__(self, assembler, regs_to_save): + self.assembler = assembler + self.regs = regs_to_save + + def __enter__(self): + self.assembler.PUSH([r.value for r in self.regs]) + + def __exit__(self, *args): + self.assembler.POP([r.value for r in self.regs]) From commits-noreply at bitbucket.org Wed Mar 30 17:31:36 2011 From: commits-noreply at bitbucket.org (bivab) Date: Wed, 30 Mar 2011 17:31:36 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: start using with saved_registers instead of PUSH/POP Message-ID: <20110330153136.E247F282BE7@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43022:a38e1e84b906 Date: 2011-03-30 17:29 +0200 http://bitbucket.org/pypy/pypy/changeset/a38e1e84b906/ Log: start using with saved_registers instead of PUSH/POP diff --git a/pypy/jit/backend/arm/assembler.py b/pypy/jit/backend/arm/assembler.py --- a/pypy/jit/backend/arm/assembler.py +++ b/pypy/jit/backend/arm/assembler.py @@ -1,3 +1,4 @@ +from pypy.jit.backend.arm.helper.assembler import saved_registers from pypy.jit.backend.arm import conditions as c from pypy.jit.backend.arm import locations from pypy.jit.backend.arm import registers as r @@ -206,10 +207,9 @@ def _gen_leave_jitted_hook_code(self, save_exc=False): mc = ARMv7Builder() - mc.PUSH([reg.value for reg in r.caller_resp] + [r.ip.value]) - addr = self.cpu.get_on_leave_jitted_int(save_exception=save_exc) - 
mc.BL(addr) - mc.POP([reg.value for reg in r.caller_resp]+[r.ip.value]) + with saved_registers(mc, r.caller_resp + [r.ip]): + addr = self.cpu.get_on_leave_jitted_int(save_exception=save_exc) + mc.BL(addr) assert self._exit_code_addr != 0 mc.B(self._exit_code_addr) return mc.materialize(self.cpu.asmmemmgr, [], @@ -218,14 +218,13 @@ mc = ARMv7Builder() decode_registers_addr = llhelper(self.recovery_func_sign, self.failure_recovery_func) - mc.PUSH([reg.value for reg in r.all_regs]) # registers r0 .. r10 - mc.MOV_rr(r.r0.value, r.ip.value) # move mem block address, to r0 to pass as - mc.MOV_rr(r.r1.value, r.fp.value) # pass the current frame pointer as second param - mc.MOV_rr(r.r2.value, r.sp.value) # pass the current stack pointer as third param + with saved_registers(mc, r.all_regs): + mc.MOV_rr(r.r0.value, r.ip.value) # move mem block address, to r0 to pass as + mc.MOV_rr(r.r1.value, r.fp.value) # pass the current frame pointer as second param + mc.MOV_rr(r.r2.value, r.sp.value) # pass the current stack pointer as third param - mc.BL(rffi.cast(lltype.Signed, decode_registers_addr)) - mc.MOV_rr(r.ip.value, r.r0.value) - mc.POP([reg.value for reg in r.all_regs]) + mc.BL(rffi.cast(lltype.Signed, decode_registers_addr)) + mc.MOV_rr(r.ip.value, r.r0.value) mc.MOV_rr(r.r0.value, r.ip.value) self.gen_func_epilog(mc=mc) return mc.materialize(self.cpu.asmmemmgr, [], diff --git a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -32,14 +32,11 @@ def f(self, op, arglocs, regalloc, fcond): assert fcond is not None if op.result: - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + regs = r.caller_resp[1:] else: - self.mc.PUSH([reg.value for reg in r.caller_resp]) - helper(self.mc, fcond) - if op.result: - self.mc.POP([reg.value for reg in r.caller_resp][1:]) - else: - self.mc.POP([reg.value for reg in r.caller_resp]) + regs = r.caller_resp + 
with saved_registers(self.mc, regs): + helper(self.mc, fcond) return fcond return f From commits-noreply at bitbucket.org Wed Mar 30 17:46:15 2011 From: commits-noreply at bitbucket.org (arigo) Date: Wed, 30 Mar 2011 17:46:15 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Uh, we don't need to clear the shadow stack's memory (which is Message-ID: <20110330154615.7AE4A282BDC@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43023:aa9a6e1e4778 Date: 2011-03-30 17:24 +0200 http://bitbucket.org/pypy/pypy/changeset/aa9a6e1e4778/ Log: Uh, we don't need to clear the shadow stack's memory (which is important in case we create a lot of threads). It is going to contain random stuff anyway after a few pushes and pops. No code should access it past the root_stack_top limit, and (at least nowadays) the shadow stack is not emitted by genc but allocated dynamically. diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -1336,10 +1336,7 @@ return top.address[0] def allocate_stack(self): - result = llmemory.raw_malloc(self.rootstacksize) - if result: - llmemory.raw_memclear(result, self.rootstacksize) - return result + return llmemory.raw_malloc(self.rootstacksize) def setup_root_walker(self): stackbase = self.allocate_stack() From commits-noreply at bitbucket.org Wed Mar 30 19:53:58 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 30 Mar 2011 19:53:58 +0200 (CEST) Subject: [pypy-svn] pypy default: The 'maxsplit' argument of str.split(sep, maxsplit=-1) is now RPython. Message-ID: <20110330175358.0FAB1282BDC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43024:12df7babf225 Date: 2011-03-29 14:58 +0200 http://bitbucket.org/pypy/pypy/changeset/12df7babf225/ Log: The 'maxsplit' argument of str.split(sep, maxsplit=-1) is now RPython. 
diff --git a/pypy/rpython/test/test_rstr.py b/pypy/rpython/test/test_rstr.py --- a/pypy/rpython/test/test_rstr.py +++ b/pypy/rpython/test/test_rstr.py @@ -590,6 +590,21 @@ res = self.interpret(fn, [i]) assert res == fn(i) + def test_split_limit(self): + const = self.const + def fn(i, j): + s = [const(''), const('0.1.2.4.8'), const('.1.2'), const('1.2.'), const('.1.2.4.')][i] + l = s.split(const('.'), j) + sum = 0 + for num in l: + if len(num): + sum += ord(num[0]) - ord(const('0')[0]) + return sum + len(l) * 100 + for i in range(5): + for j in range(4): + res = self.interpret(fn, [i, j]) + assert res == fn(i, j) + def test_contains(self): const = self.const constchar = self.constchar diff --git a/pypy/rpython/rstr.py b/pypy/rpython/rstr.py --- a/pypy/rpython/rstr.py +++ b/pypy/rpython/rstr.py @@ -221,14 +221,18 @@ def rtype_method_split(self, hop): rstr = hop.args_r[0].repr - v_str, v_chr = hop.inputargs(rstr.repr, rstr.char_repr) + if hop.nb_args == 3: + v_str, v_chr, v_max = hop.inputargs(rstr.repr, rstr.char_repr, Signed) + else: + v_str, v_chr = hop.inputargs(rstr.repr, rstr.char_repr) + v_max = hop.inputconst(Signed, -1) try: list_type = hop.r_result.lowleveltype.TO except AttributeError: list_type = hop.r_result.lowleveltype cLIST = hop.inputconst(Void, list_type) hop.exception_cannot_occur() - return hop.gendirectcall(self.ll.ll_split_chr, cLIST, v_str, v_chr) + return hop.gendirectcall(self.ll.ll_split_chr, cLIST, v_str, v_chr, v_max) def rtype_method_replace(self, hop): rstr = hop.args_r[0].repr diff --git a/pypy/rpython/ootypesystem/ootype.py b/pypy/rpython/ootypesystem/ootype.py --- a/pypy/rpython/ootypesystem/ootype.py +++ b/pypy/rpython/ootypesystem/ootype.py @@ -443,7 +443,7 @@ "ll_upper": Meth([], self.SELFTYPE_T), "ll_lower": Meth([], self.SELFTYPE_T), "ll_substring": Meth([Signed, Signed], self.SELFTYPE_T), # ll_substring(start, count) - "ll_split_chr": Meth([self.CHAR], Array(self.SELFTYPE_T)), # XXX this is not pure! 
+ "ll_split_chr": Meth([self.CHAR, Signed], Array(self.SELFTYPE_T)), # XXX this is not pure! "ll_contains": Meth([self.CHAR], Bool), "ll_replace_chr_chr": Meth([self.CHAR, self.CHAR], self.SELFTYPE_T), }) @@ -1480,9 +1480,9 @@ # NOT_RPYTHON return self.make_string(self._str[start:start+count]) - def ll_split_chr(self, ch): + def ll_split_chr(self, ch, max): # NOT_RPYTHON - l = [self.make_string(s) for s in self._str.split(ch)] + l = [self.make_string(s) for s in self._str.split(ch, max)] res = _array(Array(self._TYPE), len(l)) res._array[:] = l return res diff --git a/pypy/translator/cli/src/pypylib.cs b/pypy/translator/cli/src/pypylib.cs --- a/pypy/translator/cli/src/pypylib.cs +++ b/pypy/translator/cli/src/pypylib.cs @@ -717,9 +717,12 @@ return s.Substring(start, count); } - public static string[] ll_split_chr(string s, char ch) + public static string[] ll_split_chr(string s, char ch, int max) { - return s.Split(ch); + if (max < 0) + return s.Split(ch); + else + return s.Split(new Char[] {ch}, max + 1); } public static bool ll_contains(string s, char ch) diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py --- a/pypy/rpython/lltypesystem/rstr.py +++ b/pypy/rpython/lltypesystem/rstr.py @@ -722,26 +722,35 @@ newlen = len(s1.chars) - 1 return LLHelpers._ll_stringslice(s1, 0, newlen) - def ll_split_chr(LIST, s, c): + def ll_split_chr(LIST, s, c, max): chars = s.chars strlen = len(chars) count = 1 i = 0 + if max == 0: + i = strlen while i < strlen: if chars[i] == c: count += 1 + if max >= 0 and count > max: + break i += 1 res = LIST.ll_newlist(count) items = res.ll_items() i = 0 j = 0 resindex = 0 + if max == 0: + j = strlen while j < strlen: if chars[j] == c: item = items[resindex] = s.malloc(j - i) item.copy_contents(s, item, i, 0, j - i) resindex += 1 i = j + 1 + if max >= 0 and resindex >= max: + j = strlen + break j += 1 item = items[resindex] = s.malloc(j - i) item.copy_contents(s, item, i, 0, j - i) diff --git 
a/pypy/annotation/unaryop.py b/pypy/annotation/unaryop.py --- a/pypy/annotation/unaryop.py +++ b/pypy/annotation/unaryop.py @@ -499,7 +499,7 @@ def getanyitem(str): return str.basecharclass() - def method_split(str, patt): # XXX + def method_split(str, patt, max=-1): getbookkeeper().count("str_split", str, patt) return getbookkeeper().newlist(str.basestringclass()) diff --git a/pypy/rpython/ootypesystem/rstr.py b/pypy/rpython/ootypesystem/rstr.py --- a/pypy/rpython/ootypesystem/rstr.py +++ b/pypy/rpython/ootypesystem/rstr.py @@ -211,8 +211,8 @@ def ll_stringslice_minusone(s): return s.ll_substring(0, s.ll_strlen()-1) - def ll_split_chr(RESULT, s, c): - return RESULT.ll_convert_from_array(s.ll_split_chr(c)) + def ll_split_chr(RESULT, s, c, max): + return RESULT.ll_convert_from_array(s.ll_split_chr(c, max)) def ll_int(s, base): if not 2 <= base <= 36: diff --git a/pypy/translator/jvm/src/pypy/PyPy.java b/pypy/translator/jvm/src/pypy/PyPy.java --- a/pypy/translator/jvm/src/pypy/PyPy.java +++ b/pypy/translator/jvm/src/pypy/PyPy.java @@ -746,11 +746,13 @@ return str.substring(start, end); } - public static Object[] ll_split_chr(String str, char c) { + public static Object[] ll_split_chr(String str, char c, int max) { ArrayList list = new ArrayList(); int lastidx = 0, idx = 0; while ((idx = str.indexOf(c, lastidx)) != -1) { + if (max >= 0 && list.size() >= max) + break; String sub = str.substring(lastidx, idx); list.add(sub); lastidx = idx+1; From commits-noreply at bitbucket.org Wed Mar 30 19:53:59 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 30 Mar 2011 19:53:59 +0200 (CEST) Subject: [pypy-svn] pypy default: Add RPython support for str.rsplit(char, maxsplit=-1) Message-ID: <20110330175359.C7056282BDC@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43025:7be96def7147 Date: 2011-03-30 11:47 +0200 http://bitbucket.org/pypy/pypy/changeset/7be96def7147/ Log: Add RPython support for str.rsplit(char, maxsplit=-1) diff --git 
a/pypy/rpython/test/test_rstr.py b/pypy/rpython/test/test_rstr.py --- a/pypy/rpython/test/test_rstr.py +++ b/pypy/rpython/test/test_rstr.py @@ -576,30 +576,51 @@ res = self.interpret(f, [i, newlines]) assert res == f(i, newlines) - def test_split(self): + def _make_split_test(self, split_fn): const = self.const def fn(i): s = [const(''), const('0.1.2.4.8'), const('.1.2'), const('1.2.'), const('.1.2.4.')][i] - l = s.split(const('.')) + l = getattr(s, split_fn)(const('.')) sum = 0 for num in l: if len(num): sum += ord(num[0]) - ord(const('0')[0]) return sum + len(l) * 100 + return fn + + def test_split(self): + fn = self._make_split_test('split') for i in range(5): res = self.interpret(fn, [i]) assert res == fn(i) - def test_split_limit(self): + def test_rsplit(self): + fn = self._make_split_test('rsplit') + for i in range(5): + res = self.interpret(fn, [i]) + assert res == fn(i) + + def _make_split_limit_test(self, split_fn): const = self.const def fn(i, j): s = [const(''), const('0.1.2.4.8'), const('.1.2'), const('1.2.'), const('.1.2.4.')][i] - l = s.split(const('.'), j) + l = getattr(s, split_fn)(const('.'), j) sum = 0 for num in l: if len(num): sum += ord(num[0]) - ord(const('0')[0]) return sum + len(l) * 100 + return fn + + def test_split_limit(self): + fn = self._make_split_limit_test('split') + for i in range(5): + for j in range(4): + res = self.interpret(fn, [i, j]) + assert res == fn(i, j) + + def test_rsplit_limit(self): + fn = self._make_split_limit_test('split') for i in range(5): for j in range(4): res = self.interpret(fn, [i, j]) diff --git a/pypy/rpython/rstr.py b/pypy/rpython/rstr.py --- a/pypy/rpython/rstr.py +++ b/pypy/rpython/rstr.py @@ -234,6 +234,21 @@ hop.exception_cannot_occur() return hop.gendirectcall(self.ll.ll_split_chr, cLIST, v_str, v_chr, v_max) + def rtype_method_rsplit(self, hop): + rstr = hop.args_r[0].repr + if hop.nb_args == 3: + v_str, v_chr, v_max = hop.inputargs(rstr.repr, rstr.char_repr, Signed) + else: + v_str, v_chr = 
hop.inputargs(rstr.repr, rstr.char_repr) + v_max = hop.inputconst(Signed, -1) + try: + list_type = hop.r_result.lowleveltype.TO + except AttributeError: + list_type = hop.r_result.lowleveltype + cLIST = hop.inputconst(Void, list_type) + hop.exception_cannot_occur() + return hop.gendirectcall(self.ll.ll_rsplit_chr, cLIST, v_str, v_chr, v_max) + def rtype_method_replace(self, hop): rstr = hop.args_r[0].repr if not (hop.args_r[1] == rstr.char_repr and hop.args_r[2] == rstr.char_repr): diff --git a/pypy/rpython/ootypesystem/ootype.py b/pypy/rpython/ootypesystem/ootype.py --- a/pypy/rpython/ootypesystem/ootype.py +++ b/pypy/rpython/ootypesystem/ootype.py @@ -444,6 +444,7 @@ "ll_lower": Meth([], self.SELFTYPE_T), "ll_substring": Meth([Signed, Signed], self.SELFTYPE_T), # ll_substring(start, count) "ll_split_chr": Meth([self.CHAR, Signed], Array(self.SELFTYPE_T)), # XXX this is not pure! + "ll_rsplit_chr": Meth([self.CHAR, Signed], Array(self.SELFTYPE_T)), # XXX this is not pure! "ll_contains": Meth([self.CHAR], Bool), "ll_replace_chr_chr": Meth([self.CHAR, self.CHAR], self.SELFTYPE_T), }) @@ -1487,6 +1488,13 @@ res._array[:] = l return res + def ll_rsplit_chr(self, ch, max): + # NOT_RPYTHON + l = [self.make_string(s) for s in self._str.rsplit(ch, max)] + res = _array(Array(self._TYPE), len(l)) + res._array[:] = l + return res + def ll_contains(self, ch): # NOT_RPYTHON return ch in self._str diff --git a/pypy/rpython/lltypesystem/rstr.py b/pypy/rpython/lltypesystem/rstr.py --- a/pypy/rpython/lltypesystem/rstr.py +++ b/pypy/rpython/lltypesystem/rstr.py @@ -756,6 +756,41 @@ item.copy_contents(s, item, i, 0, j - i) return res + def ll_rsplit_chr(LIST, s, c, max): + chars = s.chars + strlen = len(chars) + count = 1 + i = 0 + if max == 0: + i = strlen + while i < strlen: + if chars[i] == c: + count += 1 + if max >= 0 and count > max: + break + i += 1 + res = LIST.ll_newlist(count) + items = res.ll_items() + i = strlen + j = strlen + resindex = count - 1 + assert resindex >= 0 
+ if max == 0: + j = 0 + while j > 0: + j -= 1 + if chars[j] == c: + item = items[resindex] = s.malloc(i - j - 1) + item.copy_contents(s, item, j + 1, 0, i - j - 1) + resindex -= 1 + i = j + if resindex == 0: + j = 0 + break + item = items[resindex] = s.malloc(i - j) + item.copy_contents(s, item, j, 0, i - j) + return res + @purefunction def ll_replace_chr_chr(s, c1, c2): length = len(s.chars) diff --git a/pypy/annotation/unaryop.py b/pypy/annotation/unaryop.py --- a/pypy/annotation/unaryop.py +++ b/pypy/annotation/unaryop.py @@ -503,6 +503,10 @@ getbookkeeper().count("str_split", str, patt) return getbookkeeper().newlist(str.basestringclass()) + def method_rsplit(str, patt, max=-1): + getbookkeeper().count("str_rsplit", str, patt) + return getbookkeeper().newlist(str.basestringclass()) + def method_replace(str, s1, s2): return str.basestringclass() diff --git a/pypy/rpython/ootypesystem/rstr.py b/pypy/rpython/ootypesystem/rstr.py --- a/pypy/rpython/ootypesystem/rstr.py +++ b/pypy/rpython/ootypesystem/rstr.py @@ -214,6 +214,9 @@ def ll_split_chr(RESULT, s, c, max): return RESULT.ll_convert_from_array(s.ll_split_chr(c, max)) + def ll_rsplit_chr(RESULT, s, c, max): + return RESULT.ll_convert_from_array(s.ll_rsplit_chr(c, max)) + def ll_int(s, base): if not 2 <= base <= 36: raise ValueError From commits-noreply at bitbucket.org Wed Mar 30 19:54:01 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 30 Mar 2011 19:54:01 +0200 (CEST) Subject: [pypy-svn] pypy default: Fixes Message-ID: <20110330175401.07559282BE8@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43026:04cc61594726 Date: 2011-03-30 13:27 +0200 http://bitbucket.org/pypy/pypy/changeset/04cc61594726/ Log: Fixes diff --git a/pypy/rpython/test/test_rstr.py b/pypy/rpython/test/test_rstr.py --- a/pypy/rpython/test/test_rstr.py +++ b/pypy/rpython/test/test_rstr.py @@ -583,8 +583,8 @@ l = getattr(s, split_fn)(const('.')) sum = 0 for num in l: - if len(num): - sum += ord(num[0]) - 
ord(const('0')[0]) + if len(num): + sum += ord(num[0]) - ord(const('0')[0]) return sum + len(l) * 100 return fn @@ -607,8 +607,8 @@ l = getattr(s, split_fn)(const('.'), j) sum = 0 for num in l: - if len(num): - sum += ord(num[0]) - ord(const('0')[0]) + if len(num): + sum += ord(num[0]) - ord(const('0')[0]) return sum + len(l) * 100 return fn @@ -620,7 +620,7 @@ assert res == fn(i, j) def test_rsplit_limit(self): - fn = self._make_split_limit_test('split') + fn = self._make_split_limit_test('rsplit') for i in range(5): for j in range(4): res = self.interpret(fn, [i, j]) From commits-noreply at bitbucket.org Wed Mar 30 19:54:02 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 30 Mar 2011 19:54:02 +0200 (CEST) Subject: [pypy-svn] pypy default: Implement str.rsplit for the cli backend Message-ID: <20110330175402.4436D282BE8@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43027:56cab8762a29 Date: 2011-03-30 13:28 +0200 http://bitbucket.org/pypy/pypy/changeset/56cab8762a29/ Log: Implement str.rsplit for the cli backend diff --git a/pypy/translator/cli/src/pypylib.cs b/pypy/translator/cli/src/pypylib.cs --- a/pypy/translator/cli/src/pypylib.cs +++ b/pypy/translator/cli/src/pypylib.cs @@ -26,7 +26,10 @@ else { string res = ""; foreach(char ch in x) - res+= string.Format("\\x{0:X2}", (int)ch); + if (ch >= 32 && ch < 128) + res+= ch; + else + res+= string.Format("\\x{0:X2}", (int)ch); return string.Format("'{0}'", res); } } @@ -725,6 +728,25 @@ return s.Split(new Char[] {ch}, max + 1); } + public static string[] ll_rsplit_chr(string s, char ch, int max) + { + string[] splits = s.Split(ch); + if (max < 0 || splits.Length <= max + 1) + return splits; + else { + /* XXX not very efficient */ + string first = splits[0]; + // join the first (length - max - 1) items + int i; + for (i = 1; i < splits.Length - max; i++) + first += ch + splits[i]; + splits[0] = first; + Array.Copy(splits, i, splits, 1, max); + Array.Resize(ref splits, max + 1); + 
return splits; + } + } + public static bool ll_contains(string s, char ch) { return s.IndexOf(ch) != -1; From commits-noreply at bitbucket.org Wed Mar 30 19:54:03 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Wed, 30 Mar 2011 19:54:03 +0200 (CEST) Subject: [pypy-svn] pypy default: Add java implementation of str.rsplit() Message-ID: <20110330175403.1B742282BE8@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43028:1c14a2e9dad8 Date: 2011-03-30 13:59 +0200 http://bitbucket.org/pypy/pypy/changeset/1c14a2e9dad8/ Log: Add java implementation of str.rsplit() diff --git a/pypy/translator/jvm/src/pypy/PyPy.java b/pypy/translator/jvm/src/pypy/PyPy.java --- a/pypy/translator/jvm/src/pypy/PyPy.java +++ b/pypy/translator/jvm/src/pypy/PyPy.java @@ -761,6 +761,21 @@ return list.toArray(new String[list.size()]); } + public static Object[] ll_rsplit_chr(String str, char c, int max) { + ArrayList list = new ArrayList(); + int lastidx = str.length(), idx = 0; + while ((idx = str.lastIndexOf(c, lastidx - 1)) != -1) + { + if (max >= 0 && list.size() >= max) + break; + String sub = str.substring(idx + 1, lastidx); + list.add(0, sub); + lastidx = idx; + } + list.add(0, str.substring(0, lastidx)); + return list.toArray(new String[list.size()]); + } + public static String ll_substring(String str, int start, int cnt) { return str.substring(start,start+cnt); } From commits-noreply at bitbucket.org Wed Mar 30 20:23:56 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Wed, 30 Mar 2011 20:23:56 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: tweak abstract Message-ID: <20110330182356.0ADAC282BDC@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3445:e8da9d3ba680 Date: 2011-03-30 20:23 +0200 http://bitbucket.org/pypy/extradoc/changeset/e8da9d3ba680/ Log: tweak abstract diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -100,12 +100,11 @@ 
\begin{abstract} -A meta-tracing JIT is a JIT that is applicable to a variety of different +Meta-tracing JITs can be applied to a variety of different languages without explicitly encoding language semantics into the compiler. So -far, meta-tracing JITs lacked a way to feed back runtime information into the +far, they lacked a way to feed back runtime information into the compiler, which restricted their performance. In this paper we describe the -mechanisms in PyPy's meta-tracing JIT that can be used to control runtime -feedback in flexible and language-specific ways. These mechanisms are flexible +flexible mechanisms in PyPy's meta-tracing JIT that can be used to control runtime feedback in language-specific ways. These mechanisms are flexible enough to implement classical VM techniques such as maps and polymorphic inline caches. diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 0309aaa5d3ba880320e669b9cbf90865a230fb87..50c9a18281fc114a71d350cbd7190666f1494042 GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 20:27:38 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Wed, 30 Mar 2011 20:27:38 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: Idea Message-ID: <20110330182738.C419B282BDC@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3446:7fa04fa42fe6 Date: 2011-03-30 20:27 +0200 http://bitbucket.org/pypy/extradoc/changeset/7fa04fa42fe6/ Log: Idea diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -865,6 +865,9 @@ versioned types for Richards. XXX good explanation. For Telco, enabling both does not change much. 
+\pedronis{XXX radical idea, given that there may be no space to discuss the subtle points and that this is really about showing that we can enable such mechanisms and there is already literature that shows that/how they work, to just consider the benchmarks with no maps and no versions and with both?} + + \begin{figure} {\footnotesize \begin{center} diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index 50c9a18281fc114a71d350cbd7190666f1494042..d0c330029730051d107ab2998b0079235c652719 GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 20:34:32 2011 From: commits-noreply at bitbucket.org (pedronis) Date: Wed, 30 Mar 2011 20:34:32 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: change the accents of the invokedynamic consideration Message-ID: <20110330183432.CC712282BDC@codespeak.net> Author: Samuele Pedroni Branch: extradoc Changeset: r3447:8beee32fe6b7 Date: 2011-03-30 20:34 +0200 http://bitbucket.org/pypy/extradoc/changeset/8beee32fe6b7/ Log: change the accents of the invokedynamic consideration diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -941,9 +941,7 @@ Somewhat relatedly, the proposed ``invokedynamic'' bytecode \cite{rose_bytecodes_2009} that will be added to the JVM is supposed to make the -implementation of dynamic languages on top of JVMs easier. The bytecode gives -the language implementor control over how the JIT optimizes the language's -features and when optimized code needs to be deoptimized. XXX +implementation of dynamic languages on top of JVMs easier. The bytecode gives access to user accessible generalized inline cache. It requires of course compilation to JVM bytecode instead of simply writing an interpreter, predictability of performance across JVMs is also an open question. 
We already explored promotion in other context, such as earlier versions of PyPy's JIT \cite{armin_rigo_jit_2007} as well as a Prolog partial evaluator diff --git a/talk/icooolps2011/jit-hints.pdf b/talk/icooolps2011/jit-hints.pdf index d0c330029730051d107ab2998b0079235c652719..319fb7f0ad019fd3dc01575729a8ac0e2767baca GIT binary patch [cut] From commits-noreply at bitbucket.org Wed Mar 30 22:42:54 2011 From: commits-noreply at bitbucket.org (lac) Date: Wed, 30 Mar 2011 22:42:54 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: minor language rephrasing Message-ID: <20110330204254.BE56C282BDC@codespeak.net> Author: Laura Creighton Branch: extradoc Changeset: r3448:903e1f1c541e Date: 2011-03-30 16:19 +0200 http://bitbucket.org/pypy/extradoc/changeset/903e1f1c541e/ Log: minor language rephrasing diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -839,8 +839,8 @@ decimal benchmark\footnote{\texttt{http://speleotrove.com/decimal/telco.html}}, using a pure Python decimal floating point implementation. The results we see in these two benchmarks seem to repeat themselves in other benchmarks using -object-oriented code, for purely numerical algorithms the speedups are a lot -lower. +object-oriented code; for purely numerical algorithms the speedups are +significantly smaller. The benchmarks were run on an otherwise idle Intel Core2 Duo P8400 processor with 2.26 GHz and 3072 KB of cache on a machine with 3GB RAM running Linux @@ -849,8 +849,8 @@ CPython 2.6.6\footnote{\texttt{http://python.org}}, which uses a bytecode-based interpreter. We compare it against four versions of PyPy's Python interpreter, all of them with JIT enabled. The PyPy baseline does not enable maps or type -versions. Then we have a version each where maps and versions are enabled alone -and finally a version with both. +versions. 
We then benchmarked PyPy, first using each technique separately, +and finally using both together. All benchmarks were run 50 times in the same process, to give the JIT time to produce machine code. The arithmetic mean of the times of the last 30 runs were @@ -859,12 +859,12 @@ reported in Figure~\ref{fig:times}. Versioned types speed up both benchmarks by a significant factor of around 7. -The speed advantage of maps alones is a lot less clear. Maps also have a memory +The speed advantage of maps alone is a lot less clear. Maps also have a memory advantage which we did not measure here. By themselves, maps improved the Richards benchmark slightly, but made the Telco benchmark slower. Enabling both maps and versioned types together yields a significant improvement over just versioned types for Richards. XXX good explanation. For Telco, enabling both -does not change much. +has little effect over the gains for versioned types alone. \begin{figure} {\footnotesize From commits-noreply at bitbucket.org Wed Mar 30 22:42:55 2011 From: commits-noreply at bitbucket.org (lac) Date: Wed, 30 Mar 2011 22:42:55 +0200 (CEST) Subject: [pypy-svn] extradoc extradoc: merge heads Message-ID: <20110330204255.A74B5282BDC@codespeak.net> Author: Laura Creighton Branch: extradoc Changeset: r3449:a32c661cdd9a Date: 2011-03-30 22:42 +0200 http://bitbucket.org/pypy/extradoc/changeset/a32c661cdd9a/ Log: merge heads diff --git a/talk/icooolps2011/paper.tex b/talk/icooolps2011/paper.tex --- a/talk/icooolps2011/paper.tex +++ b/talk/icooolps2011/paper.tex @@ -100,12 +100,11 @@ \begin{abstract} -A meta-tracing JIT is a JIT that is applicable to a variety of different +Meta-tracing JITs can be applied to a variety of different languages without explicitly encoding language semantics into the compiler. So -far, meta-tracing JITs lacked a way to feed back runtime information into the +far, they lacked a way to feed back runtime information into the compiler, which restricted their performance. 
In this paper we describe the -mechanisms in PyPy's meta-tracing JIT that can be used to control runtime -feedback in flexible and language-specific ways. These mechanisms are flexible +flexible mechanisms in PyPy's meta-tracing JIT that can be used to control runtime feedback in language-specific ways. These mechanisms are flexible enough to implement classical VM techniques such as maps and polymorphic inline caches. @@ -866,6 +865,9 @@ versioned types for Richards. XXX good explanation. For Telco, enabling both has little effect over the gains for versioned types alone. +\pedronis{XXX radical idea, given that there may be no space to discuss the subtle points and that this is really about showing that we can enable such mechanisms and there is already literature that shows that/how they work, to just consider the benchmarks with no maps and no versions and with both?} + + \begin{figure} {\footnotesize \begin{center} @@ -939,9 +941,7 @@ Somewhat relatedly, the proposed ``invokedynamic'' bytecode \cite{rose_bytecodes_2009} that will be added to the JVM is supposed to make the -implementation of dynamic languages on top of JVMs easier. The bytecode gives -the language implementor control over how the JIT optimizes the language's -features and when optimized code needs to be deoptimized. XXX +implementation of dynamic languages on top of JVMs easier. The bytecode gives access to user accessible generalized inline cache. It requires of course compilation to JVM bytecode instead of simply writing an interpreter, predictability of performance across JVMs is also an open question. 
We already explored promotion in other context, such as earlier versions of PyPy's JIT \cite{armin_rigo_jit_2007} as well as a Prolog partial evaluator From commits-noreply at bitbucket.org Thu Mar 31 06:41:07 2011 From: commits-noreply at bitbucket.org (alex_gaynor) Date: Thu, 31 Mar 2011 06:41:07 +0200 (CEST) Subject: [pypy-svn] pypy default: fix tests Message-ID: <20110331044107.3B8D2282BE7@codespeak.net> Author: Alex Gaynor Branch: Changeset: r43029:ca83a9422a73 Date: 2011-03-31 00:40 -0400 http://bitbucket.org/pypy/pypy/changeset/ca83a9422a73/ Log: fix tests diff --git a/pypy/jit/metainterp/test/test_optimizebasic.py b/pypy/jit/metainterp/test/test_optimizebasic.py --- a/pypy/jit/metainterp/test/test_optimizebasic.py +++ b/pypy/jit/metainterp/test/test_optimizebasic.py @@ -253,7 +253,7 @@ loop.call_pure_results = args_dict() if call_pure_results is not None: for k, v in call_pure_results.items(): - loop.call_pure_results[list(k)] = v + loop.call_pure_results[list(k)] = v metainterp_sd = FakeMetaInterpStaticData(self.cpu) if hasattr(self, 'vrefinfo'): metainterp_sd.virtualref_info = self.vrefinfo @@ -2886,7 +2886,7 @@ # the result of the call, recorded as the first arg), or turned into # a regular CALL. 
arg_consts = [ConstInt(i) for i in (123456, 4, 5, 6)] - call_pure_results = {tuple(arg_consts): ConstInt(42)} + call_pure_results = {tuple(arg_consts): ConstInt(42)} ops = ''' [i0, i1, i2] escape(i1) @@ -2931,7 +2931,6 @@ i0 = force_token() p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i0, descr=virtualtokendescr) - setfield_gc(p2, 5, descr=virtualrefindexdescr) escape(p2) setfield_gc(p2, p1, descr=virtualforceddescr) setfield_gc(p2, -3, descr=virtualtokendescr) @@ -2964,7 +2963,6 @@ # p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 3, descr=virtualrefindexdescr) setfield_gc(p0, p2, descr=nextdescr) # call_may_force(i1, descr=mayforcevirtdescr) @@ -3005,7 +3003,6 @@ # p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 2, descr=virtualrefindexdescr) setfield_gc(p0, p2, descr=nextdescr) # call_may_force(i1, descr=mayforcevirtdescr) @@ -3062,7 +3059,7 @@ self.loop.inputargs[0].value = self.nodeobjvalue self.check_expanded_fail_descr('''p2, p1 p0.refdescr = p2 - where p2 is a jit_virtual_ref_vtable, virtualtokendescr=i3, virtualrefindexdescr=2 + where p2 is a jit_virtual_ref_vtable, virtualtokendescr=i3 where p1 is a node_vtable, nextdescr=p1b where p1b is a node_vtable, valuedescr=i1 ''', rop.GUARD_NO_EXCEPTION) @@ -3084,7 +3081,6 @@ i3 = force_token() p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 7, descr=virtualrefindexdescr) escape(p2) p1 = new_with_vtable(ConstClass(node_vtable)) setfield_gc(p2, p1, descr=virtualforceddescr) @@ -3111,7 +3107,6 @@ i3 = force_token() p2 = new_with_vtable(ConstClass(jit_virtual_ref_vtable)) setfield_gc(p2, i3, descr=virtualtokendescr) - setfield_gc(p2, 23, descr=virtualrefindexdescr) escape(p2) setfield_gc(p2, p1, descr=virtualforceddescr) setfield_gc(p2, -3, descr=virtualtokendescr) @@ -3360,7 
+3355,7 @@ i1 = int_lt(i0, 4) guard_true(i1) [] i1p = int_gt(i0, -4) - guard_true(i1p) [] + guard_true(i1p) [] i2 = int_sub(i0, 10) i3 = int_lt(i2, -5) guard_true(i3) [] @@ -3371,7 +3366,7 @@ i1 = int_lt(i0, 4) guard_true(i1) [] i1p = int_gt(i0, -4) - guard_true(i1p) [] + guard_true(i1p) [] i2 = int_sub(i0, 10) jump(i0) """ From commits-noreply at bitbucket.org Thu Mar 31 10:55:44 2011 From: commits-noreply at bitbucket.org (bivab) Date: Thu, 31 Mar 2011 10:55:44 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: Use save_registers when generation operations surrounded by PUSH/POP Message-ID: <20110331085544.9EA74282B90@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43030:7aa9e9f74ec6 Date: 2011-03-31 09:56 +0200 http://bitbucket.org/pypy/pypy/changeset/7aa9e9f74ec6/ Log: Use save_registers when generation operations surrounded by PUSH/POP diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py --- a/pypy/jit/backend/arm/opassembler.py +++ b/pypy/jit/backend/arm/opassembler.py @@ -8,7 +8,9 @@ from pypy.jit.backend.arm.helper.assembler import (gen_emit_op_by_helper_call, gen_emit_op_unary_cmp, - gen_emit_op_ri, gen_emit_cmp_op) + gen_emit_op_ri, + gen_emit_cmp_op, + saved_registers) from pypy.jit.backend.arm.codebuilder import ARMv7Builder, OverwritingBuilder from pypy.jit.backend.arm.jump import remap_frame_layout from pypy.jit.backend.arm.regalloc import ARMRegisterManager @@ -270,33 +272,30 @@ t = TempBox() regalloc.force_allocate_reg(t, selected_reg=regalloc.call_result_location(t)) regalloc.possibly_free_var(t) - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) + saved_regs = r.caller_resp[1:] else: - self.mc.PUSH([reg.value for reg in r.caller_resp]) + saved_regs = r.caller_resp + with saved_registers(self.mc, saved_regs): + # all arguments past the 4th go on the stack + if n_args > 4: + stack_args = n_args - 4 + n = stack_args*WORD + self._adjust_sp(n, fcond=fcond) + for i in range(4, n_args): + 
self.mov_loc_loc(regalloc.loc(args[i]), r.ip) + self.mc.STR_ri(r.ip.value, r.sp.value, (i-4)*WORD) - # all arguments past the 4th go on the stack - if n_args > 4: - stack_args = n_args - 4 - n = stack_args*WORD - self._adjust_sp(n, fcond=fcond) - for i in range(4, n_args): - self.mov_loc_loc(regalloc.loc(args[i]), r.ip) - self.mc.STR_ri(r.ip.value, r.sp.value, (i-4)*WORD) + #the actual call + self.mc.BL(adr) + regalloc.possibly_free_vars(args) + # readjust the sp in case we passed some args on the stack + if n_args > 4: + assert n > 0 + self._adjust_sp(-n, fcond=fcond) - #the actual call - self.mc.BL(adr) - regalloc.possibly_free_vars(args) - # readjust the sp in case we passed some args on the stack - if n_args > 4: - assert n > 0 - self._adjust_sp(-n, fcond=fcond) - - # restore the argumets stored on the stack - if result is not None: - regalloc.after_call(result) - self.mc.POP([reg.value for reg in r.caller_resp][1:]) - else: - self.mc.POP([reg.value for reg in r.caller_resp]) + # restore the argumets stored on the stack + if result is not None: + regalloc.after_call(result) return fcond def emit_op_same_as(self, op, arglocs, regalloc, fcond): @@ -682,11 +681,10 @@ jd = descr.outermost_jitdriver_sd assert jd is not None asm_helper_adr = self.cpu.cast_adr_to_int(jd.assembler_helper_adr) - self.mc.PUSH([reg.value for reg in r.caller_resp][1:]) - # resbox is allready in r0 - self.mov_loc_loc(arglocs[1], r.r1) - self.mc.BL(asm_helper_adr) - self.mc.POP([reg.value for reg in r.caller_resp][1:]) + with saved_registers(self.mc, r.caller_resp[1:]): + # resbox is allready in r0 + self.mov_loc_loc(arglocs[1], r.r1) + self.mc.BL(asm_helper_adr) if op.result: regalloc.after_call(op.result) # jump to merge point From commits-noreply at bitbucket.org Thu Mar 31 10:55:45 2011 From: commits-noreply at bitbucket.org (bivab) Date: Thu, 31 Mar 2011 10:55:45 +0200 (CEST) Subject: [pypy-svn] pypy arm-backend-2: filter registers to be pushed and popped based on the reg_bindings 
Message-ID: <20110331085545.89BC9282B90@codespeak.net> Author: David Schneider Branch: arm-backend-2 Changeset: r43031:5cf8d2cf7e80 Date: 2011-03-31 10:54 +0200 http://bitbucket.org/pypy/pypy/changeset/5cf8d2cf7e80/ Log: filter registers to be pushed and popped based on the reg_bindings diff --git a/pypy/jit/backend/arm/opassembler.py b/pypy/jit/backend/arm/opassembler.py --- a/pypy/jit/backend/arm/opassembler.py +++ b/pypy/jit/backend/arm/opassembler.py @@ -275,7 +275,7 @@ saved_regs = r.caller_resp[1:] else: saved_regs = r.caller_resp - with saved_registers(self.mc, saved_regs): + with saved_registers(self.mc, saved_regs, regalloc=regalloc): # all arguments past the 4th go on the stack if n_args > 4: stack_args = n_args - 4 @@ -681,7 +681,7 @@ jd = descr.outermost_jitdriver_sd assert jd is not None asm_helper_adr = self.cpu.cast_adr_to_int(jd.assembler_helper_adr) - with saved_registers(self.mc, r.caller_resp[1:]): + with saved_registers(self.mc, r.caller_resp[1:], regalloc=regalloc): # resbox is allready in r0 self.mov_loc_loc(arglocs[1], r.r1) self.mc.BL(asm_helper_adr) diff --git a/pypy/jit/backend/arm/helper/assembler.py b/pypy/jit/backend/arm/helper/assembler.py --- a/pypy/jit/backend/arm/helper/assembler.py +++ b/pypy/jit/backend/arm/helper/assembler.py @@ -35,7 +35,7 @@ regs = r.caller_resp[1:] else: regs = r.caller_resp - with saved_registers(self.mc, regs): + with saved_registers(self.mc, regs, regalloc=regalloc): helper(self.mc, fcond) return fcond return f @@ -55,12 +55,25 @@ return f class saved_registers(object): - def __init__(self, assembler, regs_to_save): + def __init__(self, assembler, regs_to_save, regalloc=None): self.assembler = assembler - self.regs = regs_to_save + self.regalloc = regalloc + if self.regalloc: + self._filter_regs(regs_to_save) + else: + self.regs = regs_to_save def __enter__(self): - self.assembler.PUSH([r.value for r in self.regs]) + if len(self.regs) > 0: + self.assembler.PUSH([r.value for r in self.regs]) def 
__exit__(self, *args): - self.assembler.POP([r.value for r in self.regs]) + if len(self.regs) > 0: + self.assembler.POP([r.value for r in self.regs]) + + def _filter_regs(self, regs_to_save): + regs = [] + for box, reg in self.regalloc.reg_bindings.iteritems(): + if reg in regs_to_save or reg is r.ip: + regs.append(reg) + self.regs = regs From commits-noreply at bitbucket.org Thu Mar 31 11:43:16 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:16 +0200 (CEST) Subject: [pypy-svn] pypy default: hg merge jit-shadowstack (only one checkin so far). Message-ID: <20110331094316.8BD77282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r43032:a054daecafde Date: 2011-03-30 17:55 +0200 http://bitbucket.org/pypy/pypy/changeset/a054daecafde/ Log: hg merge jit-shadowstack (only one checkin so far). From commits-noreply at bitbucket.org Thu Mar 31 11:43:17 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:17 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Split to its own file the part that can be reused with jit+shadowstack. Message-ID: <20110331094317.EFE3F282B90@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43033:a55b47edd32f Date: 2011-03-30 20:08 +0200 http://bitbucket.org/pypy/pypy/changeset/a55b47edd32f/ Log: Split to its own file the part that can be reused with jit+shadowstack. 
diff --git a/pypy/rpython/memory/gctransform/asmgcroot.py b/pypy/rpython/memory/gctransform/asmgcroot.py --- a/pypy/rpython/memory/gctransform/asmgcroot.py +++ b/pypy/rpython/memory/gctransform/asmgcroot.py @@ -1,5 +1,7 @@ from pypy.rpython.memory.gctransform.framework import FrameworkGCTransformer from pypy.rpython.memory.gctransform.framework import BaseRootWalker +from pypy.rpython.memory.gctransform import gcjit +from pypy.rpython.memory.gctransform.gcjit import sizeofaddr, arrayitemsize from pypy.rpython.lltypesystem import lltype, llmemory, rffi from pypy.rpython.lltypesystem.lloperation import llop from pypy.rpython.rbuiltin import gen_cast @@ -137,15 +139,7 @@ self.walk_stack_from() self._asm_callback = _asm_callback self._shape_decompressor = ShapeDecompressor() - if hasattr(gctransformer.translator, '_jit2gc'): - jit2gc = gctransformer.translator._jit2gc - self._extra_gcmapstart = jit2gc['gcmapstart'] - self._extra_gcmapend = jit2gc['gcmapend'] - self._extra_mark_sorted = jit2gc['gcmarksorted'] - else: - self._extra_gcmapstart = lambda: llmemory.NULL - self._extra_gcmapend = lambda: llmemory.NULL - self._extra_mark_sorted = lambda: True + self._jittablesearch = gcjit.GcJitTableSearch(gctransformer.translator) def need_thread_support(self, gctransformer, getfn): # Threads supported "out of the box" by the rest of the code. @@ -391,36 +385,14 @@ if item: self._shape_decompressor.setpos(item.signed[1]) return - gcmapstart2 = self._extra_gcmapstart() - gcmapend2 = self._extra_gcmapend() - if gcmapstart2 != gcmapend2: - # we have a non-empty JIT-produced table to look in - item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) - if item: - self._shape_decompressor.setaddr(item) - return - # maybe the JIT-produced table is not sorted? 
- was_already_sorted = self._extra_mark_sorted() - if not was_already_sorted: - sort_gcmap(gcmapstart2, gcmapend2) - item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) - if item: - self._shape_decompressor.setaddr(item) - return - # there is a rare risk that the array contains *two* entries - # with the same key, one of which is dead (null value), and we - # found the dead one above. Solve this case by replacing all - # dead keys with nulls, sorting again, and then trying again. - replace_dead_entries_with_nulls(gcmapstart2, gcmapend2) - sort_gcmap(gcmapstart2, gcmapend2) - item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) - if item: - self._shape_decompressor.setaddr(item) - return + item = self._jittablesearch.look_in_jit_table(retaddr) + if item: + self._shape_decompressor.setaddr(item) + return # the item may have been not found because the main array was # not sorted. Sort it and try again. win32_follow_gcmap_jmp(gcmapstart, gcmapend) - sort_gcmap(gcmapstart, gcmapend) + gcjit.sort_gcmap(gcmapstart, gcmapend) item = search_in_gcmap(gcmapstart, gcmapend, retaddr) if item: self._shape_decompressor.setpos(item.signed[1]) @@ -461,32 +433,8 @@ # ____________________________________________________________ -sizeofaddr = llmemory.sizeof(llmemory.Address) -arrayitemsize = 2 * sizeofaddr - - -def binary_search(start, end, addr1): - """Search for an element in a sorted array. - - The interval from the start address (included) to the end address - (excluded) is assumed to be a sorted arrays of pairs (addr1, addr2). - This searches for the item with a given addr1 and returns its - address. If not found exactly, it tries to return the address - of the item left of addr1 (i.e. such that result.address[0] < addr1). 
- """ - count = (end - start) // arrayitemsize - while count > 1: - middleindex = count // 2 - middle = start + middleindex * arrayitemsize - if addr1 < middle.address[0]: - count = middleindex - else: - start = middle - count -= middleindex - return start - def search_in_gcmap(gcmapstart, gcmapend, retaddr): - item = binary_search(gcmapstart, gcmapend, retaddr) + item = gcjit.binary_search(gcmapstart, gcmapend, retaddr) if item.address[0] == retaddr: return item # found # 'retaddr' not exactly found. Check that 'item' is the start of a @@ -496,31 +444,6 @@ else: return llmemory.NULL # failed -def search_in_gcmap2(gcmapstart, gcmapend, retaddr): - # same as 'search_in_gcmap', but without range checking support - # (item.signed[1] is an address in this case, not a signed at all!) - item = binary_search(gcmapstart, gcmapend, retaddr) - if item.address[0] == retaddr: - return item.address[1] # found - else: - return llmemory.NULL # failed - -def sort_gcmap(gcmapstart, gcmapend): - count = (gcmapend - gcmapstart) // arrayitemsize - qsort(gcmapstart, - rffi.cast(rffi.SIZE_T, count), - rffi.cast(rffi.SIZE_T, arrayitemsize), - llhelper(QSORT_CALLBACK_PTR, _compare_gcmap_entries)) - -def replace_dead_entries_with_nulls(start, end): - # replace the dead entries (null value) with a null key. 
- count = (end - start) // arrayitemsize - 1 - while count >= 0: - item = start + count * arrayitemsize - if item.address[1] == llmemory.NULL: - item.address[0] = llmemory.NULL - count -= 1 - if sys.platform == 'win32': def win32_follow_gcmap_jmp(start, end): # The initial gcmap table contains addresses to a JMP @@ -537,17 +460,6 @@ def win32_follow_gcmap_jmp(start, end): pass -def _compare_gcmap_entries(addr1, addr2): - key1 = addr1.address[0] - key2 = addr2.address[0] - if key1 < key2: - result = -1 - elif key1 == key2: - result = 0 - else: - result = 1 - return rffi.cast(rffi.INT, result) - # ____________________________________________________________ class ShapeDecompressor: @@ -645,14 +557,3 @@ sandboxsafe=True, _nowrapper=True) c_asm_nocollect = Constant(pypy_asm_nocollect, lltype.typeOf(pypy_asm_nocollect)) - -QSORT_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([llmemory.Address, - llmemory.Address], rffi.INT)) -qsort = rffi.llexternal('qsort', - [llmemory.Address, - rffi.SIZE_T, - rffi.SIZE_T, - QSORT_CALLBACK_PTR], - lltype.Void, - sandboxsafe=True, - _nowrapper=True) diff --git a/pypy/rpython/memory/gctransform/gcjit.py b/pypy/rpython/memory/gctransform/gcjit.py new file mode 100644 --- /dev/null +++ b/pypy/rpython/memory/gctransform/gcjit.py @@ -0,0 +1,116 @@ +from pypy.rpython.lltypesystem import lltype, llmemory, rffi +from pypy.rpython.annlowlevel import llhelper + + +sizeofaddr = llmemory.sizeof(llmemory.Address) +arrayitemsize = 2 * sizeofaddr + + +def binary_search(start, end, addr1): + """Search for an element in a sorted array. + + The interval from the start address (included) to the end address + (excluded) is assumed to be a sorted arrays of pairs (addr1, addr2). + This searches for the item with a given addr1 and returns its + address. If not found exactly, it tries to return the address + of the item left of addr1 (i.e. such that result.address[0] < addr1). 
+ """ + count = (end - start) // arrayitemsize + while count > 1: + middleindex = count // 2 + middle = start + middleindex * arrayitemsize + if addr1 < middle.address[0]: + count = middleindex + else: + start = middle + count -= middleindex + return start + +def search_in_gcmap_direct(gcmapstart, gcmapend, key): + # same as 'search_in_gcmap' in asmgcroot.py, but without range checking + # support (item.address[1] is an address in this case, not a int at all!) + item = binary_search(gcmapstart, gcmapend, key) + if item.address[0] == key: + return item.address[1] # found + else: + return llmemory.NULL # failed + +def sort_gcmap(gcmapstart, gcmapend): + count = (gcmapend - gcmapstart) // arrayitemsize + qsort(gcmapstart, + rffi.cast(rffi.SIZE_T, count), + rffi.cast(rffi.SIZE_T, arrayitemsize), + llhelper(QSORT_CALLBACK_PTR, _compare_gcmap_entries)) + +QSORT_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([llmemory.Address, + llmemory.Address], rffi.INT)) +qsort = rffi.llexternal('qsort', + [llmemory.Address, + rffi.SIZE_T, + rffi.SIZE_T, + QSORT_CALLBACK_PTR], + lltype.Void, + sandboxsafe=True, + _nowrapper=True) + +def replace_dead_entries_with_nulls(start, end): + # replace the dead entries (null value) with a null key. 
+ count = (end - start) // arrayitemsize - 1 + while count >= 0: + item = start + count * arrayitemsize + if item.address[1] == llmemory.NULL: + item.address[0] = llmemory.NULL + count -= 1 + +def _compare_gcmap_entries(addr1, addr2): + key1 = addr1.address[0] + key2 = addr2.address[0] + if key1 < key2: + result = -1 + elif key1 == key2: + result = 0 + else: + result = 1 + return rffi.cast(rffi.INT, result) + + +class GcJitTableSearch(object): + + def __init__(self, translator): + if hasattr(translator, '_jit2gc'): + jit2gc = translator._jit2gc + self._extra_gcmapstart = jit2gc['gcmapstart'] + self._extra_gcmapend = jit2gc['gcmapend'] + self._extra_mark_sorted = jit2gc['gcmarksorted'] + else: + self._extra_gcmapstart = lambda: llmemory.NULL + self._extra_gcmapend = lambda: llmemory.NULL + self._extra_mark_sorted = lambda: True + + def _freeze_(self): + return True + + def look_in_jit_table(self, key): + gcmapstart2 = self._extra_gcmapstart() + gcmapend2 = self._extra_gcmapend() + if gcmapstart2 == gcmapend2: + return llmemory.NULL + # we have a non-empty JIT-produced table to look in + item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) + if item: + return item + # maybe the JIT-produced table is not sorted? + was_already_sorted = self._extra_mark_sorted() + if not was_already_sorted: + sort_gcmap(gcmapstart2, gcmapend2) + item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) + if item: + return item + # there is a rare risk that the array contains *two* entries + # with the same key, one of which is dead (null value), and we + # found the dead one above. Solve this case by replacing all + # dead keys with nulls, sorting again, and then trying again. 
+ replace_dead_entries_with_nulls(gcmapstart2, gcmapend2) + sort_gcmap(gcmapstart2, gcmapend2) + item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) + return item From commits-noreply at bitbucket.org Thu Mar 31 11:43:19 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:19 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Add a hook. Message-ID: <20110331094319.3A119282BE9@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43034:9fd25472453a Date: 2011-03-30 20:08 +0200 http://bitbucket.org/pypy/pypy/changeset/9fd25472453a/ Log: Add a hook. diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -9,6 +9,7 @@ from pypy.rlib.rarithmetic import ovfcheck from pypy.rlib import rstack, rgc from pypy.rlib.debug import ll_assert +from pypy.rlib.objectmodel import we_are_translated from pypy.translator.backendopt import graphanalyze from pypy.translator.backendopt.support import var_needsgc from pypy.annotation import model as annmodel @@ -151,8 +152,13 @@ # for regular translation: pick the GC from the config GCClass, GC_PARAMS = choose_gc_from_config(translator.config) + self.root_stack_jit_hook = None if hasattr(translator, '_jit2gc'): self.layoutbuilder = translator._jit2gc['layoutbuilder'] + try: + self.root_stack_jit_hook = translator._jit2gc['rootstackhook'] + except KeyError: + pass else: self.layoutbuilder = TransformerLayoutBuilder(translator, GCClass) self.layoutbuilder.transformer = self @@ -1327,6 +1333,15 @@ return top self.decr_stack = decr_stack + self.rootstackhook = gctransformer.root_stack_jit_hook + if self.rootstackhook is None: + def collect_stack_root(callback, gc, addr): + if we_are_translated(): + ll_assert(addr.address[0].signed[0] != 0, + "unexpected null object header") + callback(gc, addr) + self.rootstackhook = collect_stack_root + 
def push_stack(self, addr): top = self.incr_stack(1) top.address[0] = addr @@ -1348,11 +1363,12 @@ def walk_stack_roots(self, collect_stack_root): gcdata = self.gcdata gc = self.gc + rootstackhook = self.rootstackhook addr = gcdata.root_stack_base end = gcdata.root_stack_top while addr != end: if gc.points_to_valid_gc_object(addr): - collect_stack_root(gc, addr) + rootstackhook(collect_stack_root, gc, addr) addr += sizeofaddr if self.collect_stacks_from_other_threads is not None: self.collect_stacks_from_other_threads(collect_stack_root) @@ -1460,11 +1476,12 @@ # collect all valid stacks from the dict (the entry # corresponding to the current thread is not valid) gc = self.gc + rootstackhook = self.rootstackhook end = stacktop - sizeofaddr addr = end.address[0] while addr != end: if gc.points_to_valid_gc_object(addr): - callback(gc, addr) + rootstackhook(callback, gc, addr) addr += sizeofaddr def collect_more_stacks(callback): From commits-noreply at bitbucket.org Thu Mar 31 11:43:21 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:21 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Backout a55b47edd32f. I think now that using that is Message-ID: <20110331094321.081F6282BE9@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43035:7a9e241fdcee Date: 2011-03-30 20:23 +0200 http://bitbucket.org/pypy/pypy/changeset/7a9e241fdcee/ Log: Backout a55b47edd32f. I think now that using that is unnecessarily complex in this case. 
diff --git a/pypy/rpython/memory/gctransform/asmgcroot.py b/pypy/rpython/memory/gctransform/asmgcroot.py --- a/pypy/rpython/memory/gctransform/asmgcroot.py +++ b/pypy/rpython/memory/gctransform/asmgcroot.py @@ -1,7 +1,5 @@ from pypy.rpython.memory.gctransform.framework import FrameworkGCTransformer from pypy.rpython.memory.gctransform.framework import BaseRootWalker -from pypy.rpython.memory.gctransform import gcjit -from pypy.rpython.memory.gctransform.gcjit import sizeofaddr, arrayitemsize from pypy.rpython.lltypesystem import lltype, llmemory, rffi from pypy.rpython.lltypesystem.lloperation import llop from pypy.rpython.rbuiltin import gen_cast @@ -139,7 +137,15 @@ self.walk_stack_from() self._asm_callback = _asm_callback self._shape_decompressor = ShapeDecompressor() - self._jittablesearch = gcjit.GcJitTableSearch(gctransformer.translator) + if hasattr(gctransformer.translator, '_jit2gc'): + jit2gc = gctransformer.translator._jit2gc + self._extra_gcmapstart = jit2gc['gcmapstart'] + self._extra_gcmapend = jit2gc['gcmapend'] + self._extra_mark_sorted = jit2gc['gcmarksorted'] + else: + self._extra_gcmapstart = lambda: llmemory.NULL + self._extra_gcmapend = lambda: llmemory.NULL + self._extra_mark_sorted = lambda: True def need_thread_support(self, gctransformer, getfn): # Threads supported "out of the box" by the rest of the code. @@ -385,14 +391,36 @@ if item: self._shape_decompressor.setpos(item.signed[1]) return - item = self._jittablesearch.look_in_jit_table(retaddr) - if item: - self._shape_decompressor.setaddr(item) - return + gcmapstart2 = self._extra_gcmapstart() + gcmapend2 = self._extra_gcmapend() + if gcmapstart2 != gcmapend2: + # we have a non-empty JIT-produced table to look in + item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) + if item: + self._shape_decompressor.setaddr(item) + return + # maybe the JIT-produced table is not sorted? 
+ was_already_sorted = self._extra_mark_sorted() + if not was_already_sorted: + sort_gcmap(gcmapstart2, gcmapend2) + item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) + if item: + self._shape_decompressor.setaddr(item) + return + # there is a rare risk that the array contains *two* entries + # with the same key, one of which is dead (null value), and we + # found the dead one above. Solve this case by replacing all + # dead keys with nulls, sorting again, and then trying again. + replace_dead_entries_with_nulls(gcmapstart2, gcmapend2) + sort_gcmap(gcmapstart2, gcmapend2) + item = search_in_gcmap2(gcmapstart2, gcmapend2, retaddr) + if item: + self._shape_decompressor.setaddr(item) + return # the item may have been not found because the main array was # not sorted. Sort it and try again. win32_follow_gcmap_jmp(gcmapstart, gcmapend) - gcjit.sort_gcmap(gcmapstart, gcmapend) + sort_gcmap(gcmapstart, gcmapend) item = search_in_gcmap(gcmapstart, gcmapend, retaddr) if item: self._shape_decompressor.setpos(item.signed[1]) @@ -433,8 +461,32 @@ # ____________________________________________________________ +sizeofaddr = llmemory.sizeof(llmemory.Address) +arrayitemsize = 2 * sizeofaddr + + +def binary_search(start, end, addr1): + """Search for an element in a sorted array. + + The interval from the start address (included) to the end address + (excluded) is assumed to be a sorted arrays of pairs (addr1, addr2). + This searches for the item with a given addr1 and returns its + address. If not found exactly, it tries to return the address + of the item left of addr1 (i.e. such that result.address[0] < addr1). 
+ """ + count = (end - start) // arrayitemsize + while count > 1: + middleindex = count // 2 + middle = start + middleindex * arrayitemsize + if addr1 < middle.address[0]: + count = middleindex + else: + start = middle + count -= middleindex + return start + def search_in_gcmap(gcmapstart, gcmapend, retaddr): - item = gcjit.binary_search(gcmapstart, gcmapend, retaddr) + item = binary_search(gcmapstart, gcmapend, retaddr) if item.address[0] == retaddr: return item # found # 'retaddr' not exactly found. Check that 'item' is the start of a @@ -444,6 +496,31 @@ else: return llmemory.NULL # failed +def search_in_gcmap2(gcmapstart, gcmapend, retaddr): + # same as 'search_in_gcmap', but without range checking support + # (item.signed[1] is an address in this case, not a signed at all!) + item = binary_search(gcmapstart, gcmapend, retaddr) + if item.address[0] == retaddr: + return item.address[1] # found + else: + return llmemory.NULL # failed + +def sort_gcmap(gcmapstart, gcmapend): + count = (gcmapend - gcmapstart) // arrayitemsize + qsort(gcmapstart, + rffi.cast(rffi.SIZE_T, count), + rffi.cast(rffi.SIZE_T, arrayitemsize), + llhelper(QSORT_CALLBACK_PTR, _compare_gcmap_entries)) + +def replace_dead_entries_with_nulls(start, end): + # replace the dead entries (null value) with a null key. 
+ count = (end - start) // arrayitemsize - 1 + while count >= 0: + item = start + count * arrayitemsize + if item.address[1] == llmemory.NULL: + item.address[0] = llmemory.NULL + count -= 1 + if sys.platform == 'win32': def win32_follow_gcmap_jmp(start, end): # The initial gcmap table contains addresses to a JMP @@ -460,6 +537,17 @@ def win32_follow_gcmap_jmp(start, end): pass +def _compare_gcmap_entries(addr1, addr2): + key1 = addr1.address[0] + key2 = addr2.address[0] + if key1 < key2: + result = -1 + elif key1 == key2: + result = 0 + else: + result = 1 + return rffi.cast(rffi.INT, result) + # ____________________________________________________________ class ShapeDecompressor: @@ -557,3 +645,14 @@ sandboxsafe=True, _nowrapper=True) c_asm_nocollect = Constant(pypy_asm_nocollect, lltype.typeOf(pypy_asm_nocollect)) + +QSORT_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([llmemory.Address, + llmemory.Address], rffi.INT)) +qsort = rffi.llexternal('qsort', + [llmemory.Address, + rffi.SIZE_T, + rffi.SIZE_T, + QSORT_CALLBACK_PTR], + lltype.Void, + sandboxsafe=True, + _nowrapper=True) diff --git a/pypy/rpython/memory/gctransform/gcjit.py b/pypy/rpython/memory/gctransform/gcjit.py deleted file mode 100644 --- a/pypy/rpython/memory/gctransform/gcjit.py +++ /dev/null @@ -1,116 +0,0 @@ -from pypy.rpython.lltypesystem import lltype, llmemory, rffi -from pypy.rpython.annlowlevel import llhelper - - -sizeofaddr = llmemory.sizeof(llmemory.Address) -arrayitemsize = 2 * sizeofaddr - - -def binary_search(start, end, addr1): - """Search for an element in a sorted array. - - The interval from the start address (included) to the end address - (excluded) is assumed to be a sorted arrays of pairs (addr1, addr2). - This searches for the item with a given addr1 and returns its - address. If not found exactly, it tries to return the address - of the item left of addr1 (i.e. such that result.address[0] < addr1). 
- """ - count = (end - start) // arrayitemsize - while count > 1: - middleindex = count // 2 - middle = start + middleindex * arrayitemsize - if addr1 < middle.address[0]: - count = middleindex - else: - start = middle - count -= middleindex - return start - -def search_in_gcmap_direct(gcmapstart, gcmapend, key): - # same as 'search_in_gcmap' in asmgcroot.py, but without range checking - # support (item.address[1] is an address in this case, not a int at all!) - item = binary_search(gcmapstart, gcmapend, key) - if item.address[0] == key: - return item.address[1] # found - else: - return llmemory.NULL # failed - -def sort_gcmap(gcmapstart, gcmapend): - count = (gcmapend - gcmapstart) // arrayitemsize - qsort(gcmapstart, - rffi.cast(rffi.SIZE_T, count), - rffi.cast(rffi.SIZE_T, arrayitemsize), - llhelper(QSORT_CALLBACK_PTR, _compare_gcmap_entries)) - -QSORT_CALLBACK_PTR = lltype.Ptr(lltype.FuncType([llmemory.Address, - llmemory.Address], rffi.INT)) -qsort = rffi.llexternal('qsort', - [llmemory.Address, - rffi.SIZE_T, - rffi.SIZE_T, - QSORT_CALLBACK_PTR], - lltype.Void, - sandboxsafe=True, - _nowrapper=True) - -def replace_dead_entries_with_nulls(start, end): - # replace the dead entries (null value) with a null key. 
- count = (end - start) // arrayitemsize - 1 - while count >= 0: - item = start + count * arrayitemsize - if item.address[1] == llmemory.NULL: - item.address[0] = llmemory.NULL - count -= 1 - -def _compare_gcmap_entries(addr1, addr2): - key1 = addr1.address[0] - key2 = addr2.address[0] - if key1 < key2: - result = -1 - elif key1 == key2: - result = 0 - else: - result = 1 - return rffi.cast(rffi.INT, result) - - -class GcJitTableSearch(object): - - def __init__(self, translator): - if hasattr(translator, '_jit2gc'): - jit2gc = translator._jit2gc - self._extra_gcmapstart = jit2gc['gcmapstart'] - self._extra_gcmapend = jit2gc['gcmapend'] - self._extra_mark_sorted = jit2gc['gcmarksorted'] - else: - self._extra_gcmapstart = lambda: llmemory.NULL - self._extra_gcmapend = lambda: llmemory.NULL - self._extra_mark_sorted = lambda: True - - def _freeze_(self): - return True - - def look_in_jit_table(self, key): - gcmapstart2 = self._extra_gcmapstart() - gcmapend2 = self._extra_gcmapend() - if gcmapstart2 == gcmapend2: - return llmemory.NULL - # we have a non-empty JIT-produced table to look in - item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) - if item: - return item - # maybe the JIT-produced table is not sorted? - was_already_sorted = self._extra_mark_sorted() - if not was_already_sorted: - sort_gcmap(gcmapstart2, gcmapend2) - item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) - if item: - return item - # there is a rare risk that the array contains *two* entries - # with the same key, one of which is dead (null value), and we - # found the dead one above. Solve this case by replacing all - # dead keys with nulls, sorting again, and then trying again. 
- replace_dead_entries_with_nulls(gcmapstart2, gcmapend2) - sort_gcmap(gcmapstart2, gcmapend2) - item = search_in_gcmap_direct(gcmapstart2, gcmapend2, key) - return item From commits-noreply at bitbucket.org Thu Mar 31 11:43:25 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:25 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Work in progress. A bit hard to test individual changes :-( Message-ID: <20110331094325.5A069282BE8@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43036:3f4a55febb56 Date: 2011-03-31 11:40 +0200 http://bitbucket.org/pypy/pypy/changeset/3f4a55febb56/ Log: Work in progress. A bit hard to test individual changes :-( diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -349,6 +349,8 @@ INSN_rb = insn(rex_w, chr(base+3), register(1,8), stack_bp(2)) INSN_rm = insn(rex_w, chr(base+3), register(1,8), mem_reg_plus_const(2)) INSN_rj = insn(rex_w, chr(base+3), register(1,8), '\x05', immediate(2)) + INSN_ji8 = insn(rex_w, '\x83', orbyte(base), '\x05', immediate(1), + immediate(2,'b')) INSN_bi8 = insn(rex_w, '\x83', orbyte(base), stack_bp(1), immediate(2,'b')) INSN_bi32= insn(rex_w, '\x81', orbyte(base), stack_bp(1), immediate(2)) @@ -366,7 +368,8 @@ INSN_bi32(mc, offset, immed) INSN_bi._always_inline_ = True # try to constant-fold single_byte() - return INSN_ri, INSN_rr, INSN_rb, INSN_bi, INSN_br, INSN_rm, INSN_rj + return (INSN_ri, INSN_rr, INSN_rb, INSN_bi, INSN_br, INSN_rm, INSN_rj, + INSN_ji8) def select_8_or_32_bit_immed(insn_8, insn_32): def INSN(*args): @@ -444,13 +447,13 @@ # ------------------------------ Arithmetic ------------------------------ - ADD_ri, ADD_rr, ADD_rb, _, _, ADD_rm, ADD_rj = common_modes(0) - OR_ri, OR_rr, OR_rb, _, _, OR_rm, OR_rj = common_modes(1) - AND_ri, AND_rr, AND_rb, _, _, AND_rm, AND_rj = common_modes(4) - SUB_ri, SUB_rr, SUB_rb, _, _, SUB_rm, SUB_rj = 
common_modes(5) - SBB_ri, SBB_rr, SBB_rb, _, _, SBB_rm, SBB_rj = common_modes(3) - XOR_ri, XOR_rr, XOR_rb, _, _, XOR_rm, XOR_rj = common_modes(6) - CMP_ri, CMP_rr, CMP_rb, CMP_bi, CMP_br, CMP_rm, CMP_rj = common_modes(7) + ADD_ri, ADD_rr, ADD_rb, _, _, ADD_rm, ADD_rj, _ = common_modes(0) + OR_ri, OR_rr, OR_rb, _, _, OR_rm, OR_rj, _ = common_modes(1) + AND_ri, AND_rr, AND_rb, _, _, AND_rm, AND_rj, _ = common_modes(4) + SUB_ri, SUB_rr, SUB_rb, _, _, SUB_rm, SUB_rj, SUB_ji8 = common_modes(5) + SBB_ri, SBB_rr, SBB_rb, _, _, SBB_rm, SBB_rj, _ = common_modes(3) + XOR_ri, XOR_rr, XOR_rb, _, _, XOR_rm, XOR_rj, _ = common_modes(6) + CMP_ri, CMP_rr, CMP_rb, CMP_bi, CMP_br, CMP_rm, CMP_rj, _ = common_modes(7) CMP_mi8 = insn(rex_w, '\x83', orbyte(7<<3), mem_reg_plus_const(1), immediate(2, 'b')) CMP_mi32 = insn(rex_w, '\x81', orbyte(7<<3), mem_reg_plus_const(1), immediate(2)) diff --git a/pypy/jit/backend/llsupport/gc.py b/pypy/jit/backend/llsupport/gc.py --- a/pypy/jit/backend/llsupport/gc.py +++ b/pypy/jit/backend/llsupport/gc.py @@ -1,3 +1,4 @@ +import os from pypy.rlib import rgc from pypy.rlib.objectmodel import we_are_translated from pypy.rlib.debug import fatalerror @@ -15,7 +16,6 @@ from pypy.jit.backend.llsupport.descr import GcCache, get_field_descr from pypy.jit.backend.llsupport.descr import GcPtrFieldDescr from pypy.jit.backend.llsupport.descr import get_call_descr -from pypy.rpython.memory.gctransform import asmgcroot # ____________________________________________________________ @@ -212,10 +212,12 @@ return addr_ref -class GcRootMap_asmgcc: +class GcRootMap_asmgcc(object): """Handles locating the stack roots in the assembler. This is the class supporting --gcrootfinder=asmgcc. 
""" + is_shadow_stack = False + LOC_REG = 0 LOC_ESP_PLUS = 1 LOC_EBP_PLUS = 2 @@ -224,7 +226,7 @@ GCMAP_ARRAY = rffi.CArray(lltype.Signed) CALLSHAPE_ARRAY_PTR = rffi.CArrayPtr(rffi.UCHAR) - def __init__(self): + def __init__(self, gcdescr=None): # '_gcmap' is an array of length '_gcmap_maxlength' of addresses. # '_gcmap_curlength' tells how full the array really is. # The addresses are actually grouped in pairs: @@ -237,6 +239,13 @@ self._gcmap_deadentries = 0 self._gcmap_sorted = True + def add_jit2gc_hooks(self, jit2gc): + jit2gc.update({ + 'gcmapstart': lambda: self.gcmapstart(), + 'gcmapend': lambda: self.gcmapend(), + 'gcmarksorted': lambda: self.gcmarksorted(), + }) + def initialize(self): # hack hack hack. Remove these lines and see MissingRTypeAttribute # when the rtyper tries to annotate these methods only when GC-ing... @@ -309,6 +318,7 @@ @rgc.no_collect def freeing_block(self, start, stop): + from pypy.rpython.memory.gctransform import asmgcroot # if [start:stop] is a raw block of assembler, then look up the # corresponding gcroot markers, and mark them as freed now in # self._gcmap by setting the 2nd address of every entry to NULL. @@ -365,7 +375,7 @@ number >>= 7 shape.append(chr(number | flag)) - def add_ebp_offset(self, shape, offset): + def add_frame_offset(self, shape, offset): assert (offset & 3) == 0 if offset >= 0: num = self.LOC_EBP_PLUS | offset @@ -388,6 +398,125 @@ return rawaddr +class GcRootMap_shadowstack(object): + """Handles locating the stack roots in the assembler. + This is the class supporting --gcrootfinder=shadowstack. + """ + is_shadow_stack = True + MARKER = 8 + + # The "shadowstack" is a portable way in which the GC finds the + # roots that live in the stack. Normally it is just a list of + # pointers to GC objects. The pointers may be moved around by a GC + # collection. But with the JIT, an entry can also be MARKER, in + # which case the next entry points to an assembler stack frame. 
+ # During a residual CALL from the assembler (which may indirectly + # call the GC), we use the force_index stored in the assembler + # stack frame to identify the call: we can go from the force_index + # to a list of where the GC pointers are in the frame (this is the + # purpose of the present class). + # + # Note that across CALL_MAY_FORCE or CALL_ASSEMBLER, we can also go + # from the force_index to a ResumeGuardForcedDescr instance, which + # is used if the virtualizable or the virtualrefs need to be forced + # (see pypy.jit.backend.model). The force_index number in the stack + # frame is initially set to a non-negative value x, but it is + # occasionally turned into (~x) in case of forcing. + + INTARRAYPTR = rffi.CArrayPtr(rffi.INT) + CALLSHAPES_ARRAY = rffi.CArray(INTARRAYPTR) + + def __init__(self, gcdescr): + self._callshapes = lltype.nullptr(self.CALLSHAPES_ARRAY) + self._callshapes_maxlength = 0 + self.force_index_ofs = gcdescr.force_index_ofs + + def add_jit2gc_hooks(self, jit2gc): + # + def collect_jit_stack_root(callback, gc, addr): + if addr.signed[0] != GcRootMap_shadowstack.MARKER: + # common case + if gc.points_to_valid_gc_object(addr): + callback(gc, addr) + return WORD + else: + # case of a MARKER followed by an assembler stack frame + self.follow_stack_frame_of_assembler(callback, gc, addr) + return 2 * WORD + # + jit2gc.update({ + 'rootstackhook': collect_jit_stack_root, + }) + + def initialize(self): + pass + + def follow_stack_frame_of_assembler(self, callback, gc, addr): + frame_addr = addr.signed[1] + addr = llmemory.cast_int_to_adr(frame_addr + self.force_index_ofs) + force_index = addr.signed[0] + if force_index < 0: + force_index = ~force_index + callshape = self._callshapes[force_index] + n = 0 + while True: + offset = rffi.cast(lltype.Signed, callshape[n]) + if offset == 0: + break + addr = llmemory.cast_int_to_adr(frame_addr + offset) + callback(gc, addr) + n += 1 + + def get_basic_shape(self, is_64_bit=False): + return [] + + def 
add_frame_offset(self, shape, offset): + assert offset != 0 + shape.append(offset) + + def add_callee_save_reg(self, shape, register): + msg = "GC pointer in %s was not spilled" % register + os.write(2, '[llsupport/gc] %s\n' % msg) + raise AssertionError(msg) + + def compress_callshape(self, shape, datablockwrapper): + length = len(shape) + SZINT = rffi.sizeof(rffi.INT) + rawaddr = datablockwrapper.malloc_aligned((length + 1) * SZINT, SZINT) + p = rffi.cast(self.INTARRAYPTR, rawaddr) + for i in range(length): + p[i] = rffi.cast(rffi.INT, shape[i]) + p[length] = rffi.cast(rffi.INT, 0) + return p + + def write_callshape(self, p, force_index): + if force_index >= self._callshapes_maxlength: + self._enlarge_callshape_list(force_index + 1) + self._callshapes[force_index] = p + + def _enlarge_callshape_list(self, minsize): + newlength = 250 + (self._callshapes_maxlength // 3) * 4 + if newlength < minsize: + newlength = minsize + newarray = lltype.malloc(self.CALLSHAPES_ARRAY, newlength, + flavor='raw', track_allocation=False) + if self._callshapes: + i = self._callshapes_maxlength - 1 + while i >= 0: + newarray[i] = self._callshapes[i] + i -= 1 + lltype.free(self._callshapes, flavor='raw') + self._callshapes = newarray + self._callshapes_maxlength = newlength + + def freeing_block(self, start, stop): + pass # nothing needed here + + def get_root_stack_top_addr(self): + rst_addr = llop.gc_adr_of_root_stack_top(llmemory.Address) + return rffi.cast(lltype.Signed, rst_addr) + + class WriteBarrierDescr(AbstractDescr): def __init__(self, gc_ll_descr): self.llop1 = gc_ll_descr.llop1 @@ -437,7 +566,7 @@ except KeyError: raise NotImplementedError("--gcrootfinder=%s not implemented" " with the JIT" % (name,)) - gcrootmap = cls() + gcrootmap = cls(gcdescr) self.gcrootmap = gcrootmap self.gcrefs = GcRefList() self.single_gcref_descr = GcPtrFieldDescr('', 0) @@ -446,12 +575,9 @@ # where it can be fished and reused by the FrameworkGCTransformer self.layoutbuilder = 
framework.TransformerLayoutBuilder(translator) self.layoutbuilder.delay_encoding() - self.translator._jit2gc = { - 'layoutbuilder': self.layoutbuilder, - 'gcmapstart': lambda: gcrootmap.gcmapstart(), - 'gcmapend': lambda: gcrootmap.gcmapend(), - 'gcmarksorted': lambda: gcrootmap.gcmarksorted(), - } + self.translator._jit2gc = {'layoutbuilder': self.layoutbuilder} + gcrootmap.add_jit2gc_hooks(self.translator._jit2gc) + self.GCClass = self.layoutbuilder.GCClass self.moving_gc = self.GCClass.moving_gc self.HDRPTR = lltype.Ptr(self.GCClass.HDR) diff --git a/pypy/rpython/memory/gctransform/framework.py b/pypy/rpython/memory/gctransform/framework.py --- a/pypy/rpython/memory/gctransform/framework.py +++ b/pypy/rpython/memory/gctransform/framework.py @@ -506,6 +506,10 @@ s_gc = self.translator.annotator.bookkeeper.valueoftype(GCClass) r_gc = self.translator.rtyper.getrepr(s_gc) self.c_const_gc = rmodel.inputconst(r_gc, self.gcdata.gc) + s_gc_data = self.translator.annotator.bookkeeper.valueoftype( + gctypelayout.GCData) + r_gc_data = self.translator.rtyper.getrepr(s_gc_data) + self.c_const_gcdata = rmodel.inputconst(r_gc_data, self.gcdata) self.malloc_zero_filled = GCClass.malloc_zero_filled HDR = self.HDR = self.gcdata.gc.gcheaderbuilder.HDR @@ -792,6 +796,15 @@ resulttype=llmemory.Address) hop.genop('adr_add', [v_gc_adr, c_ofs], resultvar=op.result) + def gct_gc_adr_of_root_stack_top(self, hop): + op = hop.spaceop + ofs = llmemory.offsetof(self.c_const_gcdata.concretetype.TO, + 'inst_root_stack_top') + c_ofs = rmodel.inputconst(lltype.Signed, ofs) + v_gcdata_adr = hop.genop('cast_ptr_to_adr', [self.c_const_gcdata], + resulttype=llmemory.Address) + hop.genop('adr_add', [v_gcdata_adr, c_ofs], resultvar=op.result) + def gct_gc_x_swap_pool(self, hop): op = hop.spaceop [v_malloced] = op.args @@ -1336,10 +1349,9 @@ self.rootstackhook = gctransformer.root_stack_jit_hook if self.rootstackhook is None: def collect_stack_root(callback, gc, addr): - if we_are_translated(): - 
ll_assert(addr.address[0].signed[0] != 0, - "unexpected null object header") - callback(gc, addr) + if gc.points_to_valid_gc_object(addr): + callback(gc, addr) + return sizeofaddr self.rootstackhook = collect_stack_root def push_stack(self, addr): @@ -1367,9 +1379,7 @@ addr = gcdata.root_stack_base end = gcdata.root_stack_top while addr != end: - if gc.points_to_valid_gc_object(addr): - rootstackhook(collect_stack_root, gc, addr) - addr += sizeofaddr + addr += rootstackhook(collect_stack_root, gc, addr) if self.collect_stacks_from_other_threads is not None: self.collect_stacks_from_other_threads(collect_stack_root) @@ -1480,9 +1490,7 @@ end = stacktop - sizeofaddr addr = end.address[0] while addr != end: - if gc.points_to_valid_gc_object(addr): - rootstackhook(callback, gc, addr) - addr += sizeofaddr + addr += rootstackhook(callback, gc, addr) def collect_more_stacks(callback): ll_assert(get_aid() == gcdata.active_thread, diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -837,6 +837,7 @@ self.rm.possibly_free_vars_for_op(op) def _fastpath_malloc(self, op, descr): + XXX assert isinstance(descr, BaseSizeDescr) gc_ll_descr = self.assembler.cpu.gc_ll_descr self.rm.force_allocate_reg(op.result, selected_reg=eax) @@ -859,7 +860,8 @@ def consider_new(self, op): gc_ll_descr = self.assembler.cpu.gc_ll_descr - if gc_ll_descr.can_inline_malloc(op.getdescr()): + os.write(2, "fixme: consider_new\n") + if 0 and gc_ll_descr.can_inline_malloc(op.getdescr()): # XXX self._fastpath_malloc(op, op.getdescr()) else: args = gc_ll_descr.args_for_new(op.getdescr()) @@ -869,7 +871,8 @@ def consider_new_with_vtable(self, op): classint = op.getarg(0).getint() descrsize = heaptracker.vtable2descr(self.assembler.cpu, classint) - if self.assembler.cpu.gc_ll_descr.can_inline_malloc(descrsize): + os.write(2, "fixme: consider_new_with_vtable\n") + if 0 and 
self.assembler.cpu.gc_ll_descr.can_inline_malloc(descrsize): # XXX self._fastpath_malloc(op, descrsize) self.assembler.set_vtable(eax, imm(classint)) # result of fastpath malloc is in eax @@ -1132,7 +1135,7 @@ # call memcpy() self.rm.before_call() self.xrm.before_call() - self.assembler._emit_call(imm(self.assembler.memcpy_addr), + self.assembler._emit_call(-1, imm(self.assembler.memcpy_addr), [dstaddr_loc, srcaddr_loc, length_loc]) self.rm.possibly_free_var(length_box) self.rm.possibly_free_var(dstaddr_box) @@ -1205,7 +1208,7 @@ for v, val in self.fm.frame_bindings.items(): if (isinstance(v, BoxPtr) and self.rm.stays_alive(v)): assert isinstance(val, StackLoc) - gcrootmap.add_ebp_offset(shape, get_ebp_ofs(val.position)) + gcrootmap.add_frame_offset(shape, get_ebp_ofs(val.position)) for v, reg in self.rm.reg_bindings.items(): if reg is eax: continue # ok to ignore this one diff --git a/pypy/jit/backend/model.py b/pypy/jit/backend/model.py --- a/pypy/jit/backend/model.py +++ b/pypy/jit/backend/model.py @@ -23,18 +23,22 @@ self.fail_descr_list = [] self.fail_descr_free_list = [] + def reserve_some_free_fail_descr_number(self): + lst = self.fail_descr_list + if len(self.fail_descr_free_list) > 0: + n = self.fail_descr_free_list.pop() + assert lst[n] is None + else: + n = len(lst) + lst.append(None) + return n + def get_fail_descr_number(self, descr): assert isinstance(descr, history.AbstractFailDescr) n = descr.index if n < 0: - lst = self.fail_descr_list - if len(self.fail_descr_free_list) > 0: - n = self.fail_descr_free_list.pop() - assert lst[n] is None - lst[n] = descr - else: - n = len(lst) - lst.append(descr) + n = self.reserve_some_free_fail_descr_number() + self.fail_descr_list[n] = descr descr.index = n return n @@ -294,6 +298,13 @@ def record_faildescr_index(self, n): self.faildescr_indices.append(n) + def reserve_and_record_some_faildescr_index(self): + # like record_faildescr_index(), but invent and return a new, + # unused faildescr index + n = 
self.cpu.reserve_some_free_fail_descr_number() + self.record_faildescr_index(n) + return n + def compiling_a_bridge(self): self.cpu.total_compiled_bridges += 1 self.bridges_count += 1 diff --git a/pypy/jit/backend/x86/runner.py b/pypy/jit/backend/x86/runner.py --- a/pypy/jit/backend/x86/runner.py +++ b/pypy/jit/backend/x86/runner.py @@ -19,6 +19,8 @@ def __init__(self, rtyper, stats, opts=None, translate_support_code=False, gcdescr=None): + if gcdescr is not None: + gcdescr.force_index_ofs = FORCE_INDEX_OFS AbstractLLCPU.__init__(self, rtyper, stats, opts, translate_support_code, gcdescr) @@ -127,7 +129,7 @@ fail_index = rffi.cast(TP, addr_of_force_index)[0] assert fail_index >= 0, "already forced!" faildescr = self.get_fail_descr_from_number(fail_index) - rffi.cast(TP, addr_of_force_index)[0] = -1 + rffi.cast(TP, addr_of_force_index)[0] = ~fail_index frb = self.assembler._find_failure_recovery_bytecode(faildescr) bytecode = rffi.cast(rffi.UCHARP, frb) # start of "no gc operation!" block diff --git a/pypy/rpython/lltypesystem/lloperation.py b/pypy/rpython/lltypesystem/lloperation.py --- a/pypy/rpython/lltypesystem/lloperation.py +++ b/pypy/rpython/lltypesystem/lloperation.py @@ -487,7 +487,9 @@ # ^^^ returns an address of nursery free pointer, for later modifications 'gc_adr_of_nursery_top' : LLOp(), # ^^^ returns an address of pointer, since it can change at runtime - + 'gc_adr_of_root_stack_top': LLOp(), + # ^^^ returns the address of gcdata.root_stack_top (for shadowstack only) + # experimental operations in support of thread cloning, only # implemented by the Mark&Sweep GC 'gc_x_swap_pool': LLOp(canraise=(MemoryError,), canunwindgc=True), diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -171,25 +171,42 @@ self.float_const_abs_addr = float_constants + 16 def _build_malloc_fixedsize_slowpath(self): + # With asmgcc, we need two helpers, so that we 
can write two CALL + # instructions in assembler, with a mark_gc_roots in between. + # With shadowstack, this is not needed, so we produce a single helper. + gcrootmap = self.cpu.gc_ll_descr.gcrootmap + # # ---------- first helper for the slow path of malloc ---------- mc = codebuf.MachineCodeBlockWrapper() if self.cpu.supports_floats: # save the XMM registers in for i in range(self.cpu.NUM_REGS):# the *caller* frame, from esp+8 mc.MOVSD_sx((WORD*2)+8*i, i) mc.SUB_rr(edx.value, eax.value) # compute the size we want - if IS_X86_32: - mc.MOV_sr(WORD, edx.value) # save it as the new argument - elif IS_X86_64: - # rdi can be clobbered: its content was forced to the stack - # by _fastpath_malloc(), like all other save_around_call_regs. - mc.MOV_rr(edi.value, edx.value) - addr = self.cpu.gc_ll_descr.get_malloc_fixedsize_slowpath_addr() - mc.JMP(imm(addr)) # tail call to the real malloc - rawstart = mc.materialize(self.cpu.asmmemmgr, []) - self.malloc_fixedsize_slowpath1 = rawstart - # ---------- second helper for the slow path of malloc ---------- - mc = codebuf.MachineCodeBlockWrapper() + # + if gcrootmap.is_shadow_stack: + # ---- shadowstack ---- + mc.SUB_ri(esp.value, 16 - WORD) # stack alignment of 16 bytes + if IS_X86_32: + mc.MOV_sr(0, edx.value) # push argument + elif IS_X86_64: + mc.MOV_rr(edi.value, edx.value) + mc.CALL(imm(addr)) + mc.ADD_ri(esp.value, 16 - WORD) + else: + # ---- asmgcc ---- + if IS_X86_32: + mc.MOV_sr(WORD, edx.value) # save it as the new argument + elif IS_X86_64: + # rdi can be clobbered: its content was forced to the stack + # by _fastpath_malloc(), like all other save_around_call_regs. 
+ mc.MOV_rr(edi.value, edx.value) + mc.JMP(imm(addr)) # tail call to the real malloc + rawstart = mc.materialize(self.cpu.asmmemmgr, []) + self.malloc_fixedsize_slowpath1 = rawstart + # ---------- second helper for the slow path of malloc ---------- + mc = codebuf.MachineCodeBlockWrapper() + # if self.cpu.supports_floats: # restore the XMM registers for i in range(self.cpu.NUM_REGS):# from where they were saved mc.MOVSD_xs(i, (WORD*2)+8*i) @@ -550,6 +567,10 @@ for regloc in self.cpu.CALLEE_SAVE_REGISTERS: self.mc.PUSH_r(regloc.value) + gcrootmap = self.cpu.gc_ll_descr.gcrootmap + if gcrootmap and gcrootmap.is_shadow_stack: + self._call_header_shadowstack(gcrootmap) + def _call_header_with_stack_check(self): if self.stack_check_slowpath == 0: pass # no stack check (e.g. not translated) @@ -571,12 +592,32 @@ def _call_footer(self): self.mc.LEA_rb(esp.value, -len(self.cpu.CALLEE_SAVE_REGISTERS) * WORD) + gcrootmap = self.cpu.gc_ll_descr.gcrootmap + if gcrootmap and gcrootmap.is_shadow_stack: + self._call_footer_shadowstack(gcrootmap) + for i in range(len(self.cpu.CALLEE_SAVE_REGISTERS)-1, -1, -1): self.mc.POP_r(self.cpu.CALLEE_SAVE_REGISTERS[i].value) self.mc.POP_r(ebp.value) self.mc.RET() + def _call_header_shadowstack(self, gcrootmap): + # we need to put two words into the shadowstack: the MARKER + # and the address of the frame (ebp, actually) + rst = gcrootmap.get_root_stack_top_addr() + assert rx86.fits_in_32bits(rst) + self.mc.MOV_rj(eax.value, rst) # MOV eax, [rootstacktop] + self.mc.LEA_rm(edx.value, (eax.value, 2*WORD)) # LEA edx, [eax+2*WORD] + self.mc.MOV_mi((eax.value, 0), gcrootmap.MARKER) # MOV [eax], MARKER + self.mc.MOV_mr((eax.value, WORD), ebp.value) # MOV [eax+WORD], ebp + self.mc.MOV_jr(rst, edx.value) # MOV [rootstacktop], edx + + def _call_footer_shadowstack(self, gcrootmap): + rst = gcrootmap.get_root_stack_top_addr() + assert rx86.fits_in_32bits(rst) + self.mc.SUB_ji8(rst, 2*WORD) # SUB [rootstacktop], 2*WORD + def 
_assemble_bootstrap_direct_call(self, arglocs, jmppos, stackdepth): if IS_X86_64: return self._assemble_bootstrap_direct_call_64(arglocs, jmppos, stackdepth) @@ -896,7 +937,7 @@ self.implement_guard(guard_token, checkfalsecond) return genop_cmp_guard_float - def _emit_call(self, x, arglocs, start=0, tmp=eax): + def _emit_call(self, force_index, x, arglocs, start=0, tmp=eax): if IS_X86_64: return self._emit_call_64(x, arglocs, start) @@ -924,9 +965,9 @@ self._regalloc.reserve_param(p//WORD) # x is a location self.mc.CALL(x) - self.mark_gc_roots() + self.mark_gc_roots(force_index) - def _emit_call_64(self, x, arglocs, start=0): + def _emit_call_64(self, force_index, x, arglocs, start=0): src_locs = [] dst_locs = [] xmm_src_locs = [] @@ -984,12 +1025,27 @@ self._regalloc.reserve_param(len(pass_on_stack)) self.mc.CALL(x) - self.mark_gc_roots() + self.mark_gc_roots(force_index) def call(self, addr, args, res): - self._emit_call(imm(addr), args) + force_index = self.write_new_force_index() + self._emit_call(force_index, imm(addr), args) assert res is eax + def write_new_force_index(self): + # for shadowstack only: get a new, unused force_index number and + # write it to FORCE_INDEX_OFS. Used to record the call shape + # (i.e. where the GC pointers are in the stack) around a CALL + # instruction that doesn't already have a force_index. 
+ gcrootmap = self.cpu.gc_ll_descr.gcrootmap + if gcrootmap and gcrootmap.is_shadow_stack: + clt = self.currently_compiling_loop.compiled_loop_token + force_index = clt.reserve_and_record_some_faildescr_index() + self.mc.MOV_bi(FORCE_INDEX_OFS, force_index) + return force_index + else: + return 0 + genop_int_neg = _unaryop("NEG") genop_int_invert = _unaryop("NOT") genop_int_add = _binaryop("ADD", True) @@ -1796,8 +1852,9 @@ tmp = ecx else: tmp = eax - - self._emit_call(x, arglocs, 3, tmp=tmp) + + force_index = self.write_new_force_index() + self._emit_call(force_index, x, arglocs, 3, tmp=tmp) if IS_X86_32 and isinstance(resloc, StackLoc) and resloc.width == 8: # a float or a long long return @@ -1842,8 +1899,8 @@ assert len(arglocs) - 2 == len(descr._x86_arglocs[0]) # # Write a call to the direct_bootstrap_code of the target assembler - self._emit_call(imm(descr._x86_direct_bootstrap_code), arglocs, 2, - tmp=eax) + self._emit_call(fail_index, imm(descr._x86_direct_bootstrap_code), + arglocs, 2, tmp=eax) if op.result is None: assert result_loc is None value = self.cpu.done_with_this_frame_void_v @@ -1868,7 +1925,7 @@ jd = descr.outermost_jitdriver_sd assert jd is not None asm_helper_adr = self.cpu.cast_adr_to_int(jd.assembler_helper_adr) - self._emit_call(imm(asm_helper_adr), [eax, arglocs[1]], 0, + self._emit_call(fail_index, imm(asm_helper_adr), [eax, arglocs[1]], 0, tmp=ecx) if IS_X86_32 and isinstance(result_loc, StackLoc) and result_loc.type == FLOAT: self.mc.FSTP_b(result_loc.value) @@ -1990,11 +2047,16 @@ not_implemented("not implemented operation (guard): %s" % op.getopname()) - def mark_gc_roots(self): + def mark_gc_roots(self, force_index): + if force_index < 0: + return # not needed gcrootmap = self.cpu.gc_ll_descr.gcrootmap if gcrootmap: mark = self._regalloc.get_mark_gc_roots(gcrootmap) - self.mc.insert_gcroot_marker(mark) + if gcrootmap.is_shadow_stack: + gcrootmap.write_callshape(mark, force_index) + else: + self.mc.insert_gcroot_marker(mark) def 
target_arglocs(self, loop_token): return loop_token._x86_arglocs @@ -2025,11 +2087,16 @@ # result in EAX; slowpath_addr2 additionally returns in EDX a # copy of heap(nursery_free_adr), so that the final MOV below is # a no-op. - slowpath_addr1 = self.malloc_fixedsize_slowpath1 + # reserve room for the argument to the real malloc and the # 8 saved XMM regs self._regalloc.reserve_param(1+16) - self.mc.CALL(imm(slowpath_addr1)) + + gcrootmap = self.cpu.gc_ll_descr.gcrootmap + if not gcrootmap.is_shadow_stack: + # there are two helpers to call only with asmgcc + slowpath_addr1 = self.malloc_fixedsize_slowpath1 + self.mc.CALL(imm(slowpath_addr1)) self.mark_gc_roots() slowpath_addr2 = self.malloc_fixedsize_slowpath2 self.mc.CALL(imm(slowpath_addr2)) @@ -2038,6 +2105,7 @@ assert 0 < offset <= 127 self.mc.overwrite(jmp_adr-1, chr(offset)) # on 64-bits, 'tid' is a value that fits in 31 bits + assert rx86.fits_in_32bits(tid) self.mc.MOV_mi((eax.value, 0), tid) self.mc.MOV(heap(nursery_free_adr), edx) diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -127,7 +127,7 @@ # ______________________________________________________________________ -class TestCompileFramework(object): +class CompileFrameworkTests(object): # Test suite using (so far) the minimark GC. 
def setup_class(cls): funcs = [] @@ -178,7 +178,7 @@ try: GcLLDescr_framework.DEBUG = True cls.cbuilder = compile(get_entry(allfuncs), DEFL_GC, - gcrootfinder="asmgcc", jit=True) + gcrootfinder=cls.gcrootfinder, jit=True) finally: GcLLDescr_framework.DEBUG = OLD_DEBUG @@ -576,3 +576,10 @@ def test_compile_framework_minimal_size_in_nursery(self): self.run('compile_framework_minimal_size_in_nursery') + + +class TestShadowStack(CompileFrameworkTests): + gcrootfinder = "shadowstack" + +class TestAsmGcc(CompileFrameworkTests): + gcrootfinder = "asmgcc" From commits-noreply at bitbucket.org Thu Mar 31 11:43:27 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:27 +0200 (CEST) Subject: [pypy-svn] pypy default: Simplify, clean-up and document this function a bit more. Message-ID: <20110331094327.A3EC4282BE8@codespeak.net> Author: Armin Rigo Branch: Changeset: r43037:622aebc7b967 Date: 2011-03-31 11:40 +0200 http://bitbucket.org/pypy/pypy/changeset/622aebc7b967/ Log: Simplify, clean-up and document this function a bit more. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -200,18 +200,26 @@ self.malloc_fixedsize_slowpath2 = rawstart def _build_stack_check_slowpath(self): - from pypy.rlib import rstack _, _, slowpathaddr = self.cpu.insert_stack_check() if slowpathaddr == 0 or self.cpu.exit_frame_with_exception_v < 0: return # no stack check (for tests, or non-translated) # + # make a "function" that is called immediately at the start of + # an assembler function. In particular, the stack looks like: + # + # | ... 
| <-- aligned to a multiple of 16 + # | retaddr of caller | + # | my own retaddr | <-- esp + # +---------------------+ + # mc = codebuf.MachineCodeBlockWrapper() - mc.PUSH_r(ebp.value) - mc.MOV_rr(ebp.value, esp.value) # - if IS_X86_64: + if IS_X86_32: + stack_size = WORD + elif IS_X86_64: # on the x86_64, we have to save all the registers that may # have been used to pass arguments + stack_size = WORD + 6*WORD + 8*8 for reg in [edi, esi, edx, ecx, r8, r9]: mc.PUSH_r(reg.value) mc.SUB_ri(esp.value, 8*8) @@ -220,11 +228,12 @@ # if IS_X86_32: mc.LEA_rb(eax.value, +8) + mc.PUSH_r(eax.value) # alignment mc.PUSH_r(eax.value) elif IS_X86_64: mc.LEA_rb(edi.value, +16) - mc.AND_ri(esp.value, -16) # + # esp is now aligned to a multiple of 16 again mc.CALL(imm(slowpathaddr)) # mc.MOV(eax, heap(self.cpu.pos_exception())) @@ -236,12 +245,10 @@ # restore the registers for i in range(7, -1, -1): mc.MOVSD_xs(i, 8*i) - for i, reg in [(6, r9), (5, r8), (4, ecx), - (3, edx), (2, esi), (1, edi)]: - mc.MOV_rb(reg.value, -8*i) + mc.ADD_ri(esp.value, 8*8) + for reg in [r9, r8, ecx, edx, esi, edi]: + mc.POP_r(reg.value) # - mc.MOV_rr(esp.value, ebp.value) - mc.POP_r(ebp.value) mc.RET() # # patch the JNZ above @@ -266,9 +273,7 @@ # function, and will instead return to the caller's caller. Note # also that we completely ignore the saved arguments, because we # are interrupting the function. 
- mc.MOV_rr(esp.value, ebp.value) - mc.POP_r(ebp.value) - mc.ADD_ri(esp.value, WORD) + mc.ADD_ri(esp.value, stack_size) mc.RET() # rawstart = mc.materialize(self.cpu.asmmemmgr, []) From commits-noreply at bitbucket.org Thu Mar 31 11:43:28 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:28 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: hg merge default Message-ID: <20110331094328.A2A2D282BE8@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43038:ce1ffe4565e2 Date: 2011-03-31 11:41 +0200 http://bitbucket.org/pypy/pypy/changeset/ce1ffe4565e2/ Log: hg merge default diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -217,18 +217,26 @@ self.malloc_fixedsize_slowpath2 = rawstart def _build_stack_check_slowpath(self): - from pypy.rlib import rstack _, _, slowpathaddr = self.cpu.insert_stack_check() if slowpathaddr == 0 or self.cpu.exit_frame_with_exception_v < 0: return # no stack check (for tests, or non-translated) # + # make a "function" that is called immediately at the start of + # an assembler function. In particular, the stack looks like: + # + # | ... 
| <-- aligned to a multiple of 16 + # | retaddr of caller | + # | my own retaddr | <-- esp + # +---------------------+ + # mc = codebuf.MachineCodeBlockWrapper() - mc.PUSH_r(ebp.value) - mc.MOV_rr(ebp.value, esp.value) # - if IS_X86_64: + if IS_X86_32: + stack_size = WORD + elif IS_X86_64: # on the x86_64, we have to save all the registers that may # have been used to pass arguments + stack_size = WORD + 6*WORD + 8*8 for reg in [edi, esi, edx, ecx, r8, r9]: mc.PUSH_r(reg.value) mc.SUB_ri(esp.value, 8*8) @@ -237,11 +245,12 @@ # if IS_X86_32: mc.LEA_rb(eax.value, +8) + mc.PUSH_r(eax.value) # alignment mc.PUSH_r(eax.value) elif IS_X86_64: mc.LEA_rb(edi.value, +16) - mc.AND_ri(esp.value, -16) # + # esp is now aligned to a multiple of 16 again mc.CALL(imm(slowpathaddr)) # mc.MOV(eax, heap(self.cpu.pos_exception())) @@ -253,12 +262,10 @@ # restore the registers for i in range(7, -1, -1): mc.MOVSD_xs(i, 8*i) - for i, reg in [(6, r9), (5, r8), (4, ecx), - (3, edx), (2, esi), (1, edi)]: - mc.MOV_rb(reg.value, -8*i) + mc.ADD_ri(esp.value, 8*8) + for reg in [r9, r8, ecx, edx, esi, edi]: + mc.POP_r(reg.value) # - mc.MOV_rr(esp.value, ebp.value) - mc.POP_r(ebp.value) mc.RET() # # patch the JNZ above @@ -283,9 +290,7 @@ # function, and will instead return to the caller's caller. Note # also that we completely ignore the saved arguments, because we # are interrupting the function. 
- mc.MOV_rr(esp.value, ebp.value) - mc.POP_r(ebp.value) - mc.ADD_ri(esp.value, WORD) + mc.ADD_ri(esp.value, stack_size) mc.RET() # rawstart = mc.materialize(self.cpu.asmmemmgr, []) From commits-noreply at bitbucket.org Thu Mar 31 11:43:29 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 11:43:29 +0200 (CEST) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110331094329.0E659282BE9@codespeak.net> Author: Armin Rigo Branch: Changeset: r43039:5538845eb38e Date: 2011-03-31 11:42 +0200 http://bitbucket.org/pypy/pypy/changeset/5538845eb38e/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 31 11:55:37 2011 From: commits-noreply at bitbucket.org (amauryfa) Date: Thu, 31 Mar 2011 11:55:37 +0200 (CEST) Subject: [pypy-svn] pypy default: Fix one test suite failure in test_import on Windows Message-ID: <20110331095537.80A90282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43040:558c510f7303 Date: 2011-03-31 11:30 +0200 http://bitbucket.org/pypy/pypy/changeset/558c510f7303/ Log: Fix one test suite failure in test_import on Windows diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -177,6 +177,14 @@ import a assert a == a0 + def test_trailing_slash(self): + import sys + try: + sys.path[0] += '/' + import a + finally: + sys.path[0] = sys.path[0].rstrip('/') + def test_import_pkg(self): import sys import pkg diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -84,7 +84,9 @@ else: # XXX that's slow def case_ok(filename): - index = filename.rfind(os.sep) + index1 = filename.rfind(os.sep) + index2 = filename.rfind(os.altsep) + index = max(index1, index2) if index < 0: directory = os.curdir else: From commits-noreply at bitbucket.org Thu Mar 31 11:55:38 2011 From: commits-noreply at bitbucket.org 
(amauryfa) Date: Thu, 31 Mar 2011 11:55:38 +0200 (CEST) Subject: [pypy-svn] pypy default: Let pypy import .pyw files on Windows Message-ID: <20110331095538.963C8282B90@codespeak.net> Author: Amaury Forgeot d'Arc Branch: Changeset: r43041:dedfebb34586 Date: 2011-03-31 11:53 +0200 http://bitbucket.org/pypy/pypy/changeset/dedfebb34586/ Log: Let pypy import .pyw files on Windows diff --git a/pypy/module/imp/test/test_import.py b/pypy/module/imp/test/test_import.py --- a/pypy/module/imp/test/test_import.py +++ b/pypy/module/imp/test/test_import.py @@ -6,6 +6,7 @@ from pypy.tool.udir import udir from pypy.rlib import streamio from pypy.conftest import gettestobjspace +import pytest import sys, os import tempfile, marshal @@ -109,6 +110,14 @@ p.join('lone.pyc').write(p.join('x.pyc').read(mode='rb'), mode='wb') + # create a .pyw file + p = setuppkg("windows", x = "x = 78") + try: + p.join('x.pyw').remove() + except py.error.ENOENT: + pass + p.join('x.py').rename(p.join('x.pyw')) + return str(root) @@ -333,6 +342,11 @@ import compiled.x assert compiled.x == sys.modules.get('compiled.x') + @pytest.mark.skipif("sys.platform != 'win32'") + def test_pyw(self): + import windows.x + assert windows.x.__file__.endswith('x.pyw') + def test_cannot_write_pyc(self): import sys, os p = os.path.join(sys.path[-1], 'readonly') diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -58,6 +58,12 @@ if os.path.exists(pyfile) and case_ok(pyfile): return PY_SOURCE, ".py", "U" + # on Windows, also check for a .pyw file + if sys.platform == 'win32': + pyfile = filepart + ".pyw" + if os.path.exists(pyfile) and case_ok(pyfile): + return PY_SOURCE, ".pyw", "U" + # The .py file does not exist. By default on PyPy, lonepycfiles # is False: if a .py file does not exist, we don't even try to # look for a lone .pyc file. 
From commits-noreply at bitbucket.org Thu Mar 31 13:55:52 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:55:52 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: In-progress. Message-ID: <20110331115552.C237C282B90@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43042:cf2056d85fa5 Date: 2011-03-31 13:24 +0200 http://bitbucket.org/pypy/pypy/changeset/cf2056d85fa5/ Log: In-progress. diff --git a/pypy/jit/backend/llsupport/test/test_gc.py b/pypy/jit/backend/llsupport/test/test_gc.py --- a/pypy/jit/backend/llsupport/test/test_gc.py +++ b/pypy/jit/backend/llsupport/test/test_gc.py @@ -75,8 +75,8 @@ num2a = ((-num2|3) >> 7) | 128 num2b = (-num2|3) & 127 shape = gcrootmap.get_basic_shape() - gcrootmap.add_ebp_offset(shape, num1) - gcrootmap.add_ebp_offset(shape, num2) + gcrootmap.add_frame_offset(shape, num1) + gcrootmap.add_frame_offset(shape, num2) assert shape == map(chr, [6, 7, 11, 15, 2, 0, num1a, num2b, num2a]) gcrootmap.add_callee_save_reg(shape, 1) assert shape == map(chr, [6, 7, 11, 15, 2, 0, num1a, num2b, num2a, @@ -181,51 +181,74 @@ p = rffi.cast(rffi.CArrayPtr(llmemory.Address), gcmapstart) p = rffi.ptradd(p, 2*i) return llmemory.cast_ptr_to_adr(p) - saved = gc.asmgcroot - try: - gc.asmgcroot = Asmgcroot() - # - gcrootmap = GcRootMap_asmgcc() - gcrootmap._gcmap = lltype.malloc(gcrootmap.GCMAP_ARRAY, - 1400, flavor='raw', - immortal=True) - for i in range(700): - gcrootmap._gcmap[i*2] = 1200000 + i - gcrootmap._gcmap[i*2+1] = i * 100 + 1 - assert gcrootmap._gcmap_deadentries == 0 - assert gc.asmgcroot.sort_count == 0 - gcrootmap._gcmap_maxlength = 1400 - gcrootmap._gcmap_curlength = 1400 - gcrootmap._gcmap_sorted = False - # - gcrootmap.freeing_block(1200000 - 100, 1200000) - assert gcrootmap._gcmap_deadentries == 0 - assert gc.asmgcroot.sort_count == 1 - # - gcrootmap.freeing_block(1200000 + 100, 1200000 + 200) - assert gcrootmap._gcmap_deadentries == 100 - assert gc.asmgcroot.sort_count == 1 - for i in 
range(700): - if 100 <= i < 200: - expected = 0 - else: - expected = i * 100 + 1 - assert gcrootmap._gcmap[i*2] == 1200000 + i - assert gcrootmap._gcmap[i*2+1] == expected - # - gcrootmap.freeing_block(1200000 + 650, 1200000 + 750) - assert gcrootmap._gcmap_deadentries == 150 - assert gc.asmgcroot.sort_count == 1 - for i in range(700): - if 100 <= i < 200 or 650 <= i: - expected = 0 - else: - expected = i * 100 + 1 - assert gcrootmap._gcmap[i*2] == 1200000 + i - assert gcrootmap._gcmap[i*2+1] == expected + asmgcroot = Asmgcroot() # - finally: - gc.asmgcroot = saved + gcrootmap = GcRootMap_asmgcc() + gcrootmap._gcmap = lltype.malloc(gcrootmap.GCMAP_ARRAY, + 1400, flavor='raw', + immortal=True) + for i in range(700): + gcrootmap._gcmap[i*2] = 1200000 + i + gcrootmap._gcmap[i*2+1] = i * 100 + 1 + assert gcrootmap._gcmap_deadentries == 0 + assert asmgcroot.sort_count == 0 + gcrootmap._gcmap_maxlength = 1400 + gcrootmap._gcmap_curlength = 1400 + gcrootmap._gcmap_sorted = False + # + gcrootmap.freeing_block(1200000 - 100, 1200000, asmgcroot=asmgcroot) + assert gcrootmap._gcmap_deadentries == 0 + assert asmgcroot.sort_count == 1 + # + gcrootmap.freeing_block(1200000 + 100, 1200000 + 200, + asmgcroot=asmgcroot) + assert gcrootmap._gcmap_deadentries == 100 + assert asmgcroot.sort_count == 1 + for i in range(700): + if 100 <= i < 200: + expected = 0 + else: + expected = i * 100 + 1 + assert gcrootmap._gcmap[i*2] == 1200000 + i + assert gcrootmap._gcmap[i*2+1] == expected + # + gcrootmap.freeing_block(1200000 + 650, 1200000 + 750, + asmgcroot=asmgcroot) + assert gcrootmap._gcmap_deadentries == 150 + assert asmgcroot.sort_count == 1 + for i in range(700): + if 100 <= i < 200 or 650 <= i: + expected = 0 + else: + expected = i * 100 + 1 + assert gcrootmap._gcmap[i*2] == 1200000 + i + assert gcrootmap._gcmap[i*2+1] == expected + +class TestGcRootMapShadowStack: + class FakeGcDescr: + force_index_ofs = 92 + + def test_make_shapes(self): + gcrootmap = 
GcRootMap_shadowstack(self.FakeGcDescr()) + shape = gcrootmap.get_basic_shape() + gcrootmap.add_frame_offset(shape, 16) + gcrootmap.add_frame_offset(shape, -24) + assert shape == [16, -24] + + def test_compress_callshape(self): + class FakeDataBlockWrapper: + def malloc_aligned(self, size, alignment): + assert alignment == 4 # even on 64-bits + assert size == 12 # 4*3, even on 64-bits + return rffi.cast(lltype.Signed, p) + datablockwrapper = FakeDataBlockWrapper() + p = lltype.malloc(rffi.CArray(rffi.INT), 3, immortal=True) + gcrootmap = GcRootMap_shadowstack(self.FakeGcDescr()) + shape = [16, -24] + gcrootmap.compress_callshape(shape, datablockwrapper) + assert rffi.cast(lltype.Signed, p[0]) == 16 + assert rffi.cast(lltype.Signed, p[1]) == -24 + assert rffi.cast(lltype.Signed, p[2]) == 0 class FakeLLOp(object): diff --git a/pypy/jit/backend/llsupport/regalloc.py b/pypy/jit/backend/llsupport/regalloc.py --- a/pypy/jit/backend/llsupport/regalloc.py +++ b/pypy/jit/backend/llsupport/regalloc.py @@ -1,5 +1,5 @@ -from pypy.jit.metainterp.history import Const, Box +from pypy.jit.metainterp.history import Const, Box, REF from pypy.rlib.objectmodel import we_are_translated class TempBox(Box): @@ -313,11 +313,12 @@ self.assembler.regalloc_mov(reg, to) # otherwise it's clean - def before_call(self, force_store=[], save_all_regs=False): + def before_call(self, force_store=[], save_all_regs=0): """ Spill registers before a call, as described by 'self.save_around_call_regs'. Registers are not spilled if they don't survive past the current operation, unless they - are listed in 'force_store'. + are listed in 'force_store'. 'save_all_regs' can be 0 (default), + 1 (save all), or 2 (save default+PTRs). 
""" for v, reg in self.reg_bindings.items(): if v not in force_store and self.longevity[v][1] <= self.position: @@ -325,9 +326,11 @@ del self.reg_bindings[v] self.free_regs.append(reg) continue - if not save_all_regs and reg not in self.save_around_call_regs: - # we don't have to - continue + if save_all_regs != 1 and reg not in self.save_around_call_regs: + if save_all_regs == 0: + continue # we don't have to + if v.type != REF: + continue # only save GC pointers self._sync_var(v) del self.reg_bindings[v] self.free_regs.append(reg) diff --git a/pypy/jit/backend/llsupport/gc.py b/pypy/jit/backend/llsupport/gc.py --- a/pypy/jit/backend/llsupport/gc.py +++ b/pypy/jit/backend/llsupport/gc.py @@ -317,8 +317,9 @@ return j @rgc.no_collect - def freeing_block(self, start, stop): - from pypy.rpython.memory.gctransform import asmgcroot + def freeing_block(self, start, stop, asmgcroot=None): + if asmgcroot is None: # always the case, except for tests + from pypy.rpython.memory.gctransform import asmgcroot # if [start:stop] is a raw block of assembler, then look up the # corresponding gcroot markers, and mark them as freed now in # self._gcmap by setting the 2nd address of every entry to NULL. 
@@ -441,9 +442,25 @@ return WORD else: # case of a MARKER followed by an assembler stack frame - self.follow_stack_frame_of_assembler(callback, gc, addr) + follow_stack_frame_of_assembler(callback, gc, addr) return 2 * WORD # + def follow_stack_frame_of_assembler(callback, gc, addr): + frame_addr = addr.signed[1] + addr = llmemory.cast_int_to_adr(frame_addr + self.force_index_ofs) + force_index = addr.signed[0] + if force_index < 0: + force_index = ~force_index + callshape = self._callshapes[force_index] + n = 0 + while True: + offset = rffi.cast(lltype.Signed, callshape[n]) + if offset == 0: + break + addr = llmemory.cast_int_to_adr(frame_addr + offset) + callback(gc, addr) + n += 1 + # jit2gc.update({ 'rootstackhook': collect_jit_stack_root, }) @@ -451,22 +468,6 @@ def initialize(self): pass - def follow_stack_frame_of_assembler(self, callback, gc, addr): - frame_addr = addr.signed[1] - addr = llmemory.cast_int_to_adr(frame_addr + self.force_index_ofs) - force_index = addr.signed[0] - if force_index < 0: - force_index = ~force_index - callshape = self._callshapes[force_index] - n = 0 - while True: - offset = rffi.cast(lltype.Signed, callshape[n]) - if offset == 0: - break - addr = llmemory.cast_int_to_adr(frame_addr + offset) - callback(gc, addr) - n += 1 - def get_basic_shape(self, is_64_bit=False): return [] diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -738,8 +738,12 @@ def _call(self, op, arglocs, force_store=[], guard_not_forced_op=None): save_all_regs = guard_not_forced_op is not None + self.xrm.before_call(force_store, save_all_regs=save_all_regs) + if not save_all_regs: + gcrootmap = gc_ll_descr = self.assembler.cpu.gc_ll_descr.gcrootmap + if gcrootmap and gcrootmap.is_shadow_stack: + save_all_regs = 2 self.rm.before_call(force_store, save_all_regs=save_all_regs) - self.xrm.before_call(force_store, save_all_regs=save_all_regs) if op.result 
is not None: if op.result.type == FLOAT: resloc = self.xrm.after_call(op.result) From commits-noreply at bitbucket.org Thu Mar 31 13:55:53 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:55:53 +0200 (CEST) Subject: [pypy-svn] pypy default: Oups. Fix. Message-ID: <20110331115553.61143282B90@codespeak.net> Author: Armin Rigo Branch: Changeset: r43043:8edbdba8b135 Date: 2011-03-31 13:54 +0200 http://bitbucket.org/pypy/pypy/changeset/8edbdba8b135/ Log: Oups. Fix. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -241,7 +241,9 @@ mc.J_il8(rx86.Conditions['NZ'], 0) jnz_location = mc.get_relative_pos() # - if IS_X86_64: + if IS_X86_32: + mc.ADD_ri(esp.value, 8) + elif IS_X86_64: # restore the registers for i in range(7, -1, -1): mc.MOVSD_xs(i, 8*i) From commits-noreply at bitbucket.org Thu Mar 31 13:55:53 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:55:53 +0200 (CEST) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110331115553.9E6CA282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r43044:d2a2b45e9a14 Date: 2011-03-31 13:55 +0200 http://bitbucket.org/pypy/pypy/changeset/d2a2b45e9a14/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 31 13:58:33 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:58:33 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Fix. Message-ID: <20110331115833.537AB282B90@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43045:cd0f89185a35 Date: 2011-03-31 13:57 +0200 http://bitbucket.org/pypy/pypy/changeset/cd0f89185a35/ Log: Fix. 
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -133,6 +133,7 @@ def setup(self, looptoken): assert self.memcpy_addr != 0, "setup_once() not called?" + self.currently_compiling_loop = looptoken self.pending_guard_tokens = [] self.mc = codebuf.MachineCodeBlockWrapper() if self.datablockwrapper is None: @@ -318,7 +319,6 @@ assert len(set(inputargs)) == len(inputargs) self.setup(looptoken) - self.currently_compiling_loop = looptoken funcname = self._find_debug_merge_point(operations) if log: self._register_counter() From commits-noreply at bitbucket.org Thu Mar 31 13:58:34 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:58:34 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Allow two versions of the tests (in the same executable): with or Message-ID: <20110331115834.06160282B90@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43046:6b92b28a1f50 Date: 2011-03-31 13:57 +0200 http://bitbucket.org/pypy/pypy/changeset/6b92b28a1f50/ Log: Allow two versions of the tests (in the same executable): with or without the malloc fast path. 
diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -6,7 +6,7 @@ """ import weakref, random -import py +import py, os from pypy.annotation import policy as annpolicy from pypy.rlib import rgc from pypy.rpython.lltypesystem import lltype, llmemory, rffi @@ -72,6 +72,17 @@ return entrypoint +def get_functions_to_patch(): + from pypy.jit.backend.llsupport import gc + # + can_inline_malloc1 = gc.GcLLDescr_framework.can_inline_malloc + def can_inline_malloc2(*args): + if os.getenv('PYPY_NO_INLINE_MALLOC'): + return False + return can_inline_malloc1(*args) + # + return {(gc.GcLLDescr_framework, 'can_inline_malloc'): can_inline_malloc2} + def compile(f, gc, **kwds): from pypy.annotation.listdef import s_list_of_strings from pypy.translator.translator import TranslationContext @@ -87,8 +98,21 @@ ann = t.buildannotator(policy=annpolicy.StrictAnnotatorPolicy()) ann.build_types(f, [s_list_of_strings], main_entry_point=True) t.buildrtyper().specialize() + if kwds['jit']: - apply_jit(t, enable_opts='') + patch = get_functions_to_patch() + old_value = {} + try: + for (obj, attr), value in patch.items(): + old_value[obj, attr] = getattr(obj, attr) + setattr(obj, attr, value) + # + apply_jit(t, enable_opts='') + # + finally: + for (obj, attr), oldvalue in old_value.items(): + setattr(obj, attr, oldvalue) + cbuilder = genc.CStandaloneBuilder(t, f, t.config) cbuilder.generate_source() cbuilder.compile() @@ -182,11 +206,17 @@ finally: GcLLDescr_framework.DEBUG = OLD_DEBUG + def _run(self, name, n, env): + res = self.cbuilder.cmdexec("%s %d" %(name, n), env=env) + assert int(res) == 20 + def run(self, name, n=2000): pypylog = udir.join('TestCompileFramework.log') - res = self.cbuilder.cmdexec("%s %d" %(name, n), - env={'PYPYLOG': ':%s' % pypylog}) - assert int(res) == 20 + env = {'PYPYLOG': ':%s' % pypylog, + 'PYPY_NO_INLINE_MALLOC': '1'} + 
self._run(name, n, env) + del env['PYPY_NO_INLINE_MALLOC'] + self._run(name, n, env) def run_orig(self, name, n, x): self.main_allfuncs(name, n, x) From commits-noreply at bitbucket.org Thu Mar 31 13:58:34 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 13:58:34 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: hg merge default Message-ID: <20110331115834.A2D89282B90@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43047:200474e5b413 Date: 2011-03-31 13:57 +0200 http://bitbucket.org/pypy/pypy/changeset/200474e5b413/ Log: hg merge default diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -259,7 +259,9 @@ mc.J_il8(rx86.Conditions['NZ'], 0) jnz_location = mc.get_relative_pos() # - if IS_X86_64: + if IS_X86_32: + mc.ADD_ri(esp.value, 8) + elif IS_X86_64: # restore the registers for i in range(7, -1, -1): mc.MOVSD_xs(i, 8*i) From commits-noreply at bitbucket.org Thu Mar 31 14:03:44 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 14:03:44 +0200 (CEST) Subject: [pypy-svn] pypy default: Pom pom pom. No tests :-(( Message-ID: <20110331120344.3B6A4282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r43048:52717999b838 Date: 2011-03-31 14:03 +0200 http://bitbucket.org/pypy/pypy/changeset/52717999b838/ Log: Pom pom pom. 
No tests :-(( diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -214,12 +214,11 @@ # mc = codebuf.MachineCodeBlockWrapper() # - if IS_X86_32: - stack_size = WORD - elif IS_X86_64: + stack_size = WORD + if IS_X86_64: # on the x86_64, we have to save all the registers that may # have been used to pass arguments - stack_size = WORD + 6*WORD + 8*8 + stack_size += 6*WORD + 8*8 for reg in [edi, esi, edx, ecx, r8, r9]: mc.PUSH_r(reg.value) mc.SUB_ri(esp.value, 8*8) @@ -228,6 +227,7 @@ # if IS_X86_32: mc.LEA_rb(eax.value, +8) + stack_size += 2*WORD mc.PUSH_r(eax.value) # alignment mc.PUSH_r(eax.value) elif IS_X86_64: @@ -242,7 +242,7 @@ jnz_location = mc.get_relative_pos() # if IS_X86_32: - mc.ADD_ri(esp.value, 8) + mc.ADD_ri(esp.value, 2*WORD) elif IS_X86_64: # restore the registers for i in range(7, -1, -1): From commits-noreply at bitbucket.org Thu Mar 31 14:06:08 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 14:06:08 +0200 (CEST) Subject: [pypy-svn] pypy default: from __future__ import with_statement Message-ID: <20110331120608.18BBB282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r43049:48995d6ea421 Date: 2011-03-31 14:05 +0200 http://bitbucket.org/pypy/pypy/changeset/48995d6ea421/ Log: from __future__ import with_statement diff --git a/pypy/module/imp/test/test_app.py b/pypy/module/imp/test/test_app.py --- a/pypy/module/imp/test/test_app.py +++ b/pypy/module/imp/test/test_app.py @@ -1,3 +1,4 @@ +from __future__ import with_statement MARKER = 42 class AppTestImpModule: From commits-noreply at bitbucket.org Thu Mar 31 14:06:08 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 14:06:08 +0200 (CEST) Subject: [pypy-svn] pypy default: Move sys.platform out of RPython code. 
Message-ID: <20110331120608.A40E8282BE7@codespeak.net> Author: Armin Rigo Branch: Changeset: r43050:85e67ad9239d Date: 2011-03-31 14:05 +0200 http://bitbucket.org/pypy/pypy/changeset/85e67ad9239d/ Log: Move sys.platform out of RPython code. diff --git a/pypy/module/imp/importing.py b/pypy/module/imp/importing.py --- a/pypy/module/imp/importing.py +++ b/pypy/module/imp/importing.py @@ -32,6 +32,7 @@ else: SO = ".so" DEFAULT_SOABI = 'pypy-14' +CHECK_FOR_PYW = sys.platform == 'win32' @specialize.memo() def get_so_extension(space): @@ -59,7 +60,7 @@ return PY_SOURCE, ".py", "U" # on Windows, also check for a .pyw file - if sys.platform == 'win32': + if CHECK_FOR_PYW: pyfile = filepart + ".pyw" if os.path.exists(pyfile) and case_ok(pyfile): return PY_SOURCE, ".pyw", "U" From commits-noreply at bitbucket.org Thu Mar 31 14:10:10 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 14:10:10 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: hg merge default Message-ID: <20110331121010.2059E282BE8@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43051:1fa79ad4c613 Date: 2011-03-31 14:09 +0200 http://bitbucket.org/pypy/pypy/changeset/1fa79ad4c613/ Log: hg merge default diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -232,12 +232,11 @@ # mc = codebuf.MachineCodeBlockWrapper() # - if IS_X86_32: - stack_size = WORD - elif IS_X86_64: + stack_size = WORD + if IS_X86_64: # on the x86_64, we have to save all the registers that may # have been used to pass arguments - stack_size = WORD + 6*WORD + 8*8 + stack_size += 6*WORD + 8*8 for reg in [edi, esi, edx, ecx, r8, r9]: mc.PUSH_r(reg.value) mc.SUB_ri(esp.value, 8*8) @@ -246,6 +245,7 @@ # if IS_X86_32: mc.LEA_rb(eax.value, +8) + stack_size += 2*WORD mc.PUSH_r(eax.value) # alignment mc.PUSH_r(eax.value) elif IS_X86_64: @@ -260,7 +260,7 @@ jnz_location = 
mc.get_relative_pos() # if IS_X86_32: - mc.ADD_ri(esp.value, 8) + mc.ADD_ri(esp.value, 2*WORD) elif IS_X86_64: # restore the registers for i in range(7, -1, -1): From commits-noreply at bitbucket.org Thu Mar 31 15:01:36 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 15:01:36 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Small fixes. Message-ID: <20110331130136.BB913282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43052:5c9ce3fce574 Date: 2011-03-31 12:54 +0000 http://bitbucket.org/pypy/pypy/changeset/5c9ce3fce574/ Log: Small fixes. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -133,7 +133,7 @@ def setup(self, looptoken): assert self.memcpy_addr != 0, "setup_once() not called?" - self.currently_compiling_loop = looptoken + self.current_clt = looptoken.compiled_loop_token self.pending_guard_tokens = [] self.mc = codebuf.MachineCodeBlockWrapper() if self.datablockwrapper is None: @@ -146,6 +146,7 @@ self.mc = None self.looppos = -1 self.currently_compiling_loop = None + self.current_clt = None def finish_once(self): if self._debug: @@ -321,6 +322,7 @@ assert len(set(inputargs)) == len(inputargs) self.setup(looptoken) + self.currently_compiling_loop = looptoken funcname = self._find_debug_merge_point(operations) if log: self._register_counter() @@ -1046,7 +1048,7 @@ # instruction that doesn't already have a force_index. 
gcrootmap = self.cpu.gc_ll_descr.gcrootmap if gcrootmap and gcrootmap.is_shadow_stack: - clt = self.currently_compiling_loop.compiled_loop_token + clt = self.current_clt force_index = clt.reserve_and_record_some_faildescr_index() self.mc.MOV_bi(FORCE_INDEX_OFS, force_index) return force_index diff --git a/pypy/jit/backend/llsupport/gc.py b/pypy/jit/backend/llsupport/gc.py --- a/pypy/jit/backend/llsupport/gc.py +++ b/pypy/jit/backend/llsupport/gc.py @@ -458,7 +458,8 @@ if offset == 0: break addr = llmemory.cast_int_to_adr(frame_addr + offset) - callback(gc, addr) + if gc.points_to_valid_gc_object(addr): + callback(gc, addr) n += 1 # jit2gc.update({ From commits-noreply at bitbucket.org Thu Mar 31 16:56:00 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 16:56:00 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Reimplement support for the fastpath mallocs. Message-ID: <20110331145600.5BCBA282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43053:db1614adf648 Date: 2011-03-31 16:37 +0200 http://bitbucket.org/pypy/pypy/changeset/db1614adf648/ Log: Reimplement support for the fastpath mallocs. diff --git a/pypy/jit/backend/x86/arch.py b/pypy/jit/backend/x86/arch.py --- a/pypy/jit/backend/x86/arch.py +++ b/pypy/jit/backend/x86/arch.py @@ -1,17 +1,28 @@ # Constants that depend on whether we are on 32-bit or 64-bit +# The frame size gives the standard fixed part at the start of +# every assembler frame: the saved value of some registers, +# one word for the force_index, and some extra space used only +# during a malloc that needs to go via its slow path. 
+ import sys if sys.maxint == (2**31 - 1): WORD = 4 - # ebp + ebx + esi + edi + force_index = 5 words - FRAME_FIXED_SIZE = 5 + # ebp + ebx + esi + edi + force_index + 4 extra words = 9 words + FRAME_FIXED_SIZE = 9 + FORCE_INDEX_OFS = -4*WORD IS_X86_32 = True IS_X86_64 = False else: WORD = 8 - # rbp + rbx + r12 + r13 + r14 + r15 + force_index = 7 words - FRAME_FIXED_SIZE = 7 + # rbp + rbx + r12 + r13 + r14 + r15 + force_index + 11 extra words = 18 + FRAME_FIXED_SIZE = 18 + FORCE_INDEX_OFS = -6*WORD IS_X86_32 = False IS_X86_64 = True -FORCE_INDEX_OFS = -(FRAME_FIXED_SIZE-1)*WORD +MY_COPY_OF_REGS = -(FRAME_FIXED_SIZE-1)*WORD +# The extra space has room for almost all registers, apart from eax and edx +# which are used in the malloc itself. They are: +# ecx, ebx, esi, edi [32 and 64 bits] +# r8, r9, r10, r12, r13, r14, r15 [64 bits only] diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -8,9 +8,8 @@ from pypy.rpython.lltypesystem.lloperation import llop from pypy.rpython.annlowlevel import llhelper from pypy.jit.backend.model import CompiledLoopToken -from pypy.jit.backend.x86.regalloc import (RegAlloc, X86RegisterManager, - X86XMMRegisterManager, get_ebp_ofs, - _get_scale) +from pypy.jit.backend.x86.regalloc import (RegAlloc, get_ebp_ofs, + _get_scale, gpr_reg_mgr_cls) from pypy.jit.backend.x86.arch import (FRAME_FIXED_SIZE, FORCE_INDEX_OFS, WORD, IS_X86_32, IS_X86_64) @@ -188,6 +187,8 @@ # if gcrootmap.is_shadow_stack: # ---- shadowstack ---- + for reg, ofs in gpr_reg_mgr_cls.REGLOC_TO_COPY_AREA_OFS.items(): + mc.MOV_br(ofs, reg.value) mc.SUB_ri(esp.value, 16 - WORD) # stack alignment of 16 bytes if IS_X86_32: mc.MOV_sr(0, edx.value) # push argument @@ -195,6 +196,8 @@ mc.MOV_rr(edi.value, edx.value) mc.CALL(imm(addr)) mc.ADD_ri(esp.value, 16 - WORD) + for reg, ofs in gpr_reg_mgr_cls.REGLOC_TO_COPY_AREA_OFS.items(): + mc.MOV_rb(reg.value, ofs) else: # 
---- asmgcc ---- if IS_X86_32: @@ -736,8 +739,8 @@ nonfloatlocs, floatlocs = arglocs self._call_header() stackadjustpos = self._patchable_stackadjust() - tmp = X86RegisterManager.all_regs[0] - xmmtmp = X86XMMRegisterManager.all_regs[0] + tmp = eax + xmmtmp = xmm0 self.mc.begin_reuse_scratch_register() for i in range(len(nonfloatlocs)): loc = nonfloatlocs[i] @@ -1961,7 +1964,7 @@ # load the return value from fail_boxes_xxx[0] kind = op.result.type if kind == FLOAT: - xmmtmp = X86XMMRegisterManager.all_regs[0] + xmmtmp = xmm0 adr = self.fail_boxes_float.get_addr_for_num(0) self.mc.MOVSD(xmmtmp, heap(adr)) self.mc.MOVSD(result_loc, xmmtmp) @@ -2056,12 +2059,12 @@ not_implemented("not implemented operation (guard): %s" % op.getopname()) - def mark_gc_roots(self, force_index): + def mark_gc_roots(self, force_index, use_copy_area=False): if force_index < 0: return # not needed gcrootmap = self.cpu.gc_ll_descr.gcrootmap if gcrootmap: - mark = self._regalloc.get_mark_gc_roots(gcrootmap) + mark = self._regalloc.get_mark_gc_roots(gcrootmap, use_copy_area) if gcrootmap.is_shadow_stack: gcrootmap.write_callshape(mark, force_index) else: @@ -2106,7 +2109,8 @@ # there are two helpers to call only with asmgcc slowpath_addr1 = self.malloc_fixedsize_slowpath1 self.mc.CALL(imm(slowpath_addr1)) - self.mark_gc_roots() + self.mark_gc_roots(self.write_new_force_index(), + use_copy_area=gcrootmap.is_shadow_stack) slowpath_addr2 = self.malloc_fixedsize_slowpath2 self.mc.CALL(imm(slowpath_addr2)) diff --git a/pypy/jit/backend/x86/rx86.py b/pypy/jit/backend/x86/rx86.py --- a/pypy/jit/backend/x86/rx86.py +++ b/pypy/jit/backend/x86/rx86.py @@ -543,6 +543,9 @@ # x87 instructions FSTP_b = insn('\xDD', orbyte(3<<3), stack_bp(1)) + # reserved as an illegal instruction + UD2 = insn('\x0F\x0B') + # ------------------------------ SSE2 ------------------------------ # Conversion diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ 
b/pypy/jit/backend/x86/regalloc.py @@ -19,7 +19,8 @@ from pypy.jit.backend.llsupport.descr import BaseCallDescr, BaseSizeDescr from pypy.jit.backend.llsupport.regalloc import FrameManager, RegisterManager,\ TempBox -from pypy.jit.backend.x86.arch import WORD, FRAME_FIXED_SIZE, IS_X86_32, IS_X86_64 +from pypy.jit.backend.x86.arch import WORD, FRAME_FIXED_SIZE +from pypy.jit.backend.x86.arch import IS_X86_32, IS_X86_64, MY_COPY_OF_REGS from pypy.rlib.rarithmetic import r_longlong, r_uint class X86RegisterManager(RegisterManager): @@ -34,6 +35,12 @@ esi: 2, edi: 3, } + REGLOC_TO_COPY_AREA_OFS = { + ecx: MY_COPY_OF_REGS + 0 * WORD, + ebx: MY_COPY_OF_REGS + 1 * WORD, + esi: MY_COPY_OF_REGS + 2 * WORD, + edi: MY_COPY_OF_REGS + 3 * WORD, + } def call_result_location(self, v): return eax @@ -61,6 +68,19 @@ r14: 4, r15: 5, } + REGLOC_TO_COPY_AREA_OFS = { + ecx: MY_COPY_OF_REGS + 0 * WORD, + ebx: MY_COPY_OF_REGS + 1 * WORD, + esi: MY_COPY_OF_REGS + 2 * WORD, + edi: MY_COPY_OF_REGS + 3 * WORD, + r8: MY_COPY_OF_REGS + 4 * WORD, + r9: MY_COPY_OF_REGS + 5 * WORD, + r10: MY_COPY_OF_REGS + 6 * WORD, + r12: MY_COPY_OF_REGS + 7 * WORD, + r13: MY_COPY_OF_REGS + 8 * WORD, + r14: MY_COPY_OF_REGS + 9 * WORD, + r15: MY_COPY_OF_REGS + 10 * WORD, + } class X86XMMRegisterManager(RegisterManager): @@ -117,6 +137,16 @@ else: return 1 +if WORD == 4: + gpr_reg_mgr_cls = X86RegisterManager + xmm_reg_mgr_cls = X86XMMRegisterManager +elif WORD == 8: + gpr_reg_mgr_cls = X86_64_RegisterManager + xmm_reg_mgr_cls = X86_64_XMMRegisterManager +else: + raise AssertionError("Word size should be 4 or 8") + + class RegAlloc(object): def __init__(self, assembler, translate_support_code=False): @@ -135,16 +165,6 @@ # compute longevity of variables longevity = self._compute_vars_longevity(inputargs, operations) self.longevity = longevity - # XXX - if cpu.WORD == 4: - gpr_reg_mgr_cls = X86RegisterManager - xmm_reg_mgr_cls = X86XMMRegisterManager - elif cpu.WORD == 8: - gpr_reg_mgr_cls = X86_64_RegisterManager - 
xmm_reg_mgr_cls = X86_64_XMMRegisterManager - else: - raise AssertionError("Word size should be 4 or 8") - self.rm = gpr_reg_mgr_cls(longevity, frame_manager = self.fm, assembler = self.assembler) @@ -841,20 +861,29 @@ self.rm.possibly_free_vars_for_op(op) def _fastpath_malloc(self, op, descr): - XXX assert isinstance(descr, BaseSizeDescr) gc_ll_descr = self.assembler.cpu.gc_ll_descr self.rm.force_allocate_reg(op.result, selected_reg=eax) - # We need to force-allocate each of save_around_call_regs now. - # The alternative would be to save and restore them around the - # actual call to malloc(), in the rare case where we need to do - # it; however, mark_gc_roots() would need to be adapted to know - # where the variables end up being saved. Messy. - for reg in self.rm.save_around_call_regs: - if reg is not eax: - tmp_box = TempBox() - self.rm.force_allocate_reg(tmp_box, selected_reg=reg) - self.rm.possibly_free_var(tmp_box) + + if gc_ll_descr.gcrootmap.is_shadow_stack: + # ---- shadowstack ---- + # We need edx as a temporary, but otherwise don't save any more + # register. See comments in _build_malloc_fixedsize_slowpath(). + tmp_box = TempBox() + self.rm.force_allocate_reg(tmp_box, selected_reg=edx) + self.rm.possibly_free_var(tmp_box) + else: + # ---- asmgcc ---- + # We need to force-allocate each of save_around_call_regs now. + # The alternative would be to save and restore them around the + # actual call to malloc(), in the rare case where we need to do + # it; however, mark_gc_roots() would need to be adapted to know + # where the variables end up being saved. Messy. 
+ for reg in self.rm.save_around_call_regs: + if reg is not eax: + tmp_box = TempBox() + self.rm.force_allocate_reg(tmp_box, selected_reg=reg) + self.rm.possibly_free_var(tmp_box) self.assembler.malloc_cond_fixedsize( gc_ll_descr.get_nursery_free_addr(), @@ -864,8 +893,7 @@ def consider_new(self, op): gc_ll_descr = self.assembler.cpu.gc_ll_descr - os.write(2, "fixme: consider_new\n") - if 0 and gc_ll_descr.can_inline_malloc(op.getdescr()): # XXX + if gc_ll_descr.can_inline_malloc(op.getdescr()): self._fastpath_malloc(op, op.getdescr()) else: args = gc_ll_descr.args_for_new(op.getdescr()) @@ -875,8 +903,7 @@ def consider_new_with_vtable(self, op): classint = op.getarg(0).getint() descrsize = heaptracker.vtable2descr(self.assembler.cpu, classint) - os.write(2, "fixme: consider_new_with_vtable\n") - if 0 and self.assembler.cpu.gc_ll_descr.can_inline_malloc(descrsize): # XXX + if self.assembler.cpu.gc_ll_descr.can_inline_malloc(descrsize): self._fastpath_malloc(op, descrsize) self.assembler.set_vtable(eax, imm(classint)) # result of fastpath malloc is in eax @@ -1207,7 +1234,7 @@ def consider_jit_debug(self, op): pass - def get_mark_gc_roots(self, gcrootmap): + def get_mark_gc_roots(self, gcrootmap, use_copy_area=False): shape = gcrootmap.get_basic_shape(IS_X86_64) for v, val in self.fm.frame_bindings.items(): if (isinstance(v, BoxPtr) and self.rm.stays_alive(v)): @@ -1217,8 +1244,14 @@ if reg is eax: continue # ok to ignore this one if (isinstance(v, BoxPtr) and self.rm.stays_alive(v)): - assert reg in self.rm.REGLOC_TO_GCROOTMAP_REG_INDEX - gcrootmap.add_callee_save_reg(shape, self.rm.REGLOC_TO_GCROOTMAP_REG_INDEX[reg]) + if use_copy_area: + assert reg in self.rm.REGLOC_TO_COPY_AREA_OFS + area_offset = self.rm.REGLOC_TO_COPY_AREA_OFS[reg] + gcrootmap.add_frame_offset(shape, area_offset) + else: + assert reg in self.rm.REGLOC_TO_GCROOTMAP_REG_INDEX + gcrootmap.add_callee_save_reg( + shape, self.rm.REGLOC_TO_GCROOTMAP_REG_INDEX[reg]) return 
gcrootmap.compress_callshape(shape, self.assembler.datablockwrapper) diff --git a/pypy/jit/backend/x86/test/test_zrpy_gc.py b/pypy/jit/backend/x86/test/test_zrpy_gc.py --- a/pypy/jit/backend/x86/test/test_zrpy_gc.py +++ b/pypy/jit/backend/x86/test/test_zrpy_gc.py @@ -77,8 +77,11 @@ # can_inline_malloc1 = gc.GcLLDescr_framework.can_inline_malloc def can_inline_malloc2(*args): - if os.getenv('PYPY_NO_INLINE_MALLOC'): - return False + try: + if os.environ['PYPY_NO_INLINE_MALLOC']: + return False + except KeyError: + pass return can_inline_malloc1(*args) # return {(gc.GcLLDescr_framework, 'can_inline_malloc'): can_inline_malloc2} @@ -215,7 +218,7 @@ env = {'PYPYLOG': ':%s' % pypylog, 'PYPY_NO_INLINE_MALLOC': '1'} self._run(name, n, env) - del env['PYPY_NO_INLINE_MALLOC'] + env['PYPY_NO_INLINE_MALLOC'] = '' self._run(name, n, env) def run_orig(self, name, n, x): From commits-noreply at bitbucket.org Thu Mar 31 16:56:01 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 16:56:01 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Bah. Two definitions of the constant FRAME_FIXED_SIZE, which were Message-ID: <20110331145601.4F1DE282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43054:944e0475cb5c Date: 2011-03-31 16:51 +0200 http://bitbucket.org/pypy/pypy/changeset/944e0475cb5c/ Log: Bah. Two definitions of the constant FRAME_FIXED_SIZE, which were now out of sync with each other. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -566,7 +566,7 @@ def _get_offset_of_ebp_from_esp(self, allocated_depth): # Given that [EBP] is where we saved EBP, i.e. in the last word # of our fixed frame, then the 'words' value is: - words = (self.cpu.FRAME_FIXED_SIZE - 1) + allocated_depth + words = (FRAME_FIXED_SIZE - 1) + allocated_depth # align, e.g. 
for Mac OS X aligned_words = align_stack_words(words+2)-2 # 2 = EIP+EBP return -WORD * aligned_words diff --git a/pypy/jit/backend/x86/runner.py b/pypy/jit/backend/x86/runner.py --- a/pypy/jit/backend/x86/runner.py +++ b/pypy/jit/backend/x86/runner.py @@ -149,7 +149,6 @@ WORD = 4 NUM_REGS = 8 CALLEE_SAVE_REGISTERS = [regloc.ebx, regloc.esi, regloc.edi] - FRAME_FIXED_SIZE = len(CALLEE_SAVE_REGISTERS) + 2 supports_longlong = True @@ -165,7 +164,6 @@ WORD = 8 NUM_REGS = 16 CALLEE_SAVE_REGISTERS = [regloc.ebx, regloc.r12, regloc.r13, regloc.r14, regloc.r15] - FRAME_FIXED_SIZE = len(CALLEE_SAVE_REGISTERS) + 2 def __init__(self, *args, **kwargs): assert sys.maxint == (2**63 - 1) From commits-noreply at bitbucket.org Thu Mar 31 16:56:03 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 16:56:03 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Reorder the locations, for no real good reason. Message-ID: <20110331145603.1170F282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43055:1400f4d7ecb2 Date: 2011-03-31 16:55 +0200 http://bitbucket.org/pypy/pypy/changeset/1400f4d7ecb2/ Log: Reorder the locations, for no real good reason. 
diff --git a/pypy/jit/backend/x86/arch.py b/pypy/jit/backend/x86/arch.py --- a/pypy/jit/backend/x86/arch.py +++ b/pypy/jit/backend/x86/arch.py @@ -8,20 +8,21 @@ import sys if sys.maxint == (2**31 - 1): WORD = 4 - # ebp + ebx + esi + edi + force_index + 4 extra words = 9 words + # ebp + ebx + esi + edi + 4 extra words + force_index = 9 words FRAME_FIXED_SIZE = 9 - FORCE_INDEX_OFS = -4*WORD + FORCE_INDEX_OFS = -8*WORD + MY_COPY_OF_REGS = -7*WORD IS_X86_32 = True IS_X86_64 = False else: WORD = 8 - # rbp + rbx + r12 + r13 + r14 + r15 + force_index + 11 extra words = 18 + # rbp + rbx + r12 + r13 + r14 + r15 + 11 extra words + force_index = 18 FRAME_FIXED_SIZE = 18 - FORCE_INDEX_OFS = -6*WORD + FORCE_INDEX_OFS = -17*WORD + MY_COPY_OF_REGS = -16*WORD IS_X86_32 = False IS_X86_64 = True -MY_COPY_OF_REGS = -(FRAME_FIXED_SIZE-1)*WORD # The extra space has room for almost all registers, apart from eax and edx # which are used in the malloc itself. They are: # ecx, ebx, esi, edi [32 and 64 bits] From commits-noreply at bitbucket.org Thu Mar 31 17:12:06 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 17:12:06 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Fixes for test_gc_integration.py. Message-ID: <20110331151206.35B0D282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43056:05cd7ad17f47 Date: 2011-03-31 17:02 +0200 http://bitbucket.org/pypy/pypy/changeset/05cd7ad17f47/ Log: Fixes for test_gc_integration.py. 
diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -185,7 +185,7 @@ mc.SUB_rr(edx.value, eax.value) # compute the size we want addr = self.cpu.gc_ll_descr.get_malloc_fixedsize_slowpath_addr() # - if gcrootmap.is_shadow_stack: + if gcrootmap is not None and gcrootmap.is_shadow_stack: # ---- shadowstack ---- for reg, ofs in gpr_reg_mgr_cls.REGLOC_TO_COPY_AREA_OFS.items(): mc.MOV_br(ofs, reg.value) @@ -2105,12 +2105,13 @@ self._regalloc.reserve_param(1+16) gcrootmap = self.cpu.gc_ll_descr.gcrootmap - if not gcrootmap.is_shadow_stack: + shadow_stack = (gcrootmap is not None and gcrootmap.is_shadow_stack) + if not shadow_stack: # there are two helpers to call only with asmgcc slowpath_addr1 = self.malloc_fixedsize_slowpath1 self.mc.CALL(imm(slowpath_addr1)) self.mark_gc_roots(self.write_new_force_index(), - use_copy_area=gcrootmap.is_shadow_stack) + use_copy_area=shadow_stack) slowpath_addr2 = self.malloc_fixedsize_slowpath2 self.mc.CALL(imm(slowpath_addr2)) diff --git a/pypy/jit/backend/x86/test/test_gc_integration.py b/pypy/jit/backend/x86/test/test_gc_integration.py --- a/pypy/jit/backend/x86/test/test_gc_integration.py +++ b/pypy/jit/backend/x86/test/test_gc_integration.py @@ -26,9 +26,10 @@ CPU = getcpuclass() class MockGcRootMap(object): + is_shadow_stack = False def get_basic_shape(self, is_64_bit): return ['shape'] - def add_ebp_offset(self, shape, offset): + def add_frame_offset(self, shape, offset): shape.append(offset) def add_callee_save_reg(self, shape, reg_index): index_to_name = { 1: 'ebx', 2: 'esi', 3: 'edi' } diff --git a/pypy/jit/backend/x86/regalloc.py b/pypy/jit/backend/x86/regalloc.py --- a/pypy/jit/backend/x86/regalloc.py +++ b/pypy/jit/backend/x86/regalloc.py @@ -865,7 +865,7 @@ gc_ll_descr = self.assembler.cpu.gc_ll_descr self.rm.force_allocate_reg(op.result, selected_reg=eax) - if gc_ll_descr.gcrootmap.is_shadow_stack: + if 
gc_ll_descr.gcrootmap and gc_ll_descr.gcrootmap.is_shadow_stack: # ---- shadowstack ---- # We need edx as a temporary, but otherwise don't save any more # register. See comments in _build_malloc_fixedsize_slowpath(). From commits-noreply at bitbucket.org Thu Mar 31 17:12:06 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 17:12:06 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Fix for rpython/lltypesystem/test/test_lloperation.py. Message-ID: <20110331151206.B99FE282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43057:090b53db6cfd Date: 2011-03-31 17:07 +0200 http://bitbucket.org/pypy/pypy/changeset/090b53db6cfd/ Log: Fix for rpython/lltypesystem/test/test_lloperation.py. diff --git a/pypy/rpython/llinterp.py b/pypy/rpython/llinterp.py --- a/pypy/rpython/llinterp.py +++ b/pypy/rpython/llinterp.py @@ -854,6 +854,9 @@ def op_gc_adr_of_nursery_free(self): raise NotImplementedError + def op_gc_adr_of_root_stack_top(self): + raise NotImplementedError + def op_gc_call_rtti_destructor(self, rtti, addr): if hasattr(rtti._obj, 'destructor_funcptr'): d = rtti._obj.destructor_funcptr From commits-noreply at bitbucket.org Thu Mar 31 18:00:34 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 18:00:34 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Revert moving asmgcroot to a local import. It was messy to test Message-ID: <20110331160034.2C523282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43058:f5e14e5fad38 Date: 2011-03-31 15:40 +0000 http://bitbucket.org/pypy/pypy/changeset/f5e14e5fad38/ Log: Revert moving asmgcroot to a local import. It was messy to test while keeping as valid RPython code. 
diff --git a/pypy/jit/backend/llsupport/test/test_gc.py b/pypy/jit/backend/llsupport/test/test_gc.py --- a/pypy/jit/backend/llsupport/test/test_gc.py +++ b/pypy/jit/backend/llsupport/test/test_gc.py @@ -9,7 +9,6 @@ from pypy.jit.tool.oparser import parse from pypy.rpython.lltypesystem.rclass import OBJECT, OBJECT_VTABLE from pypy.jit.metainterp.test.test_optimizeopt import equaloplists -from pypy.rpython.memory.gctransform import asmgcroot def test_boehm(): gc_ll_descr = GcLLDescr_boehm(None, None, None) @@ -181,48 +180,52 @@ p = rffi.cast(rffi.CArrayPtr(llmemory.Address), gcmapstart) p = rffi.ptradd(p, 2*i) return llmemory.cast_ptr_to_adr(p) - asmgcroot = Asmgcroot() + saved = gc.asmgcroot + try: + gc.asmgcroot = Asmgcroot() + # + gcrootmap = GcRootMap_asmgcc() + gcrootmap._gcmap = lltype.malloc(gcrootmap.GCMAP_ARRAY, + 1400, flavor='raw', + immortal=True) + for i in range(700): + gcrootmap._gcmap[i*2] = 1200000 + i + gcrootmap._gcmap[i*2+1] = i * 100 + 1 + assert gcrootmap._gcmap_deadentries == 0 + assert gc.asmgcroot.sort_count == 0 + gcrootmap._gcmap_maxlength = 1400 + gcrootmap._gcmap_curlength = 1400 + gcrootmap._gcmap_sorted = False + # + gcrootmap.freeing_block(1200000 - 100, 1200000) + assert gcrootmap._gcmap_deadentries == 0 + assert gc.asmgcroot.sort_count == 1 + # + gcrootmap.freeing_block(1200000 + 100, 1200000 + 200) + assert gcrootmap._gcmap_deadentries == 100 + assert gc.asmgcroot.sort_count == 1 + for i in range(700): + if 100 <= i < 200: + expected = 0 + else: + expected = i * 100 + 1 + assert gcrootmap._gcmap[i*2] == 1200000 + i + assert gcrootmap._gcmap[i*2+1] == expected + # + gcrootmap.freeing_block(1200000 + 650, 1200000 + 750) + assert gcrootmap._gcmap_deadentries == 150 + assert gc.asmgcroot.sort_count == 1 + for i in range(700): + if 100 <= i < 200 or 650 <= i: + expected = 0 + else: + expected = i * 100 + 1 + assert gcrootmap._gcmap[i*2] == 1200000 + i + assert gcrootmap._gcmap[i*2+1] == expected # - gcrootmap = GcRootMap_asmgcc() - 
gcrootmap._gcmap = lltype.malloc(gcrootmap.GCMAP_ARRAY, - 1400, flavor='raw', - immortal=True) - for i in range(700): - gcrootmap._gcmap[i*2] = 1200000 + i - gcrootmap._gcmap[i*2+1] = i * 100 + 1 - assert gcrootmap._gcmap_deadentries == 0 - assert asmgcroot.sort_count == 0 - gcrootmap._gcmap_maxlength = 1400 - gcrootmap._gcmap_curlength = 1400 - gcrootmap._gcmap_sorted = False - # - gcrootmap.freeing_block(1200000 - 100, 1200000, asmgcroot=asmgcroot) - assert gcrootmap._gcmap_deadentries == 0 - assert asmgcroot.sort_count == 1 - # - gcrootmap.freeing_block(1200000 + 100, 1200000 + 200, - asmgcroot=asmgcroot) - assert gcrootmap._gcmap_deadentries == 100 - assert asmgcroot.sort_count == 1 - for i in range(700): - if 100 <= i < 200: - expected = 0 - else: - expected = i * 100 + 1 - assert gcrootmap._gcmap[i*2] == 1200000 + i - assert gcrootmap._gcmap[i*2+1] == expected - # - gcrootmap.freeing_block(1200000 + 650, 1200000 + 750, - asmgcroot=asmgcroot) - assert gcrootmap._gcmap_deadentries == 150 - assert asmgcroot.sort_count == 1 - for i in range(700): - if 100 <= i < 200 or 650 <= i: - expected = 0 - else: - expected = i * 100 + 1 - assert gcrootmap._gcmap[i*2] == 1200000 + i - assert gcrootmap._gcmap[i*2+1] == expected + finally: + gc.asmgcroot = saved + class TestGcRootMapShadowStack: class FakeGcDescr: diff --git a/pypy/jit/backend/llsupport/gc.py b/pypy/jit/backend/llsupport/gc.py --- a/pypy/jit/backend/llsupport/gc.py +++ b/pypy/jit/backend/llsupport/gc.py @@ -16,6 +16,7 @@ from pypy.jit.backend.llsupport.descr import GcCache, get_field_descr from pypy.jit.backend.llsupport.descr import GcPtrFieldDescr from pypy.jit.backend.llsupport.descr import get_call_descr +from pypy.rpython.memory.gctransform import asmgcroot # ____________________________________________________________ @@ -317,9 +318,7 @@ return j @rgc.no_collect - def freeing_block(self, start, stop, asmgcroot=None): - if asmgcroot is None: # always the case, except for tests - from 
pypy.rpython.memory.gctransform import asmgcroot + def freeing_block(self, start, stop): # if [start:stop] is a raw block of assembler, then look up the # corresponding gcroot markers, and mark them as freed now in # self._gcmap by setting the 2nd address of every entry to NULL. From commits-noreply at bitbucket.org Thu Mar 31 18:25:55 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 31 Mar 2011 18:25:55 +0200 (CEST) Subject: [pypy-svn] pypy default: update the inlined pyrepl to revision 4d9968d3e7da Message-ID: <20110331162555.DD2CA282B9C@codespeak.net> Author: Antonio Cuni Branch: Changeset: r43059:ed0020d371de Date: 2011-03-31 18:18 +0200 http://bitbucket.org/pypy/pypy/changeset/ed0020d371de/ Log: update the inlined pyrepl to revision 4d9968d3e7da diff --git a/lib_pypy/pyrepl/unix_console.py b/lib_pypy/pyrepl/unix_console.py --- a/lib_pypy/pyrepl/unix_console.py +++ b/lib_pypy/pyrepl/unix_console.py @@ -292,6 +292,12 @@ self.__write_code(self._el) self.__write(newline[x:]) self.__posxy = len(newline), y + + if '\x1b' in newline: + # ANSI escape characters are present, so we can't assume + # anything about the position of the cursor. Moving the cursor + # to the left margin should work to get to a known position. + self.move_cursor(0, y) def __write(self, text): self.__buffer.append((text, 0)) diff --git a/lib_pypy/pyrepl/reader.py b/lib_pypy/pyrepl/reader.py --- a/lib_pypy/pyrepl/reader.py +++ b/lib_pypy/pyrepl/reader.py @@ -274,8 +274,12 @@ screeninfo.append((0, [])) self.lxy = p, ln prompt = self.get_prompt(ln, ll >= p >= 0) + while '\n' in prompt: + pre_prompt, _, prompt = prompt.partition('\n') + screen.append(pre_prompt) + screeninfo.append((0, [])) p -= ll + 1 - lp = len(prompt) + prompt, lp = self.process_prompt(prompt) l, l2 = disp_str(line) wrapcount = (len(l) + lp) / w if wrapcount == 0: @@ -297,6 +301,31 @@ screeninfo.append((0, [])) return screen + def process_prompt(self, prompt): + """ Process the prompt. 
+ + This means calculate the length of the prompt. The character \x01 + and \x02 are used to bracket ANSI control sequences and need to be + excluded from the length calculation. So also a copy of the prompt + is returned with these control characters removed. """ + + out_prompt = '' + l = len(prompt) + pos = 0 + while True: + s = prompt.find('\x01', pos) + if s == -1: + break + e = prompt.find('\x02', s) + if e == -1: + break + # Found start and end brackets, subtract from string length + l = l - (e-s+1) + out_prompt += prompt[pos:s] + prompt[s+1:e] + pos = e+1 + out_prompt += prompt[pos:] + return out_prompt, l + def bow(self, p=None): """Return the 0-based index of the word break preceding p most immediately. From commits-noreply at bitbucket.org Thu Mar 31 18:25:56 2011 From: commits-noreply at bitbucket.org (antocuni) Date: Thu, 31 Mar 2011 18:25:56 +0200 (CEST) Subject: [pypy-svn] pypy default: merge heads Message-ID: <20110331162556.37B2B282BE9@codespeak.net> Author: Antonio Cuni Branch: Changeset: r43060:a623e9cc3648 Date: 2011-03-31 18:25 +0200 http://bitbucket.org/pypy/pypy/changeset/a623e9cc3648/ Log: merge heads From commits-noreply at bitbucket.org Thu Mar 31 18:43:15 2011 From: commits-noreply at bitbucket.org (Guillebert Romain) Date: Thu, 31 Mar 2011 18:43:15 +0200 (CEST) Subject: [pypy-svn] pypy default: Adding os.wait3 to the standard library Message-ID: <20110331164315.0A2ED36C204@codespeak.net> Author: Guillebert Romain Branch: Changeset: r43061:18400e48e571 Date: 2011-03-31 17:42 +0100 http://bitbucket.org/pypy/pypy/changeset/18400e48e571/ Log: Adding os.wait3 to the standard library diff --git a/lib_pypy/_pypy_wait.py b/lib_pypy/_pypy_wait.py new file mode 100644 --- /dev/null +++ b/lib_pypy/_pypy_wait.py @@ -0,0 +1,35 @@ +from ctypes import CDLL, c_int, POINTER, byref +from ctypes.util import find_library +from resource import _struct_rusage, struct_rusage + +libc = CDLL(find_library("c")) +wait3 = libc.wait3 + +wait3.argtypes = 
[POINTER(c_int), c_int, POINTER(_struct_rusage)] + +def wait3(options): + status = c_int() + _rusage = _struct_rusage() + pid = wait3(byref(status), c_int(options), byref(_rusage)) + + rusage = struct_rusage(( + float(_rusage.ru_utime), + float(_rusage.ru_stime), + _rusage.ru_maxrss, + _rusage.ru_ixrss, + _rusage.ru_idrss, + _rusage.ru_isrss, + _rusage.ru_minflt, + _rusage.ru_majflt, + _rusage.ru_nswap, + _rusage.ru_inblock, + _rusage.ru_oublock, + _rusage.ru_msgsnd, + _rusage.ru_msgrcv, + _rusage.ru_nsignals, + _rusage.ru_nvcsw, + _rusage.ru_nivcsw)) + + return pid, status.value, rusage + +__all__ = ["wait3"] diff --git a/pypy/module/posix/app_posix.py b/pypy/module/posix/app_posix.py --- a/pypy/module/posix/app_posix.py +++ b/pypy/module/posix/app_posix.py @@ -195,6 +195,14 @@ """ return posix.waitpid(-1, 0) + def wait3(options): + """ wait3() -> (pid, status, rusage) + + Wait for completion of a child process and provides resource usage informations + """ + from _pypy_wait import wait3 + return wait3(options) + else: # Windows implementations diff --git a/pypy/module/posix/__init__.py b/pypy/module/posix/__init__.py --- a/pypy/module/posix/__init__.py +++ b/pypy/module/posix/__init__.py @@ -31,6 +31,8 @@ if hasattr(os, 'wait'): appleveldefs['wait'] = 'app_posix.wait' + if hasattr(os, 'wait3'): + appleveldefs['wait3'] = 'app_posix.wait3' interpleveldefs = { 'open' : 'interp_posix.open', diff --git a/lib_pypy/pypy_test/test_os_wait3.py b/lib_pypy/pypy_test/test_os_wait3.py new file mode 100644 --- /dev/null +++ b/lib_pypy/pypy_test/test_os_wait3.py @@ -0,0 +1,19 @@ +import os + +if hasattr(os, 'wait3'): + def test_os_wait3(): + exit_status = 0x33 + + if not hasattr(os, "fork"): + skip("Need fork() to test wait3()") + + child = os.fork() + if child == 0: # in child + os._exit(exit_status) + else: + pid, status, rusage = os.wait3(0) + assert child == pid + assert os.WIFEXITED(status) + assert os.WEXITSTATUS(status) == exit_status + assert isinstance(rusage.ru_utime, 
float) + assert isinstance(rusage.ru_maxrss, int) From commits-noreply at bitbucket.org Thu Mar 31 20:47:46 2011 From: commits-noreply at bitbucket.org (arigo) Date: Thu, 31 Mar 2011 20:47:46 +0200 (CEST) Subject: [pypy-svn] pypy jit-shadowstack: Fix: call_may_force ended up calling write_new_force_index(), thus Message-ID: <20110331184746.2FF36282B9C@codespeak.net> Author: Armin Rigo Branch: jit-shadowstack Changeset: r43062:8054ede579d8 Date: 2011-03-31 20:47 +0200 http://bitbucket.org/pypy/pypy/changeset/8054ede579d8/ Log: Fix: call_may_force ended up calling write_new_force_index(), thus getting two force_indexes for the operation. diff --git a/pypy/jit/backend/x86/assembler.py b/pypy/jit/backend/x86/assembler.py --- a/pypy/jit/backend/x86/assembler.py +++ b/pypy/jit/backend/x86/assembler.py @@ -1851,6 +1851,10 @@ self.pending_guard_tokens.append(guard_token) def genop_call(self, op, arglocs, resloc): + force_index = self.write_new_force_index() + self._genop_call(op, arglocs, resloc, force_index) + + def _genop_call(self, op, arglocs, resloc, force_index): sizeloc = arglocs[0] assert isinstance(sizeloc, ImmedLoc) size = sizeloc.value @@ -1865,7 +1869,6 @@ else: tmp = eax - force_index = self.write_new_force_index() self._emit_call(force_index, x, arglocs, 3, tmp=tmp) if IS_X86_32 and isinstance(resloc, StackLoc) and resloc.width == 8: @@ -1897,7 +1900,7 @@ faildescr = guard_op.getdescr() fail_index = self.cpu.get_fail_descr_number(faildescr) self.mc.MOV_bi(FORCE_INDEX_OFS, fail_index) - self.genop_call(op, arglocs, result_loc) + self._genop_call(op, arglocs, result_loc, fail_index) self.mc.CMP_bi(FORCE_INDEX_OFS, 0) self.implement_guard(guard_token, 'L')